/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"

#include <setjmp.h>
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   the mode given by `qty_mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers does not match the mode of those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the existing entries to be ignored if anyone tries
   to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
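/* [Expository example added in editing; not part of the original cse.c.]
   A minimal standalone sketch of the quantity-number scheme described
   above, using toy fixed-size arrays in place of cse.c's dynamically
   allocated vectors.  All `toy_*' names are hypothetical.  */
#if 0
#include <stdio.h>

#define TOY_MAX_REG 8

static int toy_reg_qty[TOY_MAX_REG];	/* quantity of each register */
static int toy_next_qty;

/* Start of a basic block: reg_qty[N] == N marks "no quantity assigned";
   real quantity numbers are >= TOY_MAX_REG, as documented above.  */
static void
toy_new_block (void)
{
  int i;
  for (i = 0; i < TOY_MAX_REG; i++)
    toy_reg_qty[i] = i;
  toy_next_qty = TOY_MAX_REG;
}

/* A load of unknown value gives REG a fresh quantity; a reg-reg copy
   just copies the quantity number, recording the equivalence.  */
static void toy_set_unknown (int reg) { toy_reg_qty[reg] = toy_next_qty++; }
static void toy_copy (int dst, int src) { toy_reg_qty[dst] = toy_reg_qty[src]; }

int
main (void)
{
  toy_new_block ();
  toy_set_unknown (3);		/* r3 = <some computation> */
  toy_copy (5, 3);		/* r5 = r3 */
  /* r3 and r5 now share a quantity, so cse knows they are equivalent.  */
  printf ("qty(r3)=%d qty(r5)=%d\n", toy_reg_qty[3], toy_reg_qty[5]);
  return 0;
}
#endif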
/* One plus largest register number used in this function.  */

static int max_reg;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) (pseudo) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;
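/* [Expository example; not original.]  A hedged sketch of how these three
   parallel vectors record a comparison whose outcome is known, e.g. on the
   path where `r5 == 0' is known to hold.  Register number 5 and its
   quantity Q are hypothetical; record_jump_equiv, later in this file, is
   what actually records such facts.  */
#if 0
int q = reg_qty[5];			/* quantity of hypothetical reg 5 */
qty_comparison_code[q] = EQ;		/* "r5 EQ ..." is known true here */
qty_comparison_const[q] = const0_rtx;	/* ... compared against constant 0 */
qty_comparison_qty[q] = -1;		/* not a comparison with a register */
#endif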
#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by (pseudo) register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by (pseudo) register number, gives the number of the next (or
   previous) (pseudo) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by (pseudo) register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by (pseudo) register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints: one with max_reg elements, each holding -1;
   the other with max_reg + 500 (an approximation for max_qty) elements,
   where element i contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;
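/* [Expository example; not original.]  The point of the two vectors above:
   a single block copy re-initializes a per-register vector at every basic
   block boundary instead of a max_reg-iteration loop.  A sketch, assuming
   the vectors have already been filled in once per function:  */
#if 0
/* Once per function: all_minus_one[i] = -1 and consec_ints[i] = i.  */

/* Then once per basic block (see new_basic_block below):  */
bcopy ((char *) all_minus_one, (char *) reg_in_table,
       max_reg * sizeof (int));			/* reg_in_table[i] = -1 */
bcopy ((char *) consec_ints, (char *) reg_qty,
       max_reg * sizeof (int));			/* reg_qty[i] = i */
#endif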
/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
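/* [Expository example; not original.]  One way cuids answer "is this
   register used outside the current basic block?", per the comment on
   `uid_cuid' above.  regno_first_uid/regno_last_uid are the global maps
   from register number to first/last use (make_regs_eqv below consults
   them the same way); `regno' here is hypothetical.  */
#if 0
int used_outside_block
  = (uid_cuid[regno_first_uid[regno]] < cse_basic_block_start
     || uid_cuid[regno_last_uid[regno]] > cse_basic_block_end);
#endif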
/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};
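/* [Expository example; not original.]  The canonical way to walk one
   equivalence class: hop to `first_same_value', then follow
   `next_same_value', which visits elements in order of increasing cost.
   This hypothetical helper returns the cheapest REG equivalent, the same
   pattern use_related_value and others below rely on.  */
#if 0
static rtx
cheapest_reg_in_class (elt)
     struct table_elt *elt;
{
  register struct table_elt *p;

  for (p = elt->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == REG)
      return p->exp;
  return 0;
}
#endif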
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS	\
  : canon_hash (X, M) % NBUCKETS)
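/* [Expository note; not original.]  For pseudos the macro above hashes the
   quantity number rather than the register number, so two registers that
   are currently known equal land in the same bucket and can find each
   other's entries.  Sketch, with hypothetical pseudo rtxs R1 and R2:  */
#if 0
unsigned h1
  = (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (r1)]) % NBUCKETS;
unsigned h2
  = (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (r2)]) % NBUCKETS;
/* reg_qty[REGNO (r1)] == reg_qty[REGNO (r2)] implies h1 == h2.  */
#endif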
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
    || fixed_regs[N] || global_regs[N])		\
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER)	\
   || CHEAP_REGNO (REGNO (N)))

#define COST(X)						\
  (GET_CODE (X) == REG					\
   ? (CHEAP_REG (X) ? 0					\
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1		\
      : 2)						\
   : rtx_cost (X, SET) * 2)
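/* [Expository note; not original.]  Worked values of the cost lattice these
   macros define, assuming hard reg 3 is an ordinary (non-fixed,
   non-user-variable) register:

	COST of the frame pointer reg	=> 0	(CHEAP_REG)
	COST of pseudo reg 100		=> 1
	COST of hard reg 3		=> 2
	COST of any other rtx X		=> rtx_cost (X, SET) * 2

   Doubling rtx_cost tends to keep computed expressions more expensive than
   plain registers, so class ordering prefers registers; a target's
   CONST_COSTS/RTX_COSTS can still make particular rtxs, such as small
   constants, cheaper.  */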
/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Bits describing what kind of values in memory must be invalidated
   for a particular instruction.  If all three bits are zero,
   no memory refs need to be invalidated.  Each bit is more powerful
   than the preceding ones, and if a bit is set then the preceding
   bits are also set.

   Here is how the bits are set:
   Pushing onto the stack invalidates only the stack pointer,
   writing at a fixed address invalidates only variable addresses,
   writing in a structure element at variable address
     invalidates all but scalar variables,
   and writing in anything else at variable address invalidates everything.  */

struct write_data
{
  int sp : 1;			/* Invalidate stack pointer.  */
  int var : 1;			/* Invalidate variable addresses.  */
  int nonscalar : 1;		/* Invalidate all but scalar variables.  */
  int all : 1;			/* Invalidate all memory refs.  */
};
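/* [Expository example; not original.]  One reading of the "each bit implies
   the preceding ones" rule above, as hypothetical settings of W for the
   four kinds of stores it lists:  */
#if 0
struct write_data w;

/* push onto the stack:             w.sp = 1;				*/
/* store at a fixed address:        w.sp = w.var = 1;			*/
/* store into a structure element
   at a varying address:            w.sp = w.var = w.nonscalar = 1;	*/
/* any other varying-address store: w.sp = w.var = w.nonscalar
					 = w.all = 1;			*/
#endif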
/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == arg_pointer_rtx					\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))
static void new_basic_block	PROTO((void));
static void make_new_qty	PROTO((int));
static void make_regs_eqv	PROTO((int, int));
static void delete_reg_equiv	PROTO((int));
static int mention_regs	PROTO((rtx));
static int insert_regs	PROTO((rtx, struct table_elt *, int));
static void free_element	PROTO((struct table_elt *));
static void remove_from_table	PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup	PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function	PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate	PROTO((rtx, enum machine_mode));
static void remove_invalid_refs	PROTO((int));
static void rehash_using_reg	PROTO((rtx));
static void invalidate_memory	PROTO((struct write_data *));
static void invalidate_for_call	PROTO((void));
static rtx use_related_value	PROTO((rtx, struct table_elt *));
static unsigned canon_hash	PROTO((rtx, enum machine_mode));
static unsigned safe_hash	PROTO((rtx, enum machine_mode));
static int exp_equiv_p	PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p	PROTO((rtx, rtx));
static int refers_to_mem_p	PROTO((rtx, rtx, HOST_WIDE_INT,
				       HOST_WIDE_INT));
static int cse_rtx_addr_varies_p	PROTO((rtx));
static rtx canon_reg	PROTO((rtx, rtx));
static void find_best_addr	PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx simplify_plus_minus	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx fold_rtx	PROTO((rtx, rtx));
static rtx equiv_constant	PROTO((rtx));
static void record_jump_equiv	PROTO((rtx, int));
static void record_jump_cond	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx, int));
static void cse_insn	PROTO((rtx, int));
static void note_mem_written	PROTO((rtx, struct write_data *));
static void invalidate_from_clobbers	PROTO((struct write_data *, rtx));
static rtx cse_process_notes	PROTO((rtx, rtx));
static void cse_around_loop	PROTO((rtx));
static void invalidate_skipped_set	PROTO((rtx, rtx));
static void invalidate_skipped_block	PROTO((rtx));
static void cse_check_loop_start	PROTO((rtx, rtx));
static void cse_set_around_loop	PROTO((rtx, rtx, rtx));
static rtx cse_basic_block	PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PROTO((rtx, int *, rtx, int));

extern int rtx_equal_function_value_matters;

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
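/* [Expository note; not original.]  Worked values of the formula above:
   COSTS_N_INSNS (1) == 2, matching the default cost of a single simple
   operation in rtx_cost below; COSTS_N_INSNS (5) == 18 is the default
   MULT cost and COSTS_N_INSNS (7) == 26 the default DIV/MOD cost.  */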
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
      CONST_COSTS (x, code, outer_code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero ((char *) reg_tick, max_reg * sizeof (int));

  bcopy ((char *) all_minus_one, (char *) reg_in_table,
	 max_reg * sizeof (int));
  bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
		       || (uid_cuid[regno_first_uid[new]]
			   < cse_basic_block_start))
		      && (uid_cuid[regno_last_uid[new]]
			  > uid_cuid[regno_last_uid[firstr]]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = reg_qty[reg];
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[reg_qty[regno]] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]]
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X.  This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;
  rtx base;
  HOST_WIDE_INT start, end;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* Because a register can be referenced in more than one mode,
	     we might have to remove more than one table entry.  */

	  struct table_elt *elt;

	  while (elt = lookup_for_remove (x, hash, GET_MODE (x)))
	    remove_from_table (elt, hash);
	}
      else
	{
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (full_mode),
				     &base, &start, &end);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  if (refers_to_mem_p (p->exp, base, start, end))
	    remove_from_table (p, i);
	}
    }
}
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (regno)
     int regno;
{
  register int i;
  register struct table_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG
	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
	  remove_from_table (p, i);
      }
}
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (x)
     rtx x;
{
  int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (GET_CODE (x) != REG
      || reg_in_table[REGNO (x)] < 0
      || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  We can skip
     objects that are registers, since they are handled specially.  */

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, 0)
	    && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
	  {
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
/* Remove from the hash table all expressions that reference memory,
   or some of them as specified by *WRITES.  */

static void
invalidate_memory (writes)
     struct write_data *writes;
{
  register int i;
  register struct table_elt *p, *next;
  int all = writes->all;
  int nonscalar = writes->nonscalar;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (p->in_memory
	    && (all
		|| (nonscalar && p->in_struct)
		|| cse_rtx_addr_varies_p (p->exp)))
	  remove_from_table (p, i);
      }
}
1721 /* Remove from the hash table all expressions that are call-clobbered
1722 registers. Also update their TICK values. */
1724 static void
1725 invalidate_for_call ()
1727 int regno, endregno;
1728 int i;
1729 unsigned hash;
1730 struct table_elt *p, *next;
1731 int in_table = 0;
1733 /* Go through all the hard registers. For each that is clobbered in
1734 a CALL_INSN, remove the register from quantity chains and update
1735 reg_tick if defined. Also see if any of these registers is currently
1736 in the table. */
1738 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1739 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1741 delete_reg_equiv (regno);
1742 if (reg_tick[regno] >= 0)
1743 reg_tick[regno]++;
1745 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1748 /* In the case where we have no call-clobbered hard registers in the
1749 table, we are done. Otherwise, scan the table and remove any
1750 entry that overlaps a call-clobbered register. */
1752 if (in_table)
1753 for (hash = 0; hash < NBUCKETS; hash++)
1754 for (p = table[hash]; p; p = next)
1756 next = p->next_same_hash;
1758 if (GET_CODE (p->exp) != REG
1759 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1760 continue;
1762 regno = REGNO (p->exp);
1763 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1765 for (i = regno; i < endregno; i++)
1766 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1768 remove_from_table (p, hash);
1769 break;
1774 /* Given an expression X of type CONST,
1775 and ELT which is its table entry (or 0 if it
1776 is not in the hash table),
1777 return an alternate expression for X as a register plus integer.
1778 If none can be found, return 0. */
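/* Worked example (illustrative; names are made up): suppose X is
   (const (plus (symbol_ref "s") (const_int 16))) and the table already
   knows that (const (plus (symbol_ref "s") (const_int 4))) is in the
   same class as (reg 99).  The two CONSTs are related values, so this
   function can return (plus (reg 99) (const_int 12)): the register plus
   the difference of the two integer terms. */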
1780 static rtx
1781 use_related_value (x, elt)
1782 rtx x;
1783 struct table_elt *elt;
1785 register struct table_elt *relt = 0;
1786 register struct table_elt *p, *q;
1787 HOST_WIDE_INT offset;
1789 /* First, is there anything related known?
1790 If we have a table element, we can tell from that.
1791 Otherwise, must look it up. */
1793 if (elt != 0 && elt->related_value != 0)
1794 relt = elt;
1795 else if (elt == 0 && GET_CODE (x) == CONST)
1797 rtx subexp = get_related_value (x);
1798 if (subexp != 0)
1799 relt = lookup (subexp,
1800 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1801 GET_MODE (subexp));
1804 if (relt == 0)
1805 return 0;
1807 /* Search all related table entries for one that has an
1808 equivalent register. */
1810 p = relt;
1811 while (1)
1813 /* This loop is strange in that it is executed in two different cases.
1814 The first is when X is already in the table. Then it is searching
1815 the RELATED_VALUE list of X's class (RELT). The second case is when
1816 X is not in the table. Then RELT points to a class for the related
1817 value.
1819 Ensure that, whatever case we are in, we ignore classes that have
1820 the same value as X. */
1822 if (rtx_equal_p (x, p->exp))
1823 q = 0;
1824 else
1825 for (q = p->first_same_value; q; q = q->next_same_value)
1826 if (GET_CODE (q->exp) == REG)
1827 break;
1829 if (q)
1830 break;
1832 p = p->related_value;
1834 /* We went all the way around, so there is nothing to be found.
1835 Alternatively, perhaps RELT was in the table for some other reason
1836 and it has no related values recorded. */
1837 if (p == relt || p == 0)
1838 break;
1841 if (q == 0)
1842 return 0;
1844 offset = (get_integer_term (x) - get_integer_term (p->exp));
1845 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1846 return plus_constant (q->exp, offset);
1849 /* Hash an rtx. We are careful to make sure the value is never negative.
1850 Equivalent registers hash identically.
1851 MODE is used in hashing for CONST_INTs only;
1852 otherwise the mode of X is used.
1854 Store 1 in do_not_record if any subexpression is volatile.
1856 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1857 which does not have the RTX_UNCHANGING_P bit set.
1858 In this case, also store 1 in hash_arg_in_struct
1859 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1861 Note that cse_insn knows that the hash code of a MEM expression
1862 is just (int) MEM plus the hash code of the address. */
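/* Illustrative trace (register numbers are made up): for
   (mem (plus (reg 65) (const_int 4))) we add (unsigned) MEM and then
   iterate on the address; the PLUS goes through the generic case below,
   and the REG contributes ((unsigned) REG << 7) + reg_qty[65], which is
   why two registers in the same quantity class hash identically. */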
1864 static unsigned
1865 canon_hash (x, mode)
1866 rtx x;
1867 enum machine_mode mode;
1869 register int i, j;
1870 register unsigned hash = 0;
1871 register enum rtx_code code;
1872 register char *fmt;
1874 /* repeat is used to turn tail-recursion into iteration. */
1875 repeat:
1876 if (x == 0)
1877 return hash;
1879 code = GET_CODE (x);
1880 switch (code)
1882 case REG:
1884 register int regno = REGNO (x);
1886 /* On some machines, we can't record any non-fixed hard register,
1887 because extending its life will cause reload problems. We
1888 consider ap, fp, and sp to be fixed for this purpose.
1889 On all machines, we can't record any global registers. */
1891 if (regno < FIRST_PSEUDO_REGISTER
1892 && (global_regs[regno]
1893 #ifdef SMALL_REGISTER_CLASSES
1894 || (! fixed_regs[regno]
1895 && regno != FRAME_POINTER_REGNUM
1896 && regno != HARD_FRAME_POINTER_REGNUM
1897 && regno != ARG_POINTER_REGNUM
1898 && regno != STACK_POINTER_REGNUM)
1899 #endif
1902 do_not_record = 1;
1903 return 0;
1905 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1906 return hash;
1909 case CONST_INT:
1911 unsigned HOST_WIDE_INT tem = INTVAL (x);
1912 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1913 return hash;
1916 case CONST_DOUBLE:
1917 /* This is like the general case, except that it only counts
1918 the integers representing the constant. */
1919 hash += (unsigned) code + (unsigned) GET_MODE (x);
1920 if (GET_MODE (x) != VOIDmode)
1921 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1923 unsigned tem = XINT (x, i);
1924 hash += tem;
1926 else
1927 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1928 + (unsigned) CONST_DOUBLE_HIGH (x));
1929 return hash;
1931 /* Assume there is only one rtx object for any given label. */
1932 case LABEL_REF:
1933 hash
1934 += ((unsigned) LABEL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1935 return hash;
1937 case SYMBOL_REF:
1938 hash
1939 += ((unsigned) SYMBOL_REF << 7) + (unsigned HOST_WIDE_INT) XSTR (x, 0);
1940 return hash;
1942 case MEM:
1943 if (MEM_VOLATILE_P (x))
1945 do_not_record = 1;
1946 return 0;
1948 if (! RTX_UNCHANGING_P (x))
1950 hash_arg_in_memory = 1;
1951 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1953 /* Now that we have already found this special case,
1954 might as well speed it up as much as possible. */
1955 hash += (unsigned) MEM;
1956 x = XEXP (x, 0);
1957 goto repeat;
1959 case PRE_DEC:
1960 case PRE_INC:
1961 case POST_DEC:
1962 case POST_INC:
1963 case PC:
1964 case CC0:
1965 case CALL:
1966 case UNSPEC_VOLATILE:
1967 do_not_record = 1;
1968 return 0;
1970 case ASM_OPERANDS:
1971 if (MEM_VOLATILE_P (x))
1973 do_not_record = 1;
1974 return 0;
1978 i = GET_RTX_LENGTH (code) - 1;
1979 hash += (unsigned) code + (unsigned) GET_MODE (x);
1980 fmt = GET_RTX_FORMAT (code);
1981 for (; i >= 0; i--)
1983 if (fmt[i] == 'e')
1985 rtx tem = XEXP (x, i);
1987 /* If we are about to do the last recursive call
1988 needed at this level, change it into iteration.
1989 This function is called enough to be worth it. */
1990 if (i == 0)
1992 x = tem;
1993 goto repeat;
1995 hash += canon_hash (tem, 0);
1997 else if (fmt[i] == 'E')
1998 for (j = 0; j < XVECLEN (x, i); j++)
1999 hash += canon_hash (XVECEXP (x, i, j), 0);
2000 else if (fmt[i] == 's')
2002 register unsigned char *p = (unsigned char *) XSTR (x, i);
2003 if (p)
2004 while (*p)
2005 hash += *p++;
2007 else if (fmt[i] == 'i')
2009 register unsigned tem = XINT (x, i);
2010 hash += tem;
2012 else
2013 abort ();
2015 return hash;
2018 /* Like canon_hash but with no side effects. */
2020 static unsigned
2021 safe_hash (x, mode)
2022 rtx x;
2023 enum machine_mode mode;
2025 int save_do_not_record = do_not_record;
2026 int save_hash_arg_in_memory = hash_arg_in_memory;
2027 int save_hash_arg_in_struct = hash_arg_in_struct;
2028 unsigned hash = canon_hash (x, mode);
2029 hash_arg_in_memory = save_hash_arg_in_memory;
2030 hash_arg_in_struct = save_hash_arg_in_struct;
2031 do_not_record = save_do_not_record;
2032 return hash;
2035 /* Return 1 iff X and Y would canonicalize into the same thing,
2036 without actually constructing the canonicalization of either one.
2037 If VALIDATE is nonzero,
2038 we assume X is an expression being processed from the rtl
2039 and Y was found in the hash table. We check register refs
2040 in Y for being marked as valid.
2042 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2043 that is known to be in the register. Ordinarily, we don't allow them
2044 to match, because letting them match would cause unpredictable results
2045 in all the places that search a hash table chain for an equivalent
2046 for a given value. A possible equivalent that has different structure
2047 has its hash code computed from different data. Whether the hash code
2048 is the same as that of the given value is pure luck. */
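/* Illustrative example (register numbers are made up): with EQUAL_VALUES
   nonzero, (const_int 4) can match (reg 100) provided reg 100's quantity
   has (const_int 4) recorded as its constant (qty_const) in the same
   mode, and, when VALIDATE is nonzero, provided reg 100's table entries
   are still valid (reg_in_table == reg_tick). */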
2050 static int
2051 exp_equiv_p (x, y, validate, equal_values)
2052 rtx x, y;
2053 int validate;
2054 int equal_values;
2056 register int i, j;
2057 register enum rtx_code code;
2058 register char *fmt;
2060 /* Note: it is incorrect to assume an expression is equivalent to itself
2061 if VALIDATE is nonzero. */
2062 if (x == y && !validate)
2063 return 1;
2064 if (x == 0 || y == 0)
2065 return x == y;
2067 code = GET_CODE (x);
2068 if (code != GET_CODE (y))
2070 if (!equal_values)
2071 return 0;
2073 /* If X is a constant and Y is a register or vice versa, they may be
2074 equivalent. We only have to validate if Y is a register. */
2075 if (CONSTANT_P (x) && GET_CODE (y) == REG
2076 && REGNO_QTY_VALID_P (REGNO (y))
2077 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2078 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2079 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2080 return 1;
2082 if (CONSTANT_P (y) && code == REG
2083 && REGNO_QTY_VALID_P (REGNO (x))
2084 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2085 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2086 return 1;
2088 return 0;
2091 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2092 if (GET_MODE (x) != GET_MODE (y))
2093 return 0;
2095 switch (code)
2097 case PC:
2098 case CC0:
2099 return x == y;
2101 case CONST_INT:
2102 return INTVAL (x) == INTVAL (y);
2104 case LABEL_REF:
2105 return XEXP (x, 0) == XEXP (y, 0);
2107 case SYMBOL_REF:
2108 return XSTR (x, 0) == XSTR (y, 0);
2110 case REG:
2112 int regno = REGNO (y);
2113 int endregno
2114 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2115 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2116 int i;
2118 /* If the quantities are not the same, the expressions are not
2119 equivalent. If they are and we are not to validate, they
2120 are equivalent. Otherwise, ensure all regs are up-to-date. */
2122 if (reg_qty[REGNO (x)] != reg_qty[regno])
2123 return 0;
2125 if (! validate)
2126 return 1;
2128 for (i = regno; i < endregno; i++)
2129 if (reg_in_table[i] != reg_tick[i])
2130 return 0;
2132 return 1;
2135 /* For commutative operations, check both orders. */
2136 case PLUS:
2137 case MULT:
2138 case AND:
2139 case IOR:
2140 case XOR:
2141 case NE:
2142 case EQ:
2143 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2144 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2145 validate, equal_values))
2146 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2147 validate, equal_values)
2148 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2149 validate, equal_values)));
2152 /* Compare the elements. If any pair of corresponding elements
2153 fails to match, return 0 for the whole thing. */
2155 fmt = GET_RTX_FORMAT (code);
2156 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2158 switch (fmt[i])
2160 case 'e':
2161 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2162 return 0;
2163 break;
2165 case 'E':
2166 if (XVECLEN (x, i) != XVECLEN (y, i))
2167 return 0;
2168 for (j = 0; j < XVECLEN (x, i); j++)
2169 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2170 validate, equal_values))
2171 return 0;
2172 break;
2174 case 's':
2175 if (strcmp (XSTR (x, i), XSTR (y, i)))
2176 return 0;
2177 break;
2179 case 'i':
2180 if (XINT (x, i) != XINT (y, i))
2181 return 0;
2182 break;
2184 case 'w':
2185 if (XWINT (x, i) != XWINT (y, i))
2186 return 0;
2187 break;
2189 case '0':
2190 break;
2192 default:
2193 abort ();
2197 return 1;
2200 /* Return 1 iff any subexpression of X matches Y.
2201 Here we do not require that X or Y be valid (that is, that the
2202 registers they refer to have up-to-date table entries). */
2204 static int
2205 refers_to_p (x, y)
2206 rtx x, y;
2208 register int i;
2209 register enum rtx_code code;
2210 register char *fmt;
2212 repeat:
2213 if (x == y)
2214 return 1;
2215 if (x == 0 || y == 0)
2216 return 0;
2218 code = GET_CODE (x);
2219 /* If X as a whole has the same code as Y, they may match.
2220 If so, return 1. */
2221 if (code == GET_CODE (y))
2223 if (exp_equiv_p (x, y, 0, 1))
2224 return 1;
2227 /* X does not match, so try its subexpressions. */
2229 fmt = GET_RTX_FORMAT (code);
2230 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2231 if (fmt[i] == 'e')
2233 if (i == 0)
2235 x = XEXP (x, 0);
2236 goto repeat;
2238 else
2239 if (refers_to_p (XEXP (x, i), y))
2240 return 1;
2242 else if (fmt[i] == 'E')
2244 int j;
2245 for (j = 0; j < XVECLEN (x, i); j++)
2246 if (refers_to_p (XVECEXP (x, i, j), y))
2247 return 1;
2250 return 0;
2253 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2254 set PBASE, PSTART, and PEND which correspond to the base of the address,
2255 the starting offset, and ending offset respectively.
2257 ADDR is known to be a nonvarying address. */
2259 /* ??? Despite what the comments say, this function is in fact frequently
2260 passed varying addresses. This does not appear to cause any problems. */
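/* Worked example (illustrative; names are made up): for
   ADDR = (plus (reg 100) (const_int 8)) with SIZE = 4, where reg 100 has
   the known constant value (symbol_ref "x"), we get
   *PBASE = (symbol_ref "x"), *PSTART = 8 and *PEND = 12, describing the
   byte range [8, 12) relative to the base. */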
2262 static void
2263 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2264 rtx addr;
2265 int size;
2266 rtx *pbase;
2267 HOST_WIDE_INT *pstart, *pend;
2269 rtx base;
2270 HOST_WIDE_INT start, end;
2272 base = addr;
2273 start = 0;
2274 end = 0;
2276 /* Registers with nonvarying addresses usually have constant equivalents;
2277 but the frame pointer register is also possible. */
2278 if (GET_CODE (base) == REG
2279 && qty_const != 0
2280 && REGNO_QTY_VALID_P (REGNO (base))
2281 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2282 && qty_const[reg_qty[REGNO (base)]] != 0)
2283 base = qty_const[reg_qty[REGNO (base)]];
2284 else if (GET_CODE (base) == PLUS
2285 && GET_CODE (XEXP (base, 1)) == CONST_INT
2286 && GET_CODE (XEXP (base, 0)) == REG
2287 && qty_const != 0
2288 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2289 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2290 == GET_MODE (XEXP (base, 0)))
2291 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2293 start = INTVAL (XEXP (base, 1));
2294 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2296 /* This can happen as the result of virtual register instantiation,
2297 if the initial offset is too large to be a valid address. */
2298 else if (GET_CODE (base) == PLUS
2299 && GET_CODE (XEXP (base, 0)) == REG
2300 && GET_CODE (XEXP (base, 1)) == REG
2301 && qty_const != 0
2302 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2303 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2304 == GET_MODE (XEXP (base, 0)))
2305 && qty_const[reg_qty[REGNO (XEXP (base, 0))]]
2306 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2307 && (qty_mode[reg_qty[REGNO (XEXP (base, 1))]]
2308 == GET_MODE (XEXP (base, 1)))
2309 && qty_const[reg_qty[REGNO (XEXP (base, 1))]])
2311 rtx tem = qty_const[reg_qty[REGNO (XEXP (base, 1))]];
2312 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2314 /* One of the two values must be a constant. */
2315 if (GET_CODE (base) != CONST_INT)
2317 if (GET_CODE (tem) != CONST_INT)
2318 abort ();
2319 start = INTVAL (tem);
2321 else
2323 start = INTVAL (base);
2324 base = tem;
2328 /* Handle everything that we can find inside an address that has been
2329 viewed as constant. */
2331 while (1)
2333 /* If no part of this switch does a "continue", the code outside
2334 will exit this loop. */
2336 switch (GET_CODE (base))
2338 case LO_SUM:
2339 /* By definition, operand1 of a LO_SUM is the associated constant
2340 address. Use the associated constant address as the base
2341 instead. */
2342 base = XEXP (base, 1);
2343 continue;
2345 case CONST:
2346 /* Strip off CONST. */
2347 base = XEXP (base, 0);
2348 continue;
2350 case PLUS:
2351 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2353 start += INTVAL (XEXP (base, 1));
2354 base = XEXP (base, 0);
2355 continue;
2357 break;
2359 case AND:
2360 /* Handle the case of an AND which is the negative of a power of
2361 two. This is used to represent unaligned memory operations. */
2362 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2363 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2365 set_nonvarying_address_components (XEXP (base, 0), size,
2366 pbase, pstart, pend);
2368 /* Assume the worst misalignment. START is affected, but not
2369 END, so compensate by adjusting SIZE. Don't lose any
2370 constant we already had. */
2372 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2373 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2374 end += *pend;
2375 base = *pbase;
2377 break;
2380 break;
2383 if (GET_CODE (base) == CONST_INT)
2385 start += INTVAL (base);
2386 base = const0_rtx;
2389 end = start + size;
2391 /* Set the return values. */
2392 *pbase = base;
2393 *pstart = start;
2394 *pend = end;
2397 /* Return 1 iff any subexpression of X refers to memory
2398 at an address of BASE plus some offset
2399 such that any of the bytes' offsets fall between START (inclusive)
2400 and END (exclusive).
2402 The value is undefined if X is a varying address (as determined by
2403 cse_rtx_addr_varies_p). This function is not used in such cases.
2405 When used in the cse pass, `qty_const' is nonzero, and it is used
2406 to treat an address that is a register with a known constant value
2407 as if it were that constant value.
2408 In the loop pass, `qty_const' is zero, so this is not done. */
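/* Illustrative note: with a common base, the half-open ranges
   [MYSTART, MYEND) and [START, END) overlap exactly when
   MYEND > START && MYSTART < END, the test used below; e.g. bytes
   [0, 4) and [4, 8) of the same base do not conflict. */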
2410 static int
2411 refers_to_mem_p (x, base, start, end)
2412 rtx x, base;
2413 HOST_WIDE_INT start, end;
2415 register HOST_WIDE_INT i;
2416 register enum rtx_code code;
2417 register char *fmt;
2419 repeat:
2420 if (x == 0)
2421 return 0;
2423 code = GET_CODE (x);
2424 if (code == MEM)
2426 register rtx addr = XEXP (x, 0); /* Get the address. */
2427 rtx mybase;
2428 HOST_WIDE_INT mystart, myend;
2430 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2431 &mybase, &mystart, &myend);
2434 /* refers_to_mem_p is never called with varying addresses.
2435 If the base addresses are not equal, there is no chance
2436 of the memory addresses conflicting. */
2437 if (! rtx_equal_p (mybase, base))
2438 return 0;
2440 return myend > start && mystart < end;
2443 /* X does not match, so try its subexpressions. */
2445 fmt = GET_RTX_FORMAT (code);
2446 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2447 if (fmt[i] == 'e')
2449 if (i == 0)
2451 x = XEXP (x, 0);
2452 goto repeat;
2454 else
2455 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2456 return 1;
2458 else if (fmt[i] == 'E')
2460 int j;
2461 for (j = 0; j < XVECLEN (x, i); j++)
2462 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2463 return 1;
2466 return 0;
2469 /* Nonzero if X refers to memory at a varying address;
2470 except that a register which has at the moment a known constant value
2471 isn't considered variable. */
2473 static int
2474 cse_rtx_addr_varies_p (x)
2475 rtx x;
2477 /* We need not check for X and the equivalence class being of the same
2478 mode because if X is equivalent to a constant in some mode, it
2479 doesn't vary in any mode. */
2481 if (GET_CODE (x) == MEM
2482 && GET_CODE (XEXP (x, 0)) == REG
2483 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2484 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2485 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2486 return 0;
2488 if (GET_CODE (x) == MEM
2489 && GET_CODE (XEXP (x, 0)) == PLUS
2490 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2491 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2492 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2493 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2494 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2495 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2496 return 0;
2498 /* This can happen as the result of virtual register instantiation, if
2499 the initial constant is too large to be a valid address. This gives
2500 us a three instruction sequence, load large offset into a register,
2501 load fp minus a constant into a register, then a MEM which is the
2502 sum of the two `constant' registers. */
2503 if (GET_CODE (x) == MEM
2504 && GET_CODE (XEXP (x, 0)) == PLUS
2505 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2506 && GET_CODE (XEXP (XEXP (x, 0), 1)) == REG
2507 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2508 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2509 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2510 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]]
2511 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 1)))
2512 && (GET_MODE (XEXP (XEXP (x, 0), 1))
2513 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 1))]])
2514 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 1))]])
2515 return 0;
2517 return rtx_addr_varies_p (x);
2520 /* Canonicalize an expression:
2521 replace each register reference inside it
2522 with the "oldest" equivalent register.
2524 If INSN is non-zero and we are replacing a pseudo with a hard register
2525 or vice versa, validate_change is used to ensure that INSN remains valid
2526 after we make our substitution. The calls are made with IN_GROUP non-zero
2527 so apply_change_group must be called upon the outermost return from this
2528 function (unless INSN is zero). The result of apply_change_group can
2529 generally be discarded since the changes we are making are optional. */
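/* For example (illustrative; register numbers are made up): if pseudos
   105 and 99 are in one quantity class and 99 is the oldest member,
   (plus (reg 105) (reg 105)) is rewritten as (plus (reg 99) (reg 99)),
   making it more likely that later insns match existing table entries. */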
2531 static rtx
2532 canon_reg (x, insn)
2533 rtx x;
2534 rtx insn;
2536 register int i;
2537 register enum rtx_code code;
2538 register char *fmt;
2540 if (x == 0)
2541 return x;
2543 code = GET_CODE (x);
2544 switch (code)
2546 case PC:
2547 case CC0:
2548 case CONST:
2549 case CONST_INT:
2550 case CONST_DOUBLE:
2551 case SYMBOL_REF:
2552 case LABEL_REF:
2553 case ADDR_VEC:
2554 case ADDR_DIFF_VEC:
2555 return x;
2557 case REG:
2559 register int first;
2561 /* Never replace a hard reg, because hard regs can appear
2562 in more than one machine mode, and we must preserve the mode
2563 of each occurrence. Also, some hard regs appear in
2564 MEMs that are shared and mustn't be altered. Don't try to
2565 replace any reg that maps to a reg of class NO_REGS. */
2566 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2567 || ! REGNO_QTY_VALID_P (REGNO (x)))
2568 return x;
2570 first = qty_first_reg[reg_qty[REGNO (x)]];
2571 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2572 : REGNO_REG_CLASS (first) == NO_REGS ? x
2573 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2577 fmt = GET_RTX_FORMAT (code);
2578 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2580 register int j;
2582 if (fmt[i] == 'e')
2584 rtx new = canon_reg (XEXP (x, i), insn);
2586 /* If replacing pseudo with hard reg or vice versa, ensure the
2587 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2588 if (insn != 0 && new != 0
2589 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2590 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2591 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2592 || insn_n_dups[recog_memoized (insn)] > 0))
2593 validate_change (insn, &XEXP (x, i), new, 1);
2594 else
2595 XEXP (x, i) = new;
2597 else if (fmt[i] == 'E')
2598 for (j = 0; j < XVECLEN (x, i); j++)
2599 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2602 return x;
2605 /* LOC is a location with INSN that is an operand address (the contents of
2606 a MEM). Find the best equivalent address to use that is valid for this
2607 insn.
2609 On most CISC machines, complicated address modes are costly, and rtx_cost
2610 is a good approximation for that cost. However, most RISC machines have
2611 only a few (usually only one) memory reference formats. If an address is
2612 valid at all, it is often just as cheap as any other address. Hence, for
2613 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2614 costs of various addresses. For two addresses of equal cost, choose the one
2615 with the highest `rtx_cost' value as that has the potential of eliminating
2616 the most insns. For equal costs, we choose the first in the equivalence
2617 class. Note that we ignore the fact that pseudo registers are cheaper
2618 than hard registers here because we would also prefer the pseudo registers. */
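/* Illustrative comparison (costs and register numbers are made up): if
   the equivalent addresses (reg 66) and (plus (reg 65) (const_int 4))
   have equal ADDRESS_COST, the loops below prefer the one with the
   larger rtx_cost, since folding the more complex expression into the
   address can eliminate the insns that computed it. */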
2621 static void
2622 find_best_addr (insn, loc)
2623 rtx insn;
2624 rtx *loc;
2626 struct table_elt *elt, *p;
2627 rtx addr = *loc;
2628 int our_cost;
2629 int found_better = 1;
2630 int save_do_not_record = do_not_record;
2631 int save_hash_arg_in_memory = hash_arg_in_memory;
2632 int save_hash_arg_in_struct = hash_arg_in_struct;
2633 int addr_volatile;
2634 int regno;
2635 unsigned hash;
2637 /* Do not try to replace constant addresses or addresses of local and
2638 argument slots. These MEM expressions are made only once and inserted
2639 in many instructions, as well as being used to control symbol table
2640 output. It is not safe to clobber them.
2642 There are some uncommon cases where the address is already in a register
2643 for some reason, but we cannot take advantage of that because we have
2644 no easy way to unshare the MEM. In addition, looking up all stack
2645 addresses is costly. */
2646 if ((GET_CODE (addr) == PLUS
2647 && GET_CODE (XEXP (addr, 0)) == REG
2648 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2649 && (regno = REGNO (XEXP (addr, 0)),
2650 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2651 || regno == ARG_POINTER_REGNUM))
2652 || (GET_CODE (addr) == REG
2653 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2654 || regno == HARD_FRAME_POINTER_REGNUM
2655 || regno == ARG_POINTER_REGNUM))
2656 || CONSTANT_ADDRESS_P (addr))
2657 return;
2659 /* If this address is not simply a register, try to fold it. This will
2660 sometimes simplify the expression. Many simplifications
2661 will not be valid, but some, usually applying the associative rule, will
2662 be valid and produce better code. */
2663 if (GET_CODE (addr) != REG)
2665 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2667 if (1
2668 #ifdef ADDRESS_COST
2669 && (ADDRESS_COST (folded) < ADDRESS_COST (addr)
2670 || (ADDRESS_COST (folded) == ADDRESS_COST (addr)
2671 && rtx_cost (folded) > rtx_cost (addr)))
2672 #else
2673 && rtx_cost (folded) < rtx_cost (addr)
2674 #endif
2675 && validate_change (insn, loc, folded, 0))
2676 addr = folded;
2679 /* If this address is not in the hash table, we can't look for equivalences
2680 of the whole address. Also, ignore if volatile. */
2682 do_not_record = 0;
2683 hash = HASH (addr, Pmode);
2684 addr_volatile = do_not_record;
2685 do_not_record = save_do_not_record;
2686 hash_arg_in_memory = save_hash_arg_in_memory;
2687 hash_arg_in_struct = save_hash_arg_in_struct;
2689 if (addr_volatile)
2690 return;
2692 elt = lookup (addr, hash, Pmode);
2694 #ifndef ADDRESS_COST
2695 if (elt)
2697 our_cost = elt->cost;
2699 /* Find the lowest cost below ours that works. */
2700 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2701 if (elt->cost < our_cost
2702 && (GET_CODE (elt->exp) == REG
2703 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2704 && validate_change (insn, loc,
2705 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2706 return;
2708 #else
2710 if (elt)
2712 /* We need to find the best (under the criteria documented above) entry
2713 in the class that is valid. We use the `flag' field to indicate
2714 choices that were invalid and iterate until we can't find a better
2715 one that hasn't already been tried. */
2717 for (p = elt->first_same_value; p; p = p->next_same_value)
2718 p->flag = 0;
2720 while (found_better)
2722 int best_addr_cost = ADDRESS_COST (*loc);
2723 int best_rtx_cost = (elt->cost + 1) >> 1;
2724 struct table_elt *best_elt = elt;
2726 found_better = 0;
2727 for (p = elt->first_same_value; p; p = p->next_same_value)
2728 if (! p->flag
2729 && (GET_CODE (p->exp) == REG
2730 || exp_equiv_p (p->exp, p->exp, 1, 0))
2731 && (ADDRESS_COST (p->exp) < best_addr_cost
2732 || (ADDRESS_COST (p->exp) == best_addr_cost
2733 && (p->cost + 1) >> 1 > best_rtx_cost)))
2735 found_better = 1;
2736 best_addr_cost = ADDRESS_COST (p->exp);
2737 best_rtx_cost = (p->cost + 1) >> 1;
2738 best_elt = p;
2741 if (found_better)
2743 if (validate_change (insn, loc,
2744 canon_reg (copy_rtx (best_elt->exp),
2745 NULL_RTX), 0))
2746 return;
2747 else
2748 best_elt->flag = 1;
2753 /* If the address is a binary operation with the first operand a register
2754 and the second a constant, do the same as above, but looking for
2755 equivalences of the register. Then try to simplify before checking for
2756 the best address to use. This catches a few cases: First is when we
2757 have REG+const and the register is another REG+const. We can often merge
2758 the constants and eliminate one insn and one register. It may also be
2759 that a machine has a cheap REG+REG+const. Finally, this improves the
2760 code on the Alpha for unaligned byte stores. */
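/* Worked example (illustrative; register numbers are made up): for
   *LOC = (plus (reg 101) (const_int 4)) where the table shows reg 101
   equivalent to (plus (reg 90) (const_int 8)), the cse_gen_binary call
   below can produce (plus (reg 90) (const_int 12)), merging the two
   constants and freeing one insn and one register. */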
2762 if (flag_expensive_optimizations
2763 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2764 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2765 && GET_CODE (XEXP (*loc, 0)) == REG
2766 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2768 rtx c = XEXP (*loc, 1);
2770 do_not_record = 0;
2771 hash = HASH (XEXP (*loc, 0), Pmode);
2772 do_not_record = save_do_not_record;
2773 hash_arg_in_memory = save_hash_arg_in_memory;
2774 hash_arg_in_struct = save_hash_arg_in_struct;
2776 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2777 if (elt == 0)
2778 return;
2780 /* We need to find the best (under the criteria documented above) entry
2781 in the class that is valid. We use the `flag' field to indicate
2782 choices that were invalid and iterate until we can't find a better
2783 one that hasn't already been tried. */
2785 for (p = elt->first_same_value; p; p = p->next_same_value)
2786 p->flag = 0;
2788 while (found_better)
2790 int best_addr_cost = ADDRESS_COST (*loc);
2791 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2792 struct table_elt *best_elt = elt;
2793 rtx best_rtx = *loc;
2794 int count;
2796 /* This is at worst case an O(n^2) algorithm, so limit our search
2797 to the first 32 elements on the list. This avoids trouble
2798 compiling code with very long basic blocks that can easily
2799 call cse_gen_binary so many times that we run out of memory. */
2801 found_better = 0;
2802 for (p = elt->first_same_value, count = 0;
2803 p && count < 32;
2804 p = p->next_same_value, count++)
2805 if (! p->flag
2806 && (GET_CODE (p->exp) == REG
2807 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2809 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2811 if ((ADDRESS_COST (new) < best_addr_cost
2812 || (ADDRESS_COST (new) == best_addr_cost
2813 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2815 found_better = 1;
2816 best_addr_cost = ADDRESS_COST (new);
2817 best_rtx_cost = (COST (new) + 1) >> 1;
2818 best_elt = p;
2819 best_rtx = new;
2823 if (found_better)
2825 if (validate_change (insn, loc,
2826 canon_reg (copy_rtx (best_rtx),
2827 NULL_RTX), 0))
2828 return;
2829 else
2830 best_elt->flag = 1;
2834 #endif
2837 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2838 operation (EQ, NE, GT, etc.), follow it back through the hash table to
2839 find what values are actually being compared.
2841 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2842 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2843 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2844 compared to produce cc0.
2846 The return value is the comparison operator: either the code of the
2847 comparison actually found, or the code corresponding to its inverse. */
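/* Worked example (illustrative; register numbers are made up): if *PARG1
   is (compare (reg 70) (reg 71)) and *PARG2 is (const_int 0), the loop
   below sets *PARG1 and *PARG2 to (reg 70) and (reg 71) and returns CODE
   unchanged.  If instead *PARG1 is (lt (reg 70) (reg 71)) and CODE is EQ,
   the reversed code GE is returned, since the EQ test succeeds exactly
   when the LT was false. */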
2849 static enum rtx_code
2850 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2851 enum rtx_code code;
2852 rtx *parg1, *parg2;
2853 enum machine_mode *pmode1, *pmode2;
2855 rtx arg1, arg2;
2857 arg1 = *parg1, arg2 = *parg2;
2859 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2861 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2863 /* Set non-zero when we find something of interest. */
2864 rtx x = 0;
2865 int reverse_code = 0;
2866 struct table_elt *p = 0;
2868 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2869 On machines with CC0, this is the only case that can occur, since
2870 fold_rtx will return the COMPARE or item being compared with zero
2871 when given CC0. */
2873 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2874 x = arg1;
2876 /* If ARG1 is a comparison operator and CODE is testing for
2877 STORE_FLAG_VALUE, get the inner arguments. */
2879 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2881 if (code == NE
2882 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2883 && code == LT && STORE_FLAG_VALUE == -1)
2884 #ifdef FLOAT_STORE_FLAG_VALUE
2885 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2886 && FLOAT_STORE_FLAG_VALUE < 0)
2887 #endif
2889 x = arg1;
2890 else if (code == EQ
2891 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2892 && code == GE && STORE_FLAG_VALUE == -1)
2893 #ifdef FLOAT_STORE_FLAG_VALUE
2894 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2895 && FLOAT_STORE_FLAG_VALUE < 0)
2896 #endif
2898 x = arg1, reverse_code = 1;
2901 /* ??? We could also check for
2903 (ne (and (eq (...) (const_int 1))) (const_int 0))
2905 and related forms, but let's wait until we see them occurring. */
2907 if (x == 0)
2908 /* Look up ARG1 in the hash table and see if it has an equivalence
2909 that lets us see what is being compared. */
2910 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2911 GET_MODE (arg1));
2912 if (p) p = p->first_same_value;
2914 for (; p; p = p->next_same_value)
2916 enum machine_mode inner_mode = GET_MODE (p->exp);
2918 /* If the entry isn't valid, skip it. */
2919 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2920 continue;
2922 if (GET_CODE (p->exp) == COMPARE
2923 /* Another possibility is that this machine has a compare insn
2924 that includes the comparison code. In that case, ARG1 would
2925 be equivalent to a comparison operation that would set ARG1 to
2926 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2927 ORIG_CODE is the actual comparison being done; if it is an EQ,
2928 we must reverse ORIG_CODE. On machines with a negative value
2929 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2930 || ((code == NE
2931 || (code == LT
2932 && GET_MODE_CLASS (inner_mode) == MODE_INT
2933 && (GET_MODE_BITSIZE (inner_mode)
2934 <= HOST_BITS_PER_WIDE_INT)
2935 && (STORE_FLAG_VALUE
2936 & ((HOST_WIDE_INT) 1
2937 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2938 #ifdef FLOAT_STORE_FLAG_VALUE
2939 || (code == LT
2940 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2941 && FLOAT_STORE_FLAG_VALUE < 0)
2942 #endif
2944 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2946 x = p->exp;
2947 break;
2949 else if ((code == EQ
2950 || (code == GE
2951 && GET_MODE_CLASS (inner_mode) == MODE_INT
2952 && (GET_MODE_BITSIZE (inner_mode)
2953 <= HOST_BITS_PER_WIDE_INT)
2954 && (STORE_FLAG_VALUE
2955 & ((HOST_WIDE_INT) 1
2956 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2957 #ifdef FLOAT_STORE_FLAG_VALUE
2958 || (code == GE
2959 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2960 && FLOAT_STORE_FLAG_VALUE < 0)
2961 #endif
2963 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2965 reverse_code = 1;
2966 x = p->exp;
2967 break;
2970 /* If this is fp + constant, the equivalent is a better operand since
2971 it may let us predict the value of the comparison. */
2972 else if (NONZERO_BASE_PLUS_P (p->exp))
2974 arg1 = p->exp;
2975 continue;
2979 /* If we didn't find a useful equivalence for ARG1, we are done.
2980 Otherwise, set up for the next iteration. */
2981 if (x == 0)
2982 break;
2984 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2985 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2986 code = GET_CODE (x);
2988 if (reverse_code)
2989 code = reverse_condition (code);
2992 /* Return our results. Return the modes from before fold_rtx
2993 because fold_rtx might produce const_int, and then it's too late. */
2994 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2995 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2997 return code;
3000 /* Try to simplify a unary operation CODE whose output mode is to be
3001 MODE with input operand OP whose mode was originally OP_MODE.
3002 Return zero if no simplification can be made. */
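/* For example (illustrative): (neg:QI (const_int 5)) folds to
   (const_int -5); (zero_extend:SI (const_int -1)) with QImode operand
   mode folds to (const_int 255); and even without constant operands,
   (not (not x)) simplifies to x in the cases near the end. */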
3004 rtx
3005 simplify_unary_operation (code, mode, op, op_mode)
3006 enum rtx_code code;
3007 enum machine_mode mode;
3008 rtx op;
3009 enum machine_mode op_mode;
3011 register int width = GET_MODE_BITSIZE (mode);
3013 /* The order of these tests is critical so that, for example, we don't
3014 check the wrong mode (input vs. output) for a conversion operation,
3015 such as FIX. At some point, this should be simplified. */
3017 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
3019 if (code == FLOAT && GET_MODE (op) == VOIDmode
3020 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3022 HOST_WIDE_INT hv, lv;
3023 REAL_VALUE_TYPE d;
3025 if (GET_CODE (op) == CONST_INT)
3026 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3027 else
3028 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3030 #ifdef REAL_ARITHMETIC
3031 REAL_VALUE_FROM_INT (d, lv, hv);
3032 #else
3033 if (hv < 0)
3035 d = (double) (~ hv);
3036 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3037 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3038 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
3039 d = (- d - 1.0);
3041 else
3043 d = (double) hv;
3044 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3045 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3046 d += (double) (unsigned HOST_WIDE_INT) lv;
3048 #endif /* REAL_ARITHMETIC */
3049 d = real_value_truncate (mode, d);
3050 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3052 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3053 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3055 HOST_WIDE_INT hv, lv;
3056 REAL_VALUE_TYPE d;
3058 if (GET_CODE (op) == CONST_INT)
3059 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3060 else
3061 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3063 if (op_mode == VOIDmode)
3065 /* We don't know how to interpret negative-looking numbers in
3066 this case, so don't try to fold those. */
3067 if (hv < 0)
3068 return 0;
3070 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
3071 ; /* Value already occupies the full double-word; nothing to mask. */
3072 else
3073 hv = 0, lv &= GET_MODE_MASK (op_mode);
3075 #ifdef REAL_ARITHMETIC
3076 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv);
3077 #else
3079 d = (double) (unsigned HOST_WIDE_INT) hv;
3080 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3081 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3082 d += (double) (unsigned HOST_WIDE_INT) lv;
3083 #endif /* REAL_ARITHMETIC */
3084 d = real_value_truncate (mode, d);
3085 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3087 #endif
3089 if (GET_CODE (op) == CONST_INT
3090 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3092 register HOST_WIDE_INT arg0 = INTVAL (op);
3093 register HOST_WIDE_INT val;
3095 switch (code)
3097 case NOT:
3098 val = ~ arg0;
3099 break;
3101 case NEG:
3102 val = - arg0;
3103 break;
3105 case ABS:
3106 val = (arg0 >= 0 ? arg0 : - arg0);
3107 break;
3109 case FFS:
3110 /* Don't use ffs here. Instead, get low order bit and then its
3111 number. If arg0 is zero, this will return 0, as desired. */
3112 arg0 &= GET_MODE_MASK (mode);
3113 val = exact_log2 (arg0 & (- arg0)) + 1;
3114 break;
3116 case TRUNCATE:
3117 val = arg0;
3118 break;
3120 case ZERO_EXTEND:
3121 if (op_mode == VOIDmode)
3122 op_mode = mode;
3123 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3125 /* If we were really extending the mode,
3126 we would have to distinguish between zero-extension
3127 and sign-extension. */
3128 if (width != GET_MODE_BITSIZE (op_mode))
3129 abort ();
3130 val = arg0;
3132 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3133 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3134 else
3135 return 0;
3136 break;
3138 case SIGN_EXTEND:
3139 if (op_mode == VOIDmode)
3140 op_mode = mode;
3141 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3143 /* If we were really extending the mode,
3144 we would have to distinguish between zero-extension
3145 and sign-extension. */
3146 if (width != GET_MODE_BITSIZE (op_mode))
3147 abort ();
3148 val = arg0;
3150 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3152 val
3153 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3154 if (val
3155 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3156 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3158 else
3159 return 0;
3160 break;
3162 case SQRT:
3163 return 0;
3165 default:
3166 abort ();
3169 /* Clear the bits that don't belong in our mode,
3170 unless they and our sign bit are all one.
3171 So we get either a reasonable negative value or a reasonable
3172 unsigned value for this mode. */
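/* Illustrative arithmetic: with WIDTH == 8, a VAL of 0x1ff has high bits
   that are not all copies of its sign bit, so it is masked down to 0xff;
   a VAL of -5 already has all-one high bits matching its sign bit and is
   left alone, yielding a sensible signed value for the mode. */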
3173 if (width < HOST_BITS_PER_WIDE_INT
3174 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3175 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3176 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3178 return GEN_INT (val);
3181 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3182 for a DImode operation on a CONST_INT. */
3183 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3184 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3186 HOST_WIDE_INT l1, h1, lv, hv;
3188 if (GET_CODE (op) == CONST_DOUBLE)
3189 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3190 else
3191 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3193 switch (code)
3195 case NOT:
3196 lv = ~ l1;
3197 hv = ~ h1;
3198 break;
3200 case NEG:
3201 neg_double (l1, h1, &lv, &hv);
3202 break;
3204 case ABS:
3205 if (h1 < 0)
3206 neg_double (l1, h1, &lv, &hv);
3207 else
3208 lv = l1, hv = h1;
3209 break;
3211 case FFS:
3212 hv = 0;
3213 if (l1 == 0)
3214 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3215 else
3216 lv = exact_log2 (l1 & (-l1)) + 1;
3217 break;
3219 case TRUNCATE:
3220 /* This is just a change-of-mode, so do nothing. */
3221 lv = l1, hv = h1;
3222 break;
3224 case ZERO_EXTEND:
3225 if (op_mode == VOIDmode
3226 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3227 return 0;
3229 hv = 0;
3230 lv = l1 & GET_MODE_MASK (op_mode);
3231 break;
3233 case SIGN_EXTEND:
3234 if (op_mode == VOIDmode
3235 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3236 return 0;
3237 else
3239 lv = l1 & GET_MODE_MASK (op_mode);
3240 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3241 && (lv & ((HOST_WIDE_INT) 1
3242 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3243 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3245 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3247 break;
3249 case SQRT:
3250 return 0;
3252 default:
3253 return 0;
3256 return immed_double_const (lv, hv, mode);
3259 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3260 else if (GET_CODE (op) == CONST_DOUBLE
3261 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3263 REAL_VALUE_TYPE d;
3264 jmp_buf handler;
3265 rtx x;
3267 if (setjmp (handler))
3268 /* There used to be a warning here, but that is inadvisable.
3269 People may want to cause traps, and the natural way
3270 to do it should not get a warning. */
3271 return 0;
3273 set_float_handler (handler);
3275 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3277 switch (code)
3279 case NEG:
3280 d = REAL_VALUE_NEGATE (d);
3281 break;
3283 case ABS:
3284 if (REAL_VALUE_NEGATIVE (d))
3285 d = REAL_VALUE_NEGATE (d);
3286 break;
3288 case FLOAT_TRUNCATE:
3289 d = real_value_truncate (mode, d);
3290 break;
3292 case FLOAT_EXTEND:
3293 /* All this does is change the mode. */
3294 break;
3296 case FIX:
3297 d = REAL_VALUE_RNDZINT (d);
3298 break;
3300 case UNSIGNED_FIX:
3301 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3302 break;
3304 case SQRT:
3305 return 0;
3307 default:
3308 abort ();
3311 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3312 set_float_handler (NULL_PTR);
3313 return x;
3316 else if (GET_CODE (op) == CONST_DOUBLE
3317 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3318 && GET_MODE_CLASS (mode) == MODE_INT
3319 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3321 REAL_VALUE_TYPE d;
3322 jmp_buf handler;
3323 HOST_WIDE_INT val;
3325 if (setjmp (handler))
3326 return 0;
3328 set_float_handler (handler);
3330 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3332 switch (code)
3334 case FIX:
3335 val = REAL_VALUE_FIX (d);
3336 break;
3338 case UNSIGNED_FIX:
3339 val = REAL_VALUE_UNSIGNED_FIX (d);
3340 break;
3342 default:
3343 abort ();
3346 set_float_handler (NULL_PTR);
3348 /* Clear the bits that don't belong in our mode,
3349 unless they and our sign bit are all one.
3350 So we get either a reasonable negative value or a reasonable
3351 unsigned value for this mode. */
3352 if (width < HOST_BITS_PER_WIDE_INT
3353 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3354 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3355 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3357 /* If this would be an entire word for the target, but is not for
3358 the host, then sign-extend on the host so that the number will look
3359 the same way on the host that it would on the target.
3361 For example, when building a 64 bit alpha hosted 32 bit sparc
3362 targeted compiler, we want the 32 bit unsigned value -1 to be
3363 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3364 The latter confuses the sparc backend. */
3366 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3367 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3368 val |= ((HOST_WIDE_INT) (-1) << width);
3370 return GEN_INT (val);
3372 #endif
3373 /* This was formerly used only for non-IEEE float.
3374 eggert@twinsun.com says it is safe for IEEE also. */
3375 else
3377 /* There are some simplifications we can do even if the operands
3378 aren't constant. */
3379 switch (code)
3381 case NEG:
3382 case NOT:
3383 /* (not (not X)) == X, similarly for NEG. */
3384 if (GET_CODE (op) == code)
3385 return XEXP (op, 0);
3386 break;
3388 case SIGN_EXTEND:
3389 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3390 becomes just the MINUS if its mode is MODE. This allows
3391 folding switch statements on machines using casesi (such as
3392 the Vax). */
3393 if (GET_CODE (op) == TRUNCATE
3394 && GET_MODE (XEXP (op, 0)) == mode
3395 && GET_CODE (XEXP (op, 0)) == MINUS
3396 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3397 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3398 return XEXP (op, 0);
3400 #ifdef POINTERS_EXTEND_UNSIGNED
3401 if (! POINTERS_EXTEND_UNSIGNED
3402 && mode == Pmode && GET_MODE (op) == ptr_mode
3403 && CONSTANT_P (op))
3404 return convert_memory_address (Pmode, op);
3405 #endif
3406 break;
3408 #ifdef POINTERS_EXTEND_UNSIGNED
3409 case ZERO_EXTEND:
3410 if (POINTERS_EXTEND_UNSIGNED
3411 && mode == Pmode && GET_MODE (op) == ptr_mode
3412 && CONSTANT_P (op))
3413 return convert_memory_address (Pmode, op);
3414 break;
3415 #endif
3418 return 0;
3422 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3423 and OP1. Return 0 if no simplification is possible.
3425 Don't use this for relational operations such as EQ or LT.
3426 Use simplify_relational_operation instead. */
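/* For example (illustrative): (plus:SI (reg 70) (const_int 0)) simplifies
   to (reg 70), but the same transformation is skipped for IEEE floating
   point without -ffast-math, since x + 0 is not x when x is -0.0; the
   PLUS case below checks exactly this before dropping the zero. */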
3428 rtx
3429 simplify_binary_operation (code, mode, op0, op1)
3430 enum rtx_code code;
3431 enum machine_mode mode;
3432 rtx op0, op1;
3434 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3435 HOST_WIDE_INT val;
3436 int width = GET_MODE_BITSIZE (mode);
3437 rtx tem;
3439 /* Relational operations don't work here. We must know the mode
3440 of the operands in order to do the comparison correctly.
3441 Assuming a full word can give incorrect results.
3442 Consider comparing 128 with -128 in QImode. */
3444 if (GET_RTX_CLASS (code) == '<')
3445 abort ();
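/* Illustrative point: as host integers 128 and -128 are unequal, yet as
   QImode quantities both are the bit pattern 0x80 and hence the same
   value; without the operand mode a relational fold here could give the
   wrong answer, which is why such codes are rejected above. */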
3447 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3448 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3449 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3450 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3452 REAL_VALUE_TYPE f0, f1, value;
3453 jmp_buf handler;
3455 if (setjmp (handler))
3456 return 0;
3458 set_float_handler (handler);
3460 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3461 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3462 f0 = real_value_truncate (mode, f0);
3463 f1 = real_value_truncate (mode, f1);
3465 #ifdef REAL_ARITHMETIC
3466 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3467 #else
3468 switch (code)
3470 case PLUS:
3471 value = f0 + f1;
3472 break;
3473 case MINUS:
3474 value = f0 - f1;
3475 break;
3476 case MULT:
3477 value = f0 * f1;
3478 break;
3479 case DIV:
3480 #ifndef REAL_INFINITY
3481 if (f1 == 0)
3482 return 0;
3483 #endif
3484 value = f0 / f1;
3485 break;
3486 case SMIN:
3487 value = MIN (f0, f1);
3488 break;
3489 case SMAX:
3490 value = MAX (f0, f1);
3491 break;
3492 default:
3493 abort ();
3495 #endif
3497 value = real_value_truncate (mode, value);
3498 set_float_handler (NULL_PTR);
3499 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3501 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3503 /* We can fold some multi-word operations. */
3504 if (GET_MODE_CLASS (mode) == MODE_INT
3505 && width == HOST_BITS_PER_WIDE_INT * 2
3506 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3507 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3509 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3511 if (GET_CODE (op0) == CONST_DOUBLE)
3512 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3513 else
3514 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3516 if (GET_CODE (op1) == CONST_DOUBLE)
3517 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3518 else
3519 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3521 switch (code)
3523 case MINUS:
3524 /* A - B == A + (-B). */
3525 neg_double (l2, h2, &lv, &hv);
3526 l2 = lv, h2 = hv;
3528 /* .. fall through ... */
3530 case PLUS:
3531 add_double (l1, h1, l2, h2, &lv, &hv);
3532 break;
3534 case MULT:
3535 mul_double (l1, h1, l2, h2, &lv, &hv);
3536 break;
3538 case DIV: case MOD: case UDIV: case UMOD:
3539 /* We'd need to include tree.h to do this and it doesn't seem worth
3540 it. */
3541 return 0;
3543 case AND:
3544 lv = l1 & l2, hv = h1 & h2;
3545 break;
3547 case IOR:
3548 lv = l1 | l2, hv = h1 | h2;
3549 break;
3551 case XOR:
3552 lv = l1 ^ l2, hv = h1 ^ h2;
3553 break;
3555 case SMIN:
3556 if (h1 < h2
3557 || (h1 == h2
3558 && ((unsigned HOST_WIDE_INT) l1
3559 < (unsigned HOST_WIDE_INT) l2)))
3560 lv = l1, hv = h1;
3561 else
3562 lv = l2, hv = h2;
3563 break;
3565 case SMAX:
3566 if (h1 > h2
3567 || (h1 == h2
3568 && ((unsigned HOST_WIDE_INT) l1
3569 > (unsigned HOST_WIDE_INT) l2)))
3570 lv = l1, hv = h1;
3571 else
3572 lv = l2, hv = h2;
3573 break;
3575 case UMIN:
3576 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3577 || (h1 == h2
3578 && ((unsigned HOST_WIDE_INT) l1
3579 < (unsigned HOST_WIDE_INT) l2)))
3580 lv = l1, hv = h1;
3581 else
3582 lv = l2, hv = h2;
3583 break;
3585 case UMAX:
3586 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3587 || (h1 == h2
3588 && ((unsigned HOST_WIDE_INT) l1
3589 > (unsigned HOST_WIDE_INT) l2)))
3590 lv = l1, hv = h1;
3591 else
3592 lv = l2, hv = h2;
3593 break;
3595 case LSHIFTRT: case ASHIFTRT:
3596 case ASHIFT:
3597 case ROTATE: case ROTATERT:
3598 #ifdef SHIFT_COUNT_TRUNCATED
3599 if (SHIFT_COUNT_TRUNCATED)
3600 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3601 #endif
3603 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3604 return 0;
3606 if (code == LSHIFTRT || code == ASHIFTRT)
3607 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3608 code == ASHIFTRT);
3609 else if (code == ASHIFT)
3610 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3611 else if (code == ROTATE)
3612 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3613 else /* code == ROTATERT */
3614 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3615 break;
3617 default:
3618 return 0;
3621 return immed_double_const (lv, hv, mode);
3624 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3625 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3627 /* Even if we can't compute a constant result,
3628 there are some cases worth simplifying. */
3630 switch (code)
3632 case PLUS:
3633 /* In IEEE floating point, x+0 is not the same as x. Similarly
3634 for the other optimizations below. */
3635 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3636 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3637 break;
3639 if (op1 == CONST0_RTX (mode))
3640 return op0;
3642 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3643 if (GET_CODE (op0) == NEG)
3644 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3645 else if (GET_CODE (op1) == NEG)
3646 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3648 /* Handle both-operands-constant cases. We can only add
3649 CONST_INTs to constants since the sum of relocatable symbols
3650 can't be handled by most assemblers. Don't add CONST_INT
3651 to CONST_INT since overflow won't be computed properly if wider
3652 than HOST_BITS_PER_WIDE_INT. */
3654 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3655 && GET_CODE (op1) == CONST_INT)
3656 return plus_constant (op0, INTVAL (op1));
3657 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3658 && GET_CODE (op0) == CONST_INT)
3659 return plus_constant (op1, INTVAL (op0));
3661 /* See if this is something like X * C - X or vice versa or
3662 if the multiplication is written as a shift. If so, we can
3663 distribute and make a new multiply, shift, or maybe just
3664 have X (if C is 2 in the example above). But don't make
3665 real multiply if we didn't have one before. */
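/* Worked example (illustrative): (plus (mult x (const_int 3)) x) has
   coefficients 3 and 1 and folds to x times 4; but
   (plus (ashift x (const_int 2)) (neg x)), with coefficients 4 and -1,
   is left alone, since folding it would create a multiply that was not
   there before (HAD_MULT is zero). */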
3667 if (! FLOAT_MODE_P (mode))
3669 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3670 rtx lhs = op0, rhs = op1;
3671 int had_mult = 0;
3673 if (GET_CODE (lhs) == NEG)
3674 coeff0 = -1, lhs = XEXP (lhs, 0);
3675 else if (GET_CODE (lhs) == MULT
3676 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3678 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3679 had_mult = 1;
3681 else if (GET_CODE (lhs) == ASHIFT
3682 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3683 && INTVAL (XEXP (lhs, 1)) >= 0
3684 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3686 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3687 lhs = XEXP (lhs, 0);
3690 if (GET_CODE (rhs) == NEG)
3691 coeff1 = -1, rhs = XEXP (rhs, 0);
3692 else if (GET_CODE (rhs) == MULT
3693 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3695 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3696 had_mult = 1;
3698 else if (GET_CODE (rhs) == ASHIFT
3699 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3700 && INTVAL (XEXP (rhs, 1)) >= 0
3701 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3703 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3704 rhs = XEXP (rhs, 0);
3707 if (rtx_equal_p (lhs, rhs))
3709 tem = cse_gen_binary (MULT, mode, lhs,
3710 GEN_INT (coeff0 + coeff1));
3711 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3715 /* If one of the operands is a PLUS or a MINUS, see if we can
3716 simplify this by the associative law.
3717 Don't use the associative law for floating point.
3718 The inaccuracy makes it nonassociative,
3719 and subtle programs can break if operations are associated. */
3721 if (INTEGRAL_MODE_P (mode)
3722 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3723 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3724 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3725 return tem;
3726 break;
3728 case COMPARE:
3729 #ifdef HAVE_cc0
3730 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3731 using cc0, in which case we want to leave it as a COMPARE
3732 so we can distinguish it from a register-register-copy.
3734 In IEEE floating point, x-0 is not the same as x. */
3736 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3737 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3738 && op1 == CONST0_RTX (mode))
3739 return op0;
3740 #else
3741 /* Do nothing here. */
3742 #endif
3743 break;
3745 case MINUS:
3746 /* None of these optimizations can be done for IEEE
3747 floating point. */
3748 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3749 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3750 break;
3752 /* We can't assume x-x is 0 even with non-IEEE floating point,
3753 but since it is zero except in very strange circumstances, we
3754 will treat it as zero with -ffast-math. */
3755 if (rtx_equal_p (op0, op1)
3756 && ! side_effects_p (op0)
3757 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3758 return CONST0_RTX (mode);
3760 /* Change subtraction from zero into negation. */
3761 if (op0 == CONST0_RTX (mode))
3762 return gen_rtx (NEG, mode, op1);
3764 /* (-1 - a) is ~a. */
3765 if (op0 == constm1_rtx)
3766 return gen_rtx (NOT, mode, op1);
3768 /* Subtracting 0 has no effect. */
3769 if (op1 == CONST0_RTX (mode))
3770 return op0;
3772 /* See if this is something like X * C - X or vice versa or
3773 if the multiplication is written as a shift. If so, we can
3774 distribute and make a new multiply, shift, or maybe just
3775 have X (if C is 2 in the example above). But don't make a
3776 real multiply if we didn't have one before. */
3778 if (! FLOAT_MODE_P (mode))
3780 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3781 rtx lhs = op0, rhs = op1;
3782 int had_mult = 0;
3784 if (GET_CODE (lhs) == NEG)
3785 coeff0 = -1, lhs = XEXP (lhs, 0);
3786 else if (GET_CODE (lhs) == MULT
3787 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3789 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3790 had_mult = 1;
3792 else if (GET_CODE (lhs) == ASHIFT
3793 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3794 && INTVAL (XEXP (lhs, 1)) >= 0
3795 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3797 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3798 lhs = XEXP (lhs, 0);
3801 if (GET_CODE (rhs) == NEG)
3802 coeff1 = -1, rhs = XEXP (rhs, 0);
3803 else if (GET_CODE (rhs) == MULT
3804 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3806 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3807 had_mult = 1;
3809 else if (GET_CODE (rhs) == ASHIFT
3810 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3811 && INTVAL (XEXP (rhs, 1)) >= 0
3812 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3814 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3815 rhs = XEXP (rhs, 0);
3818 if (rtx_equal_p (lhs, rhs))
3820 tem = cse_gen_binary (MULT, mode, lhs,
3821 GEN_INT (coeff0 - coeff1));
3822 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3826 /* (a - (-b)) -> (a + b). */
3827 if (GET_CODE (op1) == NEG)
3828 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3830 /* If one of the operands is a PLUS or a MINUS, see if we can
3831 simplify this by the associative law.
3832 Don't use the associative law for floating point.
3833 The inaccuracy makes it nonassociative,
3834 and subtle programs can break if operations are associated. */
3836 if (INTEGRAL_MODE_P (mode)
3837 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3838 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3839 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3840 return tem;
3842 /* Don't let a relocatable value get a negative coeff. */
3843 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3844 return plus_constant (op0, - INTVAL (op1));
3846 /* (x - (x & y)) -> (x & ~y) */
3847 if (GET_CODE (op1) == AND)
3849 if (rtx_equal_p (op0, XEXP (op1, 0)))
3850 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 1)));
3851 if (rtx_equal_p (op0, XEXP (op1, 1)))
3852 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 0)));
3854 break;
3856 case MULT:
3857 if (op1 == constm1_rtx)
3859 tem = simplify_unary_operation (NEG, mode, op0, mode);
3861 return tem ? tem : gen_rtx (NEG, mode, op0);
3864 /* In IEEE floating point, x*0 is not always 0. */
3865 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3866 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3867 && op1 == CONST0_RTX (mode)
3868 && ! side_effects_p (op0))
3869 return op1;
3871 /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
3872 However, ANSI says we can drop signals,
3873 so we can do this anyway. */
3874 if (op1 == CONST1_RTX (mode))
3875 return op0;
3877 /* Convert multiply by constant power of two into shift unless
3878 we are still generating RTL. This test is a kludge. */
3879 if (GET_CODE (op1) == CONST_INT
3880 && (val = exact_log2 (INTVAL (op1))) >= 0
3881 && ! rtx_equal_function_value_matters)
3882 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
3884 if (GET_CODE (op1) == CONST_DOUBLE
3885 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3887 REAL_VALUE_TYPE d;
3888 jmp_buf handler;
3889 int op1is2, op1ism1;
3891 if (setjmp (handler))
3892 return 0;
3894 set_float_handler (handler);
3895 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3896 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3897 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3898 set_float_handler (NULL_PTR);
3900 /* x*2 is x+x and x*(-1) is -x */
3901 if (op1is2 && GET_MODE (op0) == mode)
3902 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3904 else if (op1ism1 && GET_MODE (op0) == mode)
3905 return gen_rtx (NEG, mode, op0);
3907 break;
3909 case IOR:
3910 if (op1 == const0_rtx)
3911 return op0;
3912 if (GET_CODE (op1) == CONST_INT
3913 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3914 return op1;
3915 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3916 return op0;
3917 /* A | (~A) -> -1 */
3918 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3919 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3920 && ! side_effects_p (op0)
3921 && GET_MODE_CLASS (mode) != MODE_CC)
3922 return constm1_rtx;
3923 break;
3925 case XOR:
3926 if (op1 == const0_rtx)
3927 return op0;
3928 if (GET_CODE (op1) == CONST_INT
3929 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3930 return gen_rtx (NOT, mode, op0);
3931 if (op0 == op1 && ! side_effects_p (op0)
3932 && GET_MODE_CLASS (mode) != MODE_CC)
3933 return const0_rtx;
3934 break;
3936 case AND:
3937 if (op1 == const0_rtx && ! side_effects_p (op0))
3938 return const0_rtx;
3939 if (GET_CODE (op1) == CONST_INT
3940 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3941 return op0;
3942 if (op0 == op1 && ! side_effects_p (op0)
3943 && GET_MODE_CLASS (mode) != MODE_CC)
3944 return op0;
3945 /* A & (~A) -> 0 */
3946 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3947 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3948 && ! side_effects_p (op0)
3949 && GET_MODE_CLASS (mode) != MODE_CC)
3950 return const0_rtx;
3951 break;
3953 case UDIV:
3954 /* Convert divide by power of two into shift (divide by 1 handled
3955 below). */
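 /* For example, (udiv:SI (reg:SI 70) (const_int 8)) becomes
 (lshiftrt:SI (reg:SI 70) (const_int 3)), since exact_log2 (8) == 3;
 the register number is purely illustrative. */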
3956 if (GET_CODE (op1) == CONST_INT
3957 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3958 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3960 /* ... fall through ... */
3962 case DIV:
3963 if (op1 == CONST1_RTX (mode))
3964 return op0;
3966 /* In IEEE floating point, 0/x is not always 0. */
3967 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3968 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3969 && op0 == CONST0_RTX (mode)
3970 && ! side_effects_p (op1))
3971 return op0;
3973 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3974 /* Change division by a constant into multiplication. Only do
3975 this with -ffast-math until an expert says it is safe in
3976 general. */
3977 else if (GET_CODE (op1) == CONST_DOUBLE
3978 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3979 && op1 != CONST0_RTX (mode)
3980 && flag_fast_math)
3982 REAL_VALUE_TYPE d;
3983 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3985 if (! REAL_VALUES_EQUAL (d, dconst0))
3987 #if defined (REAL_ARITHMETIC)
3988 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3989 return gen_rtx (MULT, mode, op0,
3990 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3991 #else
3992 return gen_rtx (MULT, mode, op0,
3993 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3994 #endif
3997 #endif
3998 break;
4000 case UMOD:
4001 /* Handle modulus by power of two (mod with 1 handled below). */
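 /* For example, (umod:SI (reg:SI 70) (const_int 8)) becomes
 (and:SI (reg:SI 70) (const_int 7)): the low three bits of the
 dividend are exactly the remainder of division by 8. */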
4002 if (GET_CODE (op1) == CONST_INT
4003 && exact_log2 (INTVAL (op1)) > 0)
4004 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
4006 /* ... fall through ... */
4008 case MOD:
4009 if ((op0 == const0_rtx || op1 == const1_rtx)
4010 && ! side_effects_p (op0) && ! side_effects_p (op1))
4011 return const0_rtx;
4012 break;
4014 case ROTATERT:
4015 case ROTATE:
4016 /* Rotating ~0 always results in ~0. */
4017 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
4018 && INTVAL (op0) == GET_MODE_MASK (mode)
4019 && ! side_effects_p (op1))
4020 return op0;
4022 /* ... fall through ... */
4024 case ASHIFT:
4025 case ASHIFTRT:
4026 case LSHIFTRT:
4027 if (op1 == const0_rtx)
4028 return op0;
4029 if (op0 == const0_rtx && ! side_effects_p (op1))
4030 return op0;
4031 break;
4033 case SMIN:
4034 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4035 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
4036 && ! side_effects_p (op0))
4037 return op1;
4038 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4039 return op0;
4040 break;
4042 case SMAX:
4043 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4044 && (INTVAL (op1)
4045 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4046 && ! side_effects_p (op0))
4047 return op1;
4048 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4049 return op0;
4050 break;
4052 case UMIN:
4053 if (op1 == const0_rtx && ! side_effects_p (op0))
4054 return op1;
4055 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4056 return op0;
4057 break;
4059 case UMAX:
4060 if (op1 == constm1_rtx && ! side_effects_p (op0))
4061 return op1;
4062 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4063 return op0;
4064 break;
4066 default:
4067 abort ();
4070 return 0;
4073 /* Get the integer argument values in two forms:
4074 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4076 arg0 = INTVAL (op0);
4077 arg1 = INTVAL (op1);
4079 if (width < HOST_BITS_PER_WIDE_INT)
4081 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4082 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4084 arg0s = arg0;
4085 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4086 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4088 arg1s = arg1;
4089 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4090 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4092 else
4094 arg0s = arg0;
4095 arg1s = arg1;
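 /* Illustration of the two forms: with width == 8 and op0 ==
 (const_int 0xff), ARG0 is masked to 255 (the zero-extended form),
 while ARG0S, whose sign bit 0x80 is set, is widened to -1 (the
 sign-extended form). */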
4098 /* Compute the value of the arithmetic. */
4100 switch (code)
4102 case PLUS:
4103 val = arg0s + arg1s;
4104 break;
4106 case MINUS:
4107 val = arg0s - arg1s;
4108 break;
4110 case MULT:
4111 val = arg0s * arg1s;
4112 break;
4114 case DIV:
4115 if (arg1s == 0)
4116 return 0;
4117 val = arg0s / arg1s;
4118 break;
4120 case MOD:
4121 if (arg1s == 0)
4122 return 0;
4123 val = arg0s % arg1s;
4124 break;
4126 case UDIV:
4127 if (arg1 == 0)
4128 return 0;
4129 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4130 break;
4132 case UMOD:
4133 if (arg1 == 0)
4134 return 0;
4135 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4136 break;
4138 case AND:
4139 val = arg0 & arg1;
4140 break;
4142 case IOR:
4143 val = arg0 | arg1;
4144 break;
4146 case XOR:
4147 val = arg0 ^ arg1;
4148 break;
4150 case LSHIFTRT:
4151 /* If shift count is undefined, don't fold it; let the machine do
4152 what it wants. But truncate it if the machine will do that. */
4153 if (arg1 < 0)
4154 return 0;
4156 #ifdef SHIFT_COUNT_TRUNCATED
4157 if (SHIFT_COUNT_TRUNCATED)
4158 arg1 %= width;
4159 #endif
4161 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4162 break;
4164 case ASHIFT:
4165 if (arg1 < 0)
4166 return 0;
4168 #ifdef SHIFT_COUNT_TRUNCATED
4169 if (SHIFT_COUNT_TRUNCATED)
4170 arg1 %= width;
4171 #endif
4173 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4174 break;
4176 case ASHIFTRT:
4177 if (arg1 < 0)
4178 return 0;
4180 #ifdef SHIFT_COUNT_TRUNCATED
4181 if (SHIFT_COUNT_TRUNCATED)
4182 arg1 %= width;
4183 #endif
4185 val = arg0s >> arg1;
4187 /* The bootstrap compiler may not have sign-extended the right shift.
4188 Manually extend the sign to ensure the bootstrap cc matches gcc. */
4189 if (arg0s < 0 && arg1 > 0)
4190 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
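 /* For example, with arg0s == -8 and arg1 == 1, a host whose >>
 is a logical shift would yield 0x7ff...ffc here; OR-ing in the
 high-order mask restores the arithmetic result -4. */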
4192 break;
4194 case ROTATERT:
4195 if (arg1 < 0)
4196 return 0;
4198 arg1 %= width;
4199 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4200 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4201 break;
4203 case ROTATE:
4204 if (arg1 < 0)
4205 return 0;
4207 arg1 %= width;
4208 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4209 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4210 break;
4212 case COMPARE:
4213 /* Do nothing here. */
4214 return 0;
4216 case SMIN:
4217 val = arg0s <= arg1s ? arg0s : arg1s;
4218 break;
4220 case UMIN:
4221 val = ((unsigned HOST_WIDE_INT) arg0
4222 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4223 break;
4225 case SMAX:
4226 val = arg0s > arg1s ? arg0s : arg1s;
4227 break;
4229 case UMAX:
4230 val = ((unsigned HOST_WIDE_INT) arg0
4231 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4232 break;
4234 default:
4235 abort ();
4238 /* Clear the bits that don't belong in our mode, unless they and our sign
4239 bit are all one. So we get either a reasonable negative value or a
4240 reasonable unsigned value for this mode. */
4241 if (width < HOST_BITS_PER_WIDE_INT
4242 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4243 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4244 val &= ((HOST_WIDE_INT) 1 << width) - 1;
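 /* For example, with width == 8: val == 0x17f has high bits that
 are not a pure sign extension, so it is masked down to 0x7f,
 while val == -1 has the sign bit and all higher bits set and is
 left alone. */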
4246 /* If this would be an entire word for the target, but is not for
4247 the host, then sign-extend on the host so that the number will look
4248 the same on the host as it would on the target.
4250 For example, when building a 32 bit sparc targeted compiler hosted
4251 on a 64 bit alpha, we want the 32 bit unsigned value -1 to be
4252 represented as the 64 bit value -1, and not as 0x00000000ffffffff.
4253 The latter confuses the sparc backend. */
4255 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4256 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4257 val |= ((HOST_WIDE_INT) (-1) << width);
4259 return GEN_INT (val);
4262 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4263 PLUS or MINUS.
4265 Rather than test for specific cases, we do this by a brute-force method
4266 and do all possible simplifications until no more changes occur. Then
4267 we rebuild the operation. */
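 /* A small example, using hypothetical operands A, B and C:
 simplifying (minus (plus A B) (plus A C)) expands the operands
 into the list A, B, -A, -C; A and -A then cancel against each
 other, and we rebuild the result as (minus B C). */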
4269 static rtx
4270 simplify_plus_minus (code, mode, op0, op1)
4271 enum rtx_code code;
4272 enum machine_mode mode;
4273 rtx op0, op1;
4275 rtx ops[8];
4276 int negs[8];
4277 rtx result, tem;
4278 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4279 int first = 1, negate = 0, changed;
4280 int i, j;
4282 bzero ((char *) ops, sizeof ops);
4284 /* Set up the two operands and then expand them until nothing has been
4285 changed. If we run out of room in our array, give up; this should
4286 almost never happen. */
4288 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4290 changed = 1;
4291 while (changed)
4293 changed = 0;
4295 for (i = 0; i < n_ops; i++)
4296 switch (GET_CODE (ops[i]))
4298 case PLUS:
4299 case MINUS:
4300 if (n_ops == 7)
4301 return 0;
4303 ops[n_ops] = XEXP (ops[i], 1);
4304 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4305 ops[i] = XEXP (ops[i], 0);
4306 input_ops++;
4307 changed = 1;
4308 break;
4310 case NEG:
4311 ops[i] = XEXP (ops[i], 0);
4312 negs[i] = ! negs[i];
4313 changed = 1;
4314 break;
4316 case CONST:
4317 ops[i] = XEXP (ops[i], 0);
4318 input_consts++;
4319 changed = 1;
4320 break;
4322 case NOT:
4323 /* ~a -> (-a - 1) */
4324 if (n_ops != 7)
4326 ops[n_ops] = constm1_rtx;
4327 negs[n_ops++] = negs[i];
4328 ops[i] = XEXP (ops[i], 0);
4329 negs[i] = ! negs[i];
4330 changed = 1;
4332 break;
4334 case CONST_INT:
4335 if (negs[i])
4336 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4337 break;
4341 /* If we only have two operands, we can't do anything. */
4342 if (n_ops <= 2)
4343 return 0;
4345 /* Now simplify each pair of operands until nothing changes. The first
4346 time through just simplify constants against each other. */
4348 changed = 1;
4349 while (changed)
4351 changed = first;
4353 for (i = 0; i < n_ops - 1; i++)
4354 for (j = i + 1; j < n_ops; j++)
4355 if (ops[i] != 0 && ops[j] != 0
4356 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4358 rtx lhs = ops[i], rhs = ops[j];
4359 enum rtx_code ncode = PLUS;
4361 if (negs[i] && ! negs[j])
4362 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4363 else if (! negs[i] && negs[j])
4364 ncode = MINUS;
4366 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4367 if (tem)
4369 ops[i] = tem, ops[j] = 0;
4370 negs[i] = negs[i] && negs[j];
4371 if (GET_CODE (tem) == NEG)
4372 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4374 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4375 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4376 changed = 1;
4380 first = 0;
4383 /* Pack all the operands to the lower-numbered entries and give up if
4384 we didn't reduce the number of operands we had. Make sure we
4385 count a CONST as two operands. If we have the same number of
4386 operands, but have made more CONSTs than we had, this is also
4387 an improvement, so accept it. */
4389 for (i = 0, j = 0; j < n_ops; j++)
4390 if (ops[j] != 0)
4392 ops[i] = ops[j], negs[i++] = negs[j];
4393 if (GET_CODE (ops[j]) == CONST)
4394 n_consts++;
4397 if (i + n_consts > input_ops
4398 || (i + n_consts == input_ops && n_consts <= input_consts))
4399 return 0;
4401 n_ops = i;
4403 /* If we have a CONST_INT, put it last. */
4404 for (i = 0; i < n_ops - 1; i++)
4405 if (GET_CODE (ops[i]) == CONST_INT)
4407 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
4408 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4411 /* Put a non-negated operand first. If there aren't any, make all
4412 operands positive and negate the whole thing later. */
4413 for (i = 0; i < n_ops && negs[i]; i++)
4416 if (i == n_ops)
4418 for (i = 0; i < n_ops; i++)
4419 negs[i] = 0;
4420 negate = 1;
4422 else if (i != 0)
4424 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4425 j = negs[0], negs[0] = negs[i], negs[i] = j;
4428 /* Now make the result by performing the requested operations. */
4429 result = ops[0];
4430 for (i = 1; i < n_ops; i++)
4431 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4433 return negate ? gen_rtx (NEG, mode, result) : result;
4436 /* Make a binary operation by properly ordering the operands and
4437 seeing if the expression folds. */
4439 static rtx
4440 cse_gen_binary (code, mode, op0, op1)
4441 enum rtx_code code;
4442 enum machine_mode mode;
4443 rtx op0, op1;
4445 rtx tem;
4447 /* Put complex operands first and constants second if commutative. */
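 /* For example, cse_gen_binary (PLUS, SImode, (const_int 3),
 (reg:SI 70)) first swaps the operands so the constant comes
 second and then returns plus_constant ((reg:SI 70), 3). */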
4448 if (GET_RTX_CLASS (code) == 'c'
4449 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4450 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4451 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4452 || (GET_CODE (op0) == SUBREG
4453 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4454 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4455 tem = op0, op0 = op1, op1 = tem;
4457 /* If this simplifies, do it. */
4458 tem = simplify_binary_operation (code, mode, op0, op1);
4460 if (tem)
4461 return tem;
4463 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4464 just form the operation. */
4466 if (code == PLUS && GET_CODE (op1) == CONST_INT
4467 && GET_MODE (op0) != VOIDmode)
4468 return plus_constant (op0, INTVAL (op1));
4469 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4470 && GET_MODE (op0) != VOIDmode)
4471 return plus_constant (op0, - INTVAL (op1));
4472 else
4473 return gen_rtx (code, mode, op0, op1);
4476 /* Like simplify_binary_operation except used for relational operators.
4477 MODE is the mode of the operands, not that of the result. If MODE
4478 is VOIDmode, both operands must also be VOIDmode and we compare the
4479 operands in "infinite precision".
4481 If no simplification is possible, this function returns zero. Otherwise,
4482 it returns either const_true_rtx or const0_rtx. */
4485 simplify_relational_operation (code, mode, op0, op1)
4486 enum rtx_code code;
4487 enum machine_mode mode;
4488 rtx op0, op1;
4490 int equal, op0lt, op0ltu, op1lt, op1ltu;
4491 rtx tem;
4493 /* If op0 is a compare, extract the comparison arguments from it. */
4494 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4495 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4497 /* We can't simplify MODE_CC values since we don't know what the
4498 actual comparison is. */
4499 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4500 #ifdef HAVE_cc0
4501 || op0 == cc0_rtx
4502 #endif
4504 return 0;
4506 /* For integer comparisons of A and B maybe we can simplify A - B and can
4507 then simplify a comparison of that with zero. If A and B are both either
4508 a register or a CONST_INT, this can't help; testing for these cases will
4509 prevent infinite recursion here and speed things up.
4511 If CODE is an unsigned comparison, then we can never do this optimization,
4512 because it gives an incorrect result if the subtraction wraps around zero.
4513 ANSI C defines unsigned operations such that they never overflow, and
4514 thus such cases cannot be ignored. */
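 /* For instance, comparing (plus:SI (reg:SI 70) (const_int 7))
 against (plus:SI (reg:SI 70) (const_int 4)): their difference
 simplifies to (const_int 3), so a signed GT of the two folds
 to const_true_rtx. */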
4516 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4517 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4518 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4519 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4520 && code != GTU && code != GEU && code != LTU && code != LEU)
4521 return simplify_relational_operation (signed_condition (code),
4522 mode, tem, const0_rtx);
4524 /* For non-IEEE floating-point, if the two operands are equal, we know the
4525 result. */
4526 if (rtx_equal_p (op0, op1)
4527 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4528 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4529 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4531 /* If the operands are floating-point constants, see if we can fold
4532 the result. */
4533 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4534 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4535 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4537 REAL_VALUE_TYPE d0, d1;
4538 jmp_buf handler;
4540 if (setjmp (handler))
4541 return 0;
4543 set_float_handler (handler);
4544 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4545 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4546 equal = REAL_VALUES_EQUAL (d0, d1);
4547 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4548 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4549 set_float_handler (NULL_PTR);
4551 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4553 /* Otherwise, see if the operands are both integers. */
4554 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4555 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4556 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4558 int width = GET_MODE_BITSIZE (mode);
4559 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4560 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4562 /* Get the two words comprising each integer constant. */
4563 if (GET_CODE (op0) == CONST_DOUBLE)
4565 l0u = l0s = CONST_DOUBLE_LOW (op0);
4566 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4568 else
4570 l0u = l0s = INTVAL (op0);
4571 h0u = 0, h0s = l0s < 0 ? -1 : 0;
4574 if (GET_CODE (op1) == CONST_DOUBLE)
4576 l1u = l1s = CONST_DOUBLE_LOW (op1);
4577 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4579 else
4581 l1u = l1s = INTVAL (op1);
4582 h1u = 0, h1s = l1s < 0 ? -1 : 0;
4585 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4586 we have to sign or zero-extend the values. */
4587 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4588 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4590 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4592 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4593 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4595 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4596 l0s |= ((HOST_WIDE_INT) (-1) << width);
4598 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4599 l1s |= ((HOST_WIDE_INT) (-1) << width);
4602 equal = (h0u == h1u && l0u == l1u);
4603 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4604 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4605 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4606 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4609 /* Otherwise, there are some code-specific tests we can make. */
4610 else
4612 switch (code)
4614 case EQ:
4615 /* References to the frame plus a constant or labels cannot
4616 be zero, but a SYMBOL_REF can be, due to #pragma weak. */
4617 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4618 || GET_CODE (op0) == LABEL_REF)
4619 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4620 /* On some machines, the ap reg can be 0 sometimes. */
4621 && op0 != arg_pointer_rtx
4622 #endif
4624 return const0_rtx;
4625 break;
4627 case NE:
4628 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4629 || GET_CODE (op0) == LABEL_REF)
4630 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4631 && op0 != arg_pointer_rtx
4632 #endif
4634 return const_true_rtx;
4635 break;
4637 case GEU:
4638 /* Unsigned values are never negative. */
4639 if (op1 == const0_rtx)
4640 return const_true_rtx;
4641 break;
4643 case LTU:
4644 if (op1 == const0_rtx)
4645 return const0_rtx;
4646 break;
4648 case LEU:
4649 /* Unsigned values are never greater than the largest
4650 unsigned value. */
4651 if (GET_CODE (op1) == CONST_INT
4652 && INTVAL (op1) == GET_MODE_MASK (mode)
4653 && INTEGRAL_MODE_P (mode))
4654 return const_true_rtx;
4655 break;
4657 case GTU:
4658 if (GET_CODE (op1) == CONST_INT
4659 && INTVAL (op1) == GET_MODE_MASK (mode)
4660 && INTEGRAL_MODE_P (mode))
4661 return const0_rtx;
4662 break;
4665 return 0;
4668 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4669 as appropriate. */
4670 switch (code)
4672 case EQ:
4673 return equal ? const_true_rtx : const0_rtx;
4674 case NE:
4675 return ! equal ? const_true_rtx : const0_rtx;
4676 case LT:
4677 return op0lt ? const_true_rtx : const0_rtx;
4678 case GT:
4679 return op1lt ? const_true_rtx : const0_rtx;
4680 case LTU:
4681 return op0ltu ? const_true_rtx : const0_rtx;
4682 case GTU:
4683 return op1ltu ? const_true_rtx : const0_rtx;
4684 case LE:
4685 return equal || op0lt ? const_true_rtx : const0_rtx;
4686 case GE:
4687 return equal || op1lt ? const_true_rtx : const0_rtx;
4688 case LEU:
4689 return equal || op0ltu ? const_true_rtx : const0_rtx;
4690 case GEU:
4691 return equal || op1ltu ? const_true_rtx : const0_rtx;
4694 abort ();
4697 /* Simplify CODE, an operation with result mode MODE and three operands,
4698 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4699 a constant. Return 0 if no simplification is possible. */
4702 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4703 enum rtx_code code;
4704 enum machine_mode mode, op0_mode;
4705 rtx op0, op1, op2;
4707 int width = GET_MODE_BITSIZE (mode);
4709 /* VOIDmode means "infinite" precision. */
4710 if (width == 0)
4711 width = HOST_BITS_PER_WIDE_INT;
4713 switch (code)
4715 case SIGN_EXTRACT:
4716 case ZERO_EXTRACT:
4717 if (GET_CODE (op0) == CONST_INT
4718 && GET_CODE (op1) == CONST_INT
4719 && GET_CODE (op2) == CONST_INT
4720 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4721 && width <= HOST_BITS_PER_WIDE_INT)
4723 /* Extracting a bit-field from a constant */
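 /* For example, (zero_extract:SI (const_int 0x5a) (const_int 4)
 (const_int 1)) with little-endian bit numbering shifts 0x5a
 right by 1 and masks to four bits, giving (const_int 13); a
 SIGN_EXTRACT of the same field sees bit 3 of the field set and
 sign-extends, giving (const_int -3). */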
4724 HOST_WIDE_INT val = INTVAL (op0);
4726 if (BITS_BIG_ENDIAN)
4727 val >>= (GET_MODE_BITSIZE (op0_mode)
4728 - INTVAL (op2) - INTVAL (op1));
4729 else
4730 val >>= INTVAL (op2);
4732 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4734 /* First zero-extend. */
4735 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4736 /* If desired, propagate sign bit. */
4737 if (code == SIGN_EXTRACT
4738 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4739 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4742 /* Clear the bits that don't belong in our mode,
4743 unless they and our sign bit are all one.
4744 So we get either a reasonable negative value or a reasonable
4745 unsigned value for this mode. */
4746 if (width < HOST_BITS_PER_WIDE_INT
4747 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4748 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4749 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4751 return GEN_INT (val);
4753 break;
4755 case IF_THEN_ELSE:
4756 if (GET_CODE (op0) == CONST_INT)
4757 return op0 != const0_rtx ? op1 : op2;
4758 break;
4760 default:
4761 abort ();
4764 return 0;
4767 /* If X is a nontrivial arithmetic operation on an argument
4768 for which a constant value can be determined, return
4769 the result of operating on that value, as a constant.
4770 Otherwise, return X, possibly with one or more operands
4771 modified by recursive calls to this function.
4773 If X is a register whose contents are known, we do NOT
4774 return those contents here. equiv_constant is called to
4775 perform that task.
4777 INSN is the insn that we may be modifying. If it is 0, make a copy
4778 of X before modifying it. */
4780 static rtx
4781 fold_rtx (x, insn)
4782 rtx x;
4783 rtx insn;
4785 register enum rtx_code code;
4786 register enum machine_mode mode;
4787 register char *fmt;
4788 register int i;
4789 rtx new = 0;
4790 int copied = 0;
4791 int must_swap = 0;
4793 /* Folded equivalents of first two operands of X. */
4794 rtx folded_arg0;
4795 rtx folded_arg1;
4797 /* Constant equivalents of first three operands of X;
4798 0 when no such equivalent is known. */
4799 rtx const_arg0;
4800 rtx const_arg1;
4801 rtx const_arg2;
4803 /* The mode of the first operand of X. We need this for sign and zero
4804 extends. */
4805 enum machine_mode mode_arg0;
4807 if (x == 0)
4808 return x;
4810 mode = GET_MODE (x);
4811 code = GET_CODE (x);
4812 switch (code)
4814 case CONST:
4815 case CONST_INT:
4816 case CONST_DOUBLE:
4817 case SYMBOL_REF:
4818 case LABEL_REF:
4819 case REG:
4820 /* No use simplifying an EXPR_LIST
4821 since they are used only for lists of args
4822 in a function call's REG_EQUAL note. */
4823 case EXPR_LIST:
4824 return x;
4826 #ifdef HAVE_cc0
4827 case CC0:
4828 return prev_insn_cc0;
4829 #endif
4831 case PC:
4832 /* If the next insn is a CODE_LABEL followed by a jump table,
4833 PC's value is a LABEL_REF pointing to that label. That
4834 lets us fold switch statements on the Vax. */
4835 if (insn && GET_CODE (insn) == JUMP_INSN)
4837 rtx next = next_nonnote_insn (insn);
4839 if (next && GET_CODE (next) == CODE_LABEL
4840 && NEXT_INSN (next) != 0
4841 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4842 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4843 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4844 return gen_rtx (LABEL_REF, Pmode, next);
4846 break;
4848 case SUBREG:
4849 /* See if we previously assigned a constant value to this SUBREG. */
4850 if ((new = lookup_as_function (x, CONST_INT)) != 0
4851 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4852 return new;
4854 /* If this is a paradoxical SUBREG, we have no idea what value the
4855 extra bits would have. However, if the operand is equivalent
4856 to a SUBREG whose operand is the same as our mode, and all the
4857 modes are within a word, we can just use the inner operand
4858 because these SUBREGs just say how to treat the register.
4860 Similarly if we find an integer constant. */
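 /* A sketch, assuming both modes fit in a word: for the paradoxical
 (subreg:SI (reg:HI 70) 0), if the hash table records that reg 70
 holds (const_int 5), a VOIDmode constant, we simply return
 (const_int 5). */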
4862 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4864 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4865 struct table_elt *elt;
4867 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4868 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4869 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4870 imode)) != 0)
4871 for (elt = elt->first_same_value;
4872 elt; elt = elt->next_same_value)
4874 if (CONSTANT_P (elt->exp)
4875 && GET_MODE (elt->exp) == VOIDmode)
4876 return elt->exp;
4878 if (GET_CODE (elt->exp) == SUBREG
4879 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4880 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4881 return copy_rtx (SUBREG_REG (elt->exp));
4884 return x;
4887 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4888 We might be able to if the SUBREG is extracting a single word in an
4889 integral mode or extracting the low part. */
4891 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4892 const_arg0 = equiv_constant (folded_arg0);
4893 if (const_arg0)
4894 folded_arg0 = const_arg0;
4896 if (folded_arg0 != SUBREG_REG (x))
4898 new = 0;
4900 if (GET_MODE_CLASS (mode) == MODE_INT
4901 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4902 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4903 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4904 GET_MODE (SUBREG_REG (x)));
4905 if (new == 0 && subreg_lowpart_p (x))
4906 new = gen_lowpart_if_possible (mode, folded_arg0);
4907 if (new)
4908 return new;
4911 /* If this is a narrowing SUBREG and our operand is a REG, see if
4912 we can find an equivalence for REG that is an arithmetic operation
4913 in a wider mode where both operands are paradoxical SUBREGs
4914 from objects of our result mode. In that case, we couldn't report
4915 an equivalent value for that operation, since we don't know what the
4916 extra bits will be. But we can find an equivalence for this SUBREG
4917 by folding that operation in the narrow mode. This allows us to
4918 fold arithmetic in narrow modes when the machine only supports
4919 word-sized arithmetic.
4921 Also look for a case where we have a SUBREG whose operand is the
4922 same as our result. If both modes are smaller than a word, we
4923 are simply interpreting a register in different modes and we
4924 can use the inner value. */
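 /* A sketch of the second case, assuming SImode fits in a word:
 when folding (subreg:HI (reg:SI 70) 0) and reg 70 is known
 equivalent to (subreg:SI (reg:HI 71) 0), both SUBREGs merely
 reinterpret a register, so we can return (reg:HI 71). */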
4926 if (GET_CODE (folded_arg0) == REG
4927 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4928 && subreg_lowpart_p (x))
4930 struct table_elt *elt;
4932 /* We can use HASH here since we know that canon_hash won't be
4933 called. */
4934 elt = lookup (folded_arg0,
4935 HASH (folded_arg0, GET_MODE (folded_arg0)),
4936 GET_MODE (folded_arg0));
4938 if (elt)
4939 elt = elt->first_same_value;
4941 for (; elt; elt = elt->next_same_value)
4943 enum rtx_code eltcode = GET_CODE (elt->exp);
4945 /* Just check for unary and binary operations. */
4946 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4947 && GET_CODE (elt->exp) != SIGN_EXTEND
4948 && GET_CODE (elt->exp) != ZERO_EXTEND
4949 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4950 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4952 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4954 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4955 op0 = fold_rtx (op0, NULL_RTX);
4957 op0 = equiv_constant (op0);
4958 if (op0)
4959 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4960 op0, mode);
4962 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4963 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4964 && eltcode != DIV && eltcode != MOD
4965 && eltcode != UDIV && eltcode != UMOD
4966 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4967 && eltcode != ROTATE && eltcode != ROTATERT
4968 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4969 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4970 == mode))
4971 || CONSTANT_P (XEXP (elt->exp, 0)))
4972 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4973 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4974 == mode))
4975 || CONSTANT_P (XEXP (elt->exp, 1))))
4977 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4978 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4980 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4981 op0 = fold_rtx (op0, NULL_RTX);
4983 if (op0)
4984 op0 = equiv_constant (op0);
4986 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4987 op1 = fold_rtx (op1, NULL_RTX);
4989 if (op1)
4990 op1 = equiv_constant (op1);
4992 /* If we are looking for the low SImode part of
4993 (ashift:DI c (const_int 32)), it doesn't work
4994 to compute that in SImode, because a 32-bit shift
4995 in SImode is unpredictable. We know the value is 0. */
4996 if (op0 && op1
4997 && GET_CODE (elt->exp) == ASHIFT
4998 && GET_CODE (op1) == CONST_INT
4999 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5001 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5003 /* If the count fits in the inner mode's width,
5004 but exceeds the outer mode's width,
5005 the value will get truncated to 0
5006 by the subreg. */
5007 new = const0_rtx;
5008 else
5009 /* If the count exceeds even the inner mode's width,
5010 don't fold this expression. */
5011 new = 0;
5013 else if (op0 && op1)
5014 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5015 op0, op1);
5018 else if (GET_CODE (elt->exp) == SUBREG
5019 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5020 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5021 <= UNITS_PER_WORD)
5022 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5023 new = copy_rtx (SUBREG_REG (elt->exp));
5025 if (new)
5026 return new;
5030 return x;
5032 case NOT:
5033 case NEG:
5034 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5035 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5036 new = lookup_as_function (XEXP (x, 0), code);
5037 if (new)
5038 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5039 break;
5041 case MEM:
5042 /* If we are not actually processing an insn, don't try to find the
5043 best address. Not only don't we care, but we could modify the
5044 MEM in an invalid way since we have no insn to validate against. */
5045 if (insn != 0)
5046 find_best_addr (insn, &XEXP (x, 0));
5049 /* Even if we don't fold in the insn itself,
5050 we can safely do so here, in hopes of getting a constant. */
5051 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5052 rtx base = 0;
5053 HOST_WIDE_INT offset = 0;
5055 if (GET_CODE (addr) == REG
5056 && REGNO_QTY_VALID_P (REGNO (addr))
5057 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
5058 && qty_const[reg_qty[REGNO (addr)]] != 0)
5059 addr = qty_const[reg_qty[REGNO (addr)]];
5061 /* If address is constant, split it into a base and integer offset. */
5062 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5063 base = addr;
5064 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5065 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5067 base = XEXP (XEXP (addr, 0), 0);
5068 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5070 else if (GET_CODE (addr) == LO_SUM
5071 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5072 base = XEXP (addr, 1);
5074 /* If this is a constant pool reference, we can fold it into its
5075 constant to allow better value tracking. */
5076 if (base && GET_CODE (base) == SYMBOL_REF
5077 && CONSTANT_POOL_ADDRESS_P (base))
5079 rtx constant = get_pool_constant (base);
5080 enum machine_mode const_mode = get_pool_mode (base);
5081 rtx new;
5083 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5084 constant_pool_entries_cost = COST (constant);
5086 /* If we are loading the full constant, we have an equivalence. */
5087 if (offset == 0 && mode == const_mode)
5088 return constant;
5090 /* If this actually isn't a constant (weird!), we can't do
5091 anything. Otherwise, handle the two most common cases:
5092 extracting a word from a multi-word constant, and extracting
5093 the low-order bits. Other cases don't seem common enough to
5094 worry about. */
5095 if (! CONSTANT_P (constant))
5096 return x;
5098 if (GET_MODE_CLASS (mode) == MODE_INT
5099 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5100 && offset % UNITS_PER_WORD == 0
5101 && (new = operand_subword (constant,
5102 offset / UNITS_PER_WORD,
5103 0, const_mode)) != 0)
5104 return new;
5106 if (((BYTES_BIG_ENDIAN
5107 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5108 || (! BYTES_BIG_ENDIAN && offset == 0))
5109 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5110 return new;
5113 /* If this is a reference to a label at a known position in a jump
5114 table, we also know its value. */
5115 if (base && GET_CODE (base) == LABEL_REF)
5117 rtx label = XEXP (base, 0);
5118 rtx table_insn = NEXT_INSN (label);
5120 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5121 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5123 rtx table = PATTERN (table_insn);
5125 if (offset >= 0
5126 && (offset / GET_MODE_SIZE (GET_MODE (table))
5127 < XVECLEN (table, 0)))
5128 return XVECEXP (table, 0,
5129 offset / GET_MODE_SIZE (GET_MODE (table)));
5131 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5132 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5134 rtx table = PATTERN (table_insn);
5136 if (offset >= 0
5137 && (offset / GET_MODE_SIZE (GET_MODE (table))
5138 < XVECLEN (table, 1)))
5140 offset /= GET_MODE_SIZE (GET_MODE (table));
5141 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
5142 XEXP (table, 0));
5144 if (GET_MODE (table) != Pmode)
5145 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
5147 /* Indicate this is a constant. This isn't a
5148 valid form of CONST, but it will only be used
5149 to fold the next insns and then discarded, so
5150 it should be safe. */
5151 return gen_rtx (CONST, GET_MODE (new), new);
5156 return x;
5160 const_arg0 = 0;
5161 const_arg1 = 0;
5162 const_arg2 = 0;
5163 mode_arg0 = VOIDmode;
5165 /* Try folding our operands.
5166 Then see which ones have constant values known. */
5168 fmt = GET_RTX_FORMAT (code);
5169 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5170 if (fmt[i] == 'e')
5172 rtx arg = XEXP (x, i);
5173 rtx folded_arg = arg, const_arg = 0;
5174 enum machine_mode mode_arg = GET_MODE (arg);
5175 rtx cheap_arg, expensive_arg;
5176 rtx replacements[2];
5177 int j;
5179 /* Most arguments are cheap, so handle them specially. */
5180 switch (GET_CODE (arg))
5182 case REG:
5183 /* This is the same as calling equiv_constant; it is duplicated
5184 here for speed. */
5185 if (REGNO_QTY_VALID_P (REGNO (arg))
5186 && qty_const[reg_qty[REGNO (arg)]] != 0
5187 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5188 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5189 const_arg
5190 = gen_lowpart_if_possible (GET_MODE (arg),
5191 qty_const[reg_qty[REGNO (arg)]]);
5192 break;
5194 case CONST:
5195 case CONST_INT:
5196 case SYMBOL_REF:
5197 case LABEL_REF:
5198 case CONST_DOUBLE:
5199 const_arg = arg;
5200 break;
5202 #ifdef HAVE_cc0
5203 case CC0:
5204 folded_arg = prev_insn_cc0;
5205 mode_arg = prev_insn_cc0_mode;
5206 const_arg = equiv_constant (folded_arg);
5207 break;
5208 #endif
5210 default:
5211 folded_arg = fold_rtx (arg, insn);
5212 const_arg = equiv_constant (folded_arg);
5215 /* For the first three operands, see if the operand
5216 is constant or equivalent to a constant. */
5217 switch (i)
5219 case 0:
5220 folded_arg0 = folded_arg;
5221 const_arg0 = const_arg;
5222 mode_arg0 = mode_arg;
5223 break;
5224 case 1:
5225 folded_arg1 = folded_arg;
5226 const_arg1 = const_arg;
5227 break;
5228 case 2:
5229 const_arg2 = const_arg;
5230 break;
5233 /* Pick the least expensive of the folded argument and an
5234 equivalent constant argument. */
5235 if (const_arg == 0 || const_arg == folded_arg
5236 || COST (const_arg) > COST (folded_arg))
5237 cheap_arg = folded_arg, expensive_arg = const_arg;
5238 else
5239 cheap_arg = const_arg, expensive_arg = folded_arg;
5241 /* Try to replace the operand with the cheapest of the two
5242 possibilities. If it doesn't work and this is either of the first
5243 two operands of a commutative operation, try swapping them.
5244 If THAT fails, try the more expensive, provided it is cheaper
5245 than what is already there. */
5247 if (cheap_arg == XEXP (x, i))
5248 continue;
5250 if (insn == 0 && ! copied)
5252 x = copy_rtx (x);
5253 copied = 1;
5256 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5257 for (j = 0;
5258 j < 2 && replacements[j]
5259 && COST (replacements[j]) < COST (XEXP (x, i));
5260 j++)
5262 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5263 break;
5265 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5267 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5268 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5270 if (apply_change_group ())
5272 /* Swap them back to be invalid so that this loop can
5273 continue and flag them to be swapped back later. */
5274 rtx tem;
5276 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5277 XEXP (x, 1) = tem;
5278 must_swap = 1;
5279 break;
5285 else if (fmt[i] == 'E')
5286 /* Don't try to fold inside of a vector of expressions.
5287 Doing nothing is harmless. */
5290 /* If a commutative operation, place a constant integer as the second
5291 operand unless the first operand is also a constant integer. Otherwise,
5292 place any constant second unless the first operand is also a constant. */
5294 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5296 if (must_swap || (const_arg0
5297 && (const_arg1 == 0
5298 || (GET_CODE (const_arg0) == CONST_INT
5299 && GET_CODE (const_arg1) != CONST_INT))))
5301 register rtx tem = XEXP (x, 0);
5303 if (insn == 0 && ! copied)
5305 x = copy_rtx (x);
5306 copied = 1;
5309 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5310 validate_change (insn, &XEXP (x, 1), tem, 1);
5311 if (apply_change_group ())
5313 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5314 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5319 /* If X is an arithmetic operation, see if we can simplify it. */
5321 switch (GET_RTX_CLASS (code))
5323 case '1':
5325 int is_const = 0;
5327 /* We can't simplify extension ops unless we know the
5328 original mode. */
5329 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5330 && mode_arg0 == VOIDmode)
5331 break;
5333 /* If we had a CONST, strip it off and put it back later if we
5334 fold. */
5335 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5336 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5338 new = simplify_unary_operation (code, mode,
5339 const_arg0 ? const_arg0 : folded_arg0,
5340 mode_arg0);
5341 if (new != 0 && is_const)
5342 new = gen_rtx (CONST, mode, new);
5344 break;
5346 case '<':
5347 /* See what items are actually being compared and set FOLDED_ARG[01]
5348 to those values and CODE to the actual comparison code. If any are
5349 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5350 do anything if both operands are already known to be constant. */
5352 if (const_arg0 == 0 || const_arg1 == 0)
5354 struct table_elt *p0, *p1;
5355 rtx true = const_true_rtx, false = const0_rtx;
5356 enum machine_mode mode_arg1;
5358 #ifdef FLOAT_STORE_FLAG_VALUE
5359 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5361 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5362 mode);
5363 false = CONST0_RTX (mode);
5365 #endif
5367 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5368 &mode_arg0, &mode_arg1);
5369 const_arg0 = equiv_constant (folded_arg0);
5370 const_arg1 = equiv_constant (folded_arg1);
5372 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5373 what kinds of things are being compared, so we can't do
5374 anything with this comparison. */
5376 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5377 break;
5379 /* If we do not now have two constants being compared, see if we
5380 can nevertheless deduce some things about the comparison. */
5381 if (const_arg0 == 0 || const_arg1 == 0)
5383 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
5384 constant? These aren't zero, but we don't know their sign. */
5385 if (const_arg1 == const0_rtx
5386 && (NONZERO_BASE_PLUS_P (folded_arg0)
5387 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5388 come out as 0. */
5389 || GET_CODE (folded_arg0) == SYMBOL_REF
5390 #endif
5391 || GET_CODE (folded_arg0) == LABEL_REF
5392 || GET_CODE (folded_arg0) == CONST))
5394 if (code == EQ)
5395 return false;
5396 else if (code == NE)
5397 return true;
5400 /* See if the two operands are the same. We don't do this
5401 for IEEE floating-point, since we can't assume x == x:
5402 x might be a NaN. */
5404 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5405 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5406 && (folded_arg0 == folded_arg1
5407 || (GET_CODE (folded_arg0) == REG
5408 && GET_CODE (folded_arg1) == REG
5409 && (reg_qty[REGNO (folded_arg0)]
5410 == reg_qty[REGNO (folded_arg1)]))
5411 || ((p0 = lookup (folded_arg0,
5412 (safe_hash (folded_arg0, mode_arg0)
5413 % NBUCKETS), mode_arg0))
5414 && (p1 = lookup (folded_arg1,
5415 (safe_hash (folded_arg1, mode_arg0)
5416 % NBUCKETS), mode_arg0))
5417 && p0->first_same_value == p1->first_same_value)))
5418 return ((code == EQ || code == LE || code == GE
5419 || code == LEU || code == GEU)
5420 ? true : false);
5422 /* If FOLDED_ARG0 is a register, see if the comparison we are
5423 doing now is either the same as we did before or the reverse
5424 (we only check the reverse if not floating-point). */
5425 else if (GET_CODE (folded_arg0) == REG)
5427 int qty = reg_qty[REGNO (folded_arg0)];
5429 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5430 && (comparison_dominates_p (qty_comparison_code[qty], code)
5431 || (comparison_dominates_p (qty_comparison_code[qty],
5432 reverse_condition (code))
5433 && ! FLOAT_MODE_P (mode_arg0)))
5434 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5435 || (const_arg1
5436 && rtx_equal_p (qty_comparison_const[qty],
5437 const_arg1))
5438 || (GET_CODE (folded_arg1) == REG
5439 && (reg_qty[REGNO (folded_arg1)]
5440 == qty_comparison_qty[qty]))))
5441 return (comparison_dominates_p (qty_comparison_code[qty],
5442 code)
5443 ? true : false);
5448 /* If we are comparing against zero, see if the first operand is
5449 equivalent to an IOR with a constant. If so, we may be able to
5450 determine the result of this comparison. */
5452 if (const_arg1 == const0_rtx)
5454 rtx y = lookup_as_function (folded_arg0, IOR);
5455 rtx inner_const;
5457 if (y != 0
5458 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5459 && GET_CODE (inner_const) == CONST_INT
5460 && INTVAL (inner_const) != 0)
5462 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5463 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5464 && (INTVAL (inner_const)
5465 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5466 rtx true = const_true_rtx, false = const0_rtx;
5468 #ifdef FLOAT_STORE_FLAG_VALUE
5469 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5471 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5472 mode);
5473 false = CONST0_RTX (mode);
5475 #endif
5477 switch (code)
5479 case EQ:
5480 return false;
5481 case NE:
5482 return true;
5483 case LT: case LE:
5484 if (has_sign)
5485 return true;
5486 break;
5487 case GT: case GE:
5488 if (has_sign)
5489 return false;
5490 break;
5495 new = simplify_relational_operation (code, mode_arg0,
5496 const_arg0 ? const_arg0 : folded_arg0,
5497 const_arg1 ? const_arg1 : folded_arg1);
5498 #ifdef FLOAT_STORE_FLAG_VALUE
5499 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5500 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5501 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5502 #endif
5503 break;
5505 case '2':
5506 case 'c':
5507 switch (code)
5509 case PLUS:
5510 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5511 with that LABEL_REF as its second operand. If so, the result is
5512 the first operand of that MINUS. This handles switches with an
5513 ADDR_DIFF_VEC table. */
5514 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5516 rtx y
5517 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5518 : lookup_as_function (folded_arg0, MINUS);
5520 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5521 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5522 return XEXP (y, 0);
5524 /* Now try for a CONST of a MINUS like the above. */
5525 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5526 : lookup_as_function (folded_arg0, CONST))) != 0
5527 && GET_CODE (XEXP (y, 0)) == MINUS
5528 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5529 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5530 return XEXP (XEXP (y, 0), 0);
5533 /* Likewise if the operands are in the other order. */
5534 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5536 rtx y
5537 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5538 : lookup_as_function (folded_arg1, MINUS);
5540 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5541 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5542 return XEXP (y, 0);
5544 /* Now try for a CONST of a MINUS like the above. */
5545 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5546 : lookup_as_function (folded_arg1, CONST))) != 0
5547 && GET_CODE (XEXP (y, 0)) == MINUS
5548 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5549 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5550 return XEXP (XEXP (y, 0), 0);
5553 /* If second operand is a register equivalent to a negative
5554 CONST_INT, see if we can find a register equivalent to the
5555 positive constant. Make a MINUS if so. Don't do this for
5556 a negative constant since we might then alternate between
5557 choosing positive and negative constants. Having the positive
5558 constant previously-used is the more common case. */
5559 if (const_arg1 && GET_CODE (const_arg1) == CONST_INT
5560 && INTVAL (const_arg1) < 0 && GET_CODE (folded_arg1) == REG)
5562 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5563 struct table_elt *p
5564 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5565 mode);
5567 if (p)
5568 for (p = p->first_same_value; p; p = p->next_same_value)
5569 if (GET_CODE (p->exp) == REG)
5570 return cse_gen_binary (MINUS, mode, folded_arg0,
5571 canon_reg (p->exp, NULL_RTX));
5573 goto from_plus;
5575 case MINUS:
5576 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5577 If so, produce (PLUS Z C2-C). */
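 /* For example, if Y is known to be (plus:SI (reg:SI 71)
 (const_int 10)), then (minus:SI Y (const_int 4)) folds to
 (plus:SI (reg:SI 71) (const_int 6)). */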
5578 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5580 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5581 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5582 return fold_rtx (plus_constant (copy_rtx (y),
5583 -INTVAL (const_arg1)),
5584 NULL_RTX);
5587 /* ... fall through ... */
5589 from_plus:
5590 case SMIN: case SMAX: case UMIN: case UMAX:
5591 case IOR: case AND: case XOR:
5592 case MULT: case DIV: case UDIV:
5593 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5594 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5595 is known to be of similar form, we may be able to replace the
5596 operation with a combined operation. This may eliminate the
5597 intermediate operation if every use is simplified in this way.
5598 Note that the similar optimization done by combine.c only works
5599 if the intermediate operation's result has only one reference. */
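 /* A sketch with hypothetical pseudos: if FOLDED_ARG0 is a register
 known equivalent to (plus:SI (reg:SI 71) (const_int 4)) and we
 are folding (plus:SI FOLDED_ARG0 (const_int 8)), ASSOCIATE_CODE
 below is PLUS, NEW_CONST becomes (const_int 12), and the result
 is (plus:SI (reg:SI 71) (const_int 12)). */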
5601 if (GET_CODE (folded_arg0) == REG
5602 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5604 int is_shift
5605 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5606 rtx y = lookup_as_function (folded_arg0, code);
5607 rtx inner_const;
5608 enum rtx_code associate_code;
5609 rtx new_const;
5611 if (y == 0
5612 || 0 == (inner_const
5613 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5614 || GET_CODE (inner_const) != CONST_INT
5615 /* If we have compiled a statement like
5616 "if (x == (x & mask1))", and now are looking at
5617 "x & mask2", we will have a case where the first operand
5618 of Y is the same as our first operand. Unless we detect
5619 this case, an infinite loop will result. */
5620 || XEXP (y, 0) == folded_arg0)
5621 break;
5623 /* Don't associate these operations if they are a PLUS with the
5624 same constant and it is a power of two. These might be doable
5625 with a pre- or post-increment. Similarly for two subtracts of
5626 identical powers of two with post-decrement. */
5628 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5629 && (0
5630 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5631 || exact_log2 (INTVAL (const_arg1)) >= 0
5632 #endif
5633 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5634 || exact_log2 (- INTVAL (const_arg1)) >= 0
5635 #endif
5637 break;
5639 /* Compute the code used to compose the constants. For example,
5640 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5642 associate_code
5643 = (code == MULT || code == DIV || code == UDIV ? MULT
5644 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5646 new_const = simplify_binary_operation (associate_code, mode,
5647 const_arg1, inner_const);
5649 if (new_const == 0)
5650 break;
5652 /* If we are associating shift operations, don't let this
5653 produce a shift of the size of the object or larger.
5654 This could occur when we follow a sign-extend by a right
5655 shift on a machine that does a sign-extend as a pair
5656 of shifts. */
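/* E.g. (hypothetical SImode case): composing two ASHIFTRTs with
   counts 24 and 8 gives a count of 32, which we must not produce for
   a 32-bit mode; since an arithmetic right shift only replicates the
   sign bit, a count of 31 gives the same result and is used instead.  */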
5658 if (is_shift && GET_CODE (new_const) == CONST_INT
5659 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5661 /* As an exception, we can turn an ASHIFTRT of this
5662 form into a shift of the number of bits - 1. */
5663 if (code == ASHIFTRT)
5664 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5665 else
5666 break;
5669 y = copy_rtx (XEXP (y, 0));
5671 /* If Y contains our first operand (the most common way this
5672 can happen is if Y is a MEM), we would go into an infinite
5673 loop if we tried to fold it. So don't in that case. */
5675 if (! reg_mentioned_p (folded_arg0, y))
5676 y = fold_rtx (y, insn);
5678 return cse_gen_binary (code, mode, y, new_const);
5682 new = simplify_binary_operation (code, mode,
5683 const_arg0 ? const_arg0 : folded_arg0,
5684 const_arg1 ? const_arg1 : folded_arg1);
5685 break;
5687 case 'o':
5688 /* (lo_sum (high X) X) is simply X. */
5689 if (code == LO_SUM && const_arg0 != 0
5690 && GET_CODE (const_arg0) == HIGH
5691 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5692 return const_arg1;
5693 break;
5695 case '3':
5696 case 'b':
5697 new = simplify_ternary_operation (code, mode, mode_arg0,
5698 const_arg0 ? const_arg0 : folded_arg0,
5699 const_arg1 ? const_arg1 : folded_arg1,
5700 const_arg2 ? const_arg2 : XEXP (x, 2));
5701 break;
5704 return new ? new : x;
5707 /* Return a constant value currently equivalent to X.
5708 Return 0 if we don't know one. */
5710 static rtx
5711 equiv_constant (x)
5712 rtx x;
5714 if (GET_CODE (x) == REG
5715 && REGNO_QTY_VALID_P (REGNO (x))
5716 && qty_const[reg_qty[REGNO (x)]])
5717 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5719 if (x != 0 && CONSTANT_P (x))
5720 return x;
5722 /* If X is a MEM, try to fold it outside the context of any insn to see if
5723 it might be equivalent to a constant. That handles the case where it
5724 is a constant-pool reference. Then try to look it up in the hash table
5725 in case it is something whose value we have seen before. */
5727 if (GET_CODE (x) == MEM)
5729 struct table_elt *elt;
5731 x = fold_rtx (x, NULL_RTX);
5732 if (CONSTANT_P (x))
5733 return x;
5735 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5736 if (elt == 0)
5737 return 0;
5739 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5740 if (elt->is_const && CONSTANT_P (elt->exp))
5741 return elt->exp;
5744 return 0;
5747 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5748 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5749 least-significant part of X.
5750 MODE specifies how big a part of X to return.
5752 If the requested operation cannot be done, 0 is returned.
5754 This is similar to gen_lowpart in emit-rtl.c. */
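/* Usage sketch (hypothetical pseudo): gen_lowpart_if_possible (QImode, r)
   for an SImode pseudo r yields (subreg:QI (reg:SI r) 0) via
   gen_lowpart_common; for a MEM the address is adjusted instead, and
   0 is returned rather than forcing anything into a register.  */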
rtx
5757 gen_lowpart_if_possible (mode, x)
5758 enum machine_mode mode;
5759 register rtx x;
5761 rtx result = gen_lowpart_common (mode, x);
5763 if (result)
5764 return result;
5765 else if (GET_CODE (x) == MEM)
5767 /* This is the only other case we handle. */
5768 register int offset = 0;
5769 rtx new;
5771 if (WORDS_BIG_ENDIAN)
5772 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5773 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5774 if (BYTES_BIG_ENDIAN)
5775 /* Adjust the address so that the address-after-the-data is
5776 unchanged. */
5777 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5778 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
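/* Worked example, assuming a 32-bit big-endian target: narrowing an
   SImode MEM to QImode gives offset 0 - (1 - 4) = 3, so the new MEM
   names the low-order byte and the address just past the data is
   unchanged.  */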
5779 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5780 if (! memory_address_p (mode, XEXP (new, 0)))
5781 return 0;
5782 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5783 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5784 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5785 return new;
5787 else
5788 return 0;
5791 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5792 branch. It will be zero if not.
5794 In certain cases, this can cause us to add an equivalence. For example,
5795 if we are following the taken case of
5796 if (i == 2)
5797 we can add the fact that `i' and `2' are now equivalent.
5799 In any case, we can record that this comparison was passed. If the same
5800 comparison is seen later, we will know its value. */
5802 static void
5803 record_jump_equiv (insn, taken)
5804 rtx insn;
5805 int taken;
5807 int cond_known_true;
5808 rtx op0, op1;
5809 enum machine_mode mode, mode0, mode1;
5810 int reversed_nonequality = 0;
5811 enum rtx_code code;
5813 /* Ensure this is the right kind of insn. */
5814 if (! condjump_p (insn) || simplejump_p (insn))
5815 return;
5817 /* See if this jump condition is known true or false. */
5818 if (taken)
5819 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5820 else
5821 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5823 /* Get the type of comparison being done and the operands being compared.
5824 If we had to reverse a non-equality condition, record that fact so we
5825 know that it isn't valid for floating-point. */
5826 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5827 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5828 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5830 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5831 if (! cond_known_true)
5833 reversed_nonequality = (code != EQ && code != NE);
5834 code = reverse_condition (code);
5837 /* The mode is the mode of the non-constant. */
5838 mode = mode0;
5839 if (mode1 != VOIDmode)
5840 mode = mode1;
5842 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5845 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5846 REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
5847 Make any useful entries we can with that information. Called from
5848 above function and called recursively. */
5850 static void
5851 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5852 enum rtx_code code;
5853 enum machine_mode mode;
5854 rtx op0, op1;
5855 int reversed_nonequality;
5857 unsigned op0_hash, op1_hash;
5858 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5859 struct table_elt *op0_elt, *op1_elt;
5861 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5862 we know that they are also equal in the smaller mode (this is also
5863 true for all smaller modes whether or not there is a SUBREG, but
5864 is not worth testing for with no SUBREG).  */
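/* E.g. (hypothetical modes): if OP0 is the paradoxical
   (subreg:SI (reg:HI r) 0) and is known equal to an SImode OP1, then
   (reg:HI r) is also equal to the low HImode part of OP1, which the
   recursive call below records.  */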
5866 /* Note that GET_MODE (op0) may not equal MODE. */
5867 if (code == EQ && GET_CODE (op0) == SUBREG
5868 && (GET_MODE_SIZE (GET_MODE (op0))
5869 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5871 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5872 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5874 record_jump_cond (code, mode, SUBREG_REG (op0),
5875 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5876 reversed_nonequality);
5879 if (code == EQ && GET_CODE (op1) == SUBREG
5880 && (GET_MODE_SIZE (GET_MODE (op1))
5881 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5883 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5884 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5886 record_jump_cond (code, mode, SUBREG_REG (op1),
5887 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5888 reversed_nonequality);
5891 /* Similarly, if this is an NE comparison, and either is a SUBREG
5892 making a smaller mode, we know the whole thing is also NE. */
5894 /* Note that GET_MODE (op0) may not equal MODE;
5895 if we test MODE instead, we can get an infinite recursion
5896 alternating between two modes each wider than MODE. */
5898 if (code == NE && GET_CODE (op0) == SUBREG
5899 && subreg_lowpart_p (op0)
5900 && (GET_MODE_SIZE (GET_MODE (op0))
5901 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5903 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5904 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5906 record_jump_cond (code, mode, SUBREG_REG (op0),
5907 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5908 reversed_nonequality);
5911 if (code == NE && GET_CODE (op1) == SUBREG
5912 && subreg_lowpart_p (op1)
5913 && (GET_MODE_SIZE (GET_MODE (op1))
5914 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5916 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5917 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5919 record_jump_cond (code, mode, SUBREG_REG (op1),
5920 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5921 reversed_nonequality);
5924 /* Hash both operands. */
5926 do_not_record = 0;
5927 hash_arg_in_memory = 0;
5928 hash_arg_in_struct = 0;
5929 op0_hash = HASH (op0, mode);
5930 op0_in_memory = hash_arg_in_memory;
5931 op0_in_struct = hash_arg_in_struct;
5933 if (do_not_record)
5934 return;
5936 do_not_record = 0;
5937 hash_arg_in_memory = 0;
5938 hash_arg_in_struct = 0;
5939 op1_hash = HASH (op1, mode);
5940 op1_in_memory = hash_arg_in_memory;
5941 op1_in_struct = hash_arg_in_struct;
5943 if (do_not_record)
5944 return;
5946 /* Look up both operands. */
5947 op0_elt = lookup (op0, op0_hash, mode);
5948 op1_elt = lookup (op1, op1_hash, mode);
5950 /* If both operands are already equivalent or if they are not in the
5951 table but are identical, do nothing. */
5952 if ((op0_elt != 0 && op1_elt != 0
5953 && op0_elt->first_same_value == op1_elt->first_same_value)
5954 || op0 == op1 || rtx_equal_p (op0, op1))
5955 return;
5957 /* If we aren't setting two things equal, all we can do is save this
5958 comparison. Similarly if this is floating-point. In the latter
5959 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5960 If we record the equality, we might inadvertently delete code
5961 whose intent was to change -0 to +0. */
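/* For instance, after `if (x == 0.0) x = 0.0;' the store is not dead:
   x may have held -0.0, which compares equal to 0.0 under IEEE rules,
   and recording the equivalence might let us delete that store.  */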
5963 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
5965 /* If we reversed a floating-point comparison, if OP0 is not a
5966 register, or if OP1 is neither a register nor a constant, we can't
5967 do anything. */
5969 if (GET_CODE (op1) != REG)
5970 op1 = equiv_constant (op1);
5972 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5973 || GET_CODE (op0) != REG || op1 == 0)
5974 return;
5976 /* Put OP0 in the hash table if it isn't already. This gives it a
5977 new quantity number. */
5978 if (op0_elt == 0)
5980 if (insert_regs (op0, NULL_PTR, 0))
5982 rehash_using_reg (op0);
5983 op0_hash = HASH (op0, mode);
5985 /* If OP0 is contained in OP1, this changes its hash code
5986 as well. Faster to rehash than to check, except
5987 for the simple case of a constant. */
5988 if (! CONSTANT_P (op1))
5989 op1_hash = HASH (op1, mode);
5992 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5993 op0_elt->in_memory = op0_in_memory;
5994 op0_elt->in_struct = op0_in_struct;
5997 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5998 if (GET_CODE (op1) == REG)
6000 /* Look it up again--in case op0 and op1 are the same. */
6001 op1_elt = lookup (op1, op1_hash, mode);
6003 /* Put OP1 in the hash table so it gets a new quantity number. */
6004 if (op1_elt == 0)
6006 if (insert_regs (op1, NULL_PTR, 0))
6008 rehash_using_reg (op1);
6009 op1_hash = HASH (op1, mode);
6012 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6013 op1_elt->in_memory = op1_in_memory;
6014 op1_elt->in_struct = op1_in_struct;
6017 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
6018 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
6020 else
6022 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
6023 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
6026 return;
6029 /* If either side is still missing an equivalence, make it now,
6030 then merge the equivalences. */
6032 if (op0_elt == 0)
6034 if (insert_regs (op0, NULL_PTR, 0))
6036 rehash_using_reg (op0);
6037 op0_hash = HASH (op0, mode);
6040 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6041 op0_elt->in_memory = op0_in_memory;
6042 op0_elt->in_struct = op0_in_struct;
6045 if (op1_elt == 0)
6047 if (insert_regs (op1, NULL_PTR, 0))
6049 rehash_using_reg (op1);
6050 op1_hash = HASH (op1, mode);
6053 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6054 op1_elt->in_memory = op1_in_memory;
6055 op1_elt->in_struct = op1_in_struct;
6058 merge_equiv_classes (op0_elt, op1_elt);
6059 last_jump_equiv_class = op0_elt;
6062 /* CSE processing for one instruction.
6063 First simplify sources and addresses of all assignments
6064 in the instruction, using previously-computed equivalent values.
6065 Then install the new sources and destinations in the table
6066 of available values.
6068 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
6069 the insn. */
6071 /* Data on one SET contained in the instruction. */
6073 struct set
6075 /* The SET rtx itself. */
6076 rtx rtl;
6077 /* The SET_SRC of the rtx (the original value, if it is changing). */
6078 rtx src;
6079 /* The hash-table element for the SET_SRC of the SET. */
6080 struct table_elt *src_elt;
6081 /* Hash value for the SET_SRC. */
6082 unsigned src_hash;
6083 /* Hash value for the SET_DEST. */
6084 unsigned dest_hash;
6085 /* The SET_DEST, with SUBREG, etc., stripped. */
6086 rtx inner_dest;
6087 /* Place where the pointer to the INNER_DEST was found. */
6088 rtx *inner_dest_loc;
6089 /* Nonzero if the SET_SRC is in memory. */
6090 char src_in_memory;
6091 /* Nonzero if the SET_SRC is in a structure. */
6092 char src_in_struct;
6093 /* Nonzero if the SET_SRC contains something
6094 whose value cannot be predicted and understood. */
6095 char src_volatile;
6096 /* Original machine mode, in case it becomes a CONST_INT. */
6097 enum machine_mode mode;
6098 /* A constant equivalent for SET_SRC, if any. */
6099 rtx src_const;
6100 /* Hash value of constant equivalent for SET_SRC. */
6101 unsigned src_const_hash;
6102 /* Table entry for constant equivalent for SET_SRC, if any. */
6103 struct table_elt *src_const_elt;
6106 static void
6107 cse_insn (insn, in_libcall_block)
6108 rtx insn;
6109 int in_libcall_block;
6111 register rtx x = PATTERN (insn);
6112 register int i;
6113 rtx tem;
6114 register int n_sets = 0;
6116 /* Records what this insn does to set CC0. */
6117 rtx this_insn_cc0 = 0;
6118 enum machine_mode this_insn_cc0_mode;
6119 struct write_data writes_memory;
6120 static struct write_data init = {0, 0, 0, 0};
6122 rtx src_eqv = 0;
6123 struct table_elt *src_eqv_elt = 0;
6124 int src_eqv_volatile;
6125 int src_eqv_in_memory;
6126 int src_eqv_in_struct;
6127 unsigned src_eqv_hash;
6129 struct set *sets;
6131 this_insn = insn;
6132 writes_memory = init;
6134 /* Find all the SETs and CLOBBERs in this instruction.
6135 Record all the SETs in the array `sets' and count them.
6136 Also determine whether there is a CLOBBER that invalidates
6137 all memory references, or all references at varying addresses. */
6139 if (GET_CODE (insn) == CALL_INSN)
6141 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6142 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6143 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6146 if (GET_CODE (x) == SET)
6148 sets = (struct set *) alloca (sizeof (struct set));
6149 sets[0].rtl = x;
6151 /* Ignore SETs that are unconditional jumps.
6152 They never need cse processing, so this does not hurt.
6153 The reason is not efficiency but rather
6154 so that we can test at the end for instructions
6155 that have been simplified to unconditional jumps
6156 and not be misled by unchanged instructions
6157 that were unconditional jumps to begin with. */
6158 if (SET_DEST (x) == pc_rtx
6159 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6162 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6163 The hard function value register is used only once, to copy to
6164 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6165 Ensure we invalidate the destination register. On the 80386 no
6166 other code would invalidate it since it is a fixed_reg.
6167 We need not check the return of apply_change_group; see canon_reg. */
6169 else if (GET_CODE (SET_SRC (x)) == CALL)
6171 canon_reg (SET_SRC (x), insn);
6172 apply_change_group ();
6173 fold_rtx (SET_SRC (x), insn);
6174 invalidate (SET_DEST (x), VOIDmode);
6176 else
6177 n_sets = 1;
6179 else if (GET_CODE (x) == PARALLEL)
6181 register int lim = XVECLEN (x, 0);
6183 sets = (struct set *) alloca (lim * sizeof (struct set));
6185 /* Find all regs explicitly clobbered in this insn,
6186 and ensure they are not replaced with any other regs
6187 elsewhere in this insn.
6188 When a reg that is clobbered is also used for input,
6189 we should presume that that is for a reason,
6190 and we should not substitute some other register
6191 which is not supposed to be clobbered.
6192 Therefore, this loop cannot be merged into the one below
6193 because a CALL may precede a CLOBBER and refer to the
6194 value clobbered. We must not let a canonicalization do
6195 anything in that case. */
6196 for (i = 0; i < lim; i++)
6198 register rtx y = XVECEXP (x, 0, i);
6199 if (GET_CODE (y) == CLOBBER)
6201 rtx clobbered = XEXP (y, 0);
6203 if (GET_CODE (clobbered) == REG
6204 || GET_CODE (clobbered) == SUBREG)
6205 invalidate (clobbered, VOIDmode);
6206 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6207 || GET_CODE (clobbered) == ZERO_EXTRACT)
6208 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6212 for (i = 0; i < lim; i++)
6214 register rtx y = XVECEXP (x, 0, i);
6215 if (GET_CODE (y) == SET)
6217 /* As above, we ignore unconditional jumps and call-insns and
6218 ignore the result of apply_change_group. */
6219 if (GET_CODE (SET_SRC (y)) == CALL)
6221 canon_reg (SET_SRC (y), insn);
6222 apply_change_group ();
6223 fold_rtx (SET_SRC (y), insn);
6224 invalidate (SET_DEST (y), VOIDmode);
6226 else if (SET_DEST (y) == pc_rtx
6227 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6229 else
6230 sets[n_sets++].rtl = y;
6232 else if (GET_CODE (y) == CLOBBER)
6234 /* If we clobber memory, take note of that,
6235 and canon the address.
6236 This does nothing when a register is clobbered
6237 because we have already invalidated the reg. */
6238 if (GET_CODE (XEXP (y, 0)) == MEM)
6240 canon_reg (XEXP (y, 0), NULL_RTX);
6241 note_mem_written (XEXP (y, 0), &writes_memory);
6244 else if (GET_CODE (y) == USE
6245 && ! (GET_CODE (XEXP (y, 0)) == REG
6246 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6247 canon_reg (y, NULL_RTX);
6248 else if (GET_CODE (y) == CALL)
6250 /* The result of apply_change_group can be ignored; see
6251 canon_reg. */
6252 canon_reg (y, insn);
6253 apply_change_group ();
6254 fold_rtx (y, insn);
6258 else if (GET_CODE (x) == CLOBBER)
6260 if (GET_CODE (XEXP (x, 0)) == MEM)
6262 canon_reg (XEXP (x, 0), NULL_RTX);
6263 note_mem_written (XEXP (x, 0), &writes_memory);
6267 /* Canonicalize a USE of a pseudo register or memory location. */
6268 else if (GET_CODE (x) == USE
6269 && ! (GET_CODE (XEXP (x, 0)) == REG
6270 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6271 canon_reg (XEXP (x, 0), NULL_RTX);
6272 else if (GET_CODE (x) == CALL)
6274 /* The result of apply_change_group can be ignored; see canon_reg. */
6275 canon_reg (x, insn);
6276 apply_change_group ();
6277 fold_rtx (x, insn);
6280 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6281 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6282 is handled specially for this case, and if it isn't set, then there will
6283 be no equivalence for the destination. */
6284 if (n_sets == 1 && REG_NOTES (insn) != 0
6285 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6286 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6287 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6288 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6290 /* Canonicalize sources and addresses of destinations.
6291 We do this in a separate pass to avoid problems when a MATCH_DUP is
6292 present in the insn pattern. In that case, we want to ensure that
6293 we don't break the duplicate nature of the pattern. So we will replace
6294 both operands at the same time. Otherwise, we would fail to find an
6295 equivalent substitution in the loop calling validate_change below.
6297 We used to suppress canonicalization of DEST if it appears in SRC,
6298 but we don't do this any more. */
6300 for (i = 0; i < n_sets; i++)
6302 rtx dest = SET_DEST (sets[i].rtl);
6303 rtx src = SET_SRC (sets[i].rtl);
6304 rtx new = canon_reg (src, insn);
6306 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6307 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6308 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6309 || insn_n_dups[recog_memoized (insn)] > 0)
6310 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6311 else
6312 SET_SRC (sets[i].rtl) = new;
6314 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6316 validate_change (insn, &XEXP (dest, 1),
6317 canon_reg (XEXP (dest, 1), insn), 1);
6318 validate_change (insn, &XEXP (dest, 2),
6319 canon_reg (XEXP (dest, 2), insn), 1);
6322 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6323 || GET_CODE (dest) == ZERO_EXTRACT
6324 || GET_CODE (dest) == SIGN_EXTRACT)
6325 dest = XEXP (dest, 0);
6327 if (GET_CODE (dest) == MEM)
6328 canon_reg (dest, insn);
6331 /* Now that we have done all the replacements, we can apply the change
6332 group and see if they all work. Note that this will cause some
6333 canonicalizations that would have worked individually not to be applied
6334 because some other canonicalization didn't work, but this should not
6335 occur often.
6337 The result of apply_change_group can be ignored; see canon_reg. */
6339 apply_change_group ();
6341 /* Set sets[i].src_elt to the class each source belongs to.
6342 Detect assignments from or to volatile things
6343 and set sets[i] to zero so they will be ignored
6344 in the rest of this function.
6346 Nothing in this loop changes the hash table or the register chains. */
6348 for (i = 0; i < n_sets; i++)
6350 register rtx src, dest;
6351 register rtx src_folded;
6352 register struct table_elt *elt = 0, *p;
6353 enum machine_mode mode;
6354 rtx src_eqv_here;
6355 rtx src_const = 0;
6356 rtx src_related = 0;
6357 struct table_elt *src_const_elt = 0;
6358 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6359 int src_related_cost = 10000, src_elt_cost = 10000;
6360 /* Set non-zero if we need to call force_const_mem with the
6361 contents of src_folded before using it. */
6362 int src_folded_force_flag = 0;
6364 dest = SET_DEST (sets[i].rtl);
6365 src = SET_SRC (sets[i].rtl);
6367 /* If SRC is a constant that has no machine mode,
6368 hash it with the destination's machine mode.
6369 This way we can keep different modes separate. */
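/* E.g., (const_int 1) is VOIDmode, so a QImode set from it and an
   SImode set from it hash with their destinations' modes and land in
   separate equivalence classes.  */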
6371 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6372 sets[i].mode = mode;
6374 if (src_eqv)
6376 enum machine_mode eqvmode = mode;
6377 if (GET_CODE (dest) == STRICT_LOW_PART)
6378 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6379 do_not_record = 0;
6380 hash_arg_in_memory = 0;
6381 hash_arg_in_struct = 0;
6382 src_eqv = fold_rtx (src_eqv, insn);
6383 src_eqv_hash = HASH (src_eqv, eqvmode);
6385 /* Find the equivalence class for the equivalent expression. */
6387 if (!do_not_record)
6388 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6390 src_eqv_volatile = do_not_record;
6391 src_eqv_in_memory = hash_arg_in_memory;
6392 src_eqv_in_struct = hash_arg_in_struct;
6395 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6396 value of the INNER register, not the destination. So it is not
6397 a valid substitution for the source. But save it for later. */
6398 if (GET_CODE (dest) == STRICT_LOW_PART)
6399 src_eqv_here = 0;
6400 else
6401 src_eqv_here = src_eqv;
6403 /* Simplify any foldable subexpressions in SRC.  Then get the fully-
6404 simplified result, which may not necessarily be valid. */
6405 src_folded = fold_rtx (src, insn);
6407 #if 0
6408 /* ??? This caused bad code to be generated for the m68k port with -O2.
6409 Suppose src is (CONST_INT -1), and that after truncation src_folded
6410 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6411 At the end we will add src and src_const to the same equivalence
6412 class. We now have 3 and -1 on the same equivalence class. This
6413 causes later instructions to be mis-optimized. */
6414 /* If storing a constant in a bitfield, pre-truncate the constant
6415 so we will be able to record it later. */
6416 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6417 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6419 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6421 if (GET_CODE (src) == CONST_INT
6422 && GET_CODE (width) == CONST_INT
6423 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6424 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6425 src_folded
6426 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6427 << INTVAL (width)) - 1));
6429 #endif
6431 /* Compute SRC's hash code, and also notice if it
6432 should not be recorded at all. In that case,
6433 prevent any further processing of this assignment. */
6434 do_not_record = 0;
6435 hash_arg_in_memory = 0;
6436 hash_arg_in_struct = 0;
6438 sets[i].src = src;
6439 sets[i].src_hash = HASH (src, mode);
6440 sets[i].src_volatile = do_not_record;
6441 sets[i].src_in_memory = hash_arg_in_memory;
6442 sets[i].src_in_struct = hash_arg_in_struct;
6444 #if 0
6445 /* It is no longer clear why we used to do this, but it doesn't
6446 appear to still be needed. So let's try without it since this
6447 code hurts cse'ing widened ops. */
6448 /* If source is a perverse subreg (such as QI treated as an SI),
6449 treat it as volatile. It may do the work of an SI in one context
6450 where the extra bits are not being used, but cannot replace an SI
6451 in general. */
6452 if (GET_CODE (src) == SUBREG
6453 && (GET_MODE_SIZE (GET_MODE (src))
6454 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6455 sets[i].src_volatile = 1;
6456 #endif
6458 /* Locate all possible equivalent forms for SRC. Try to replace
6459 SRC in the insn with each cheaper equivalent.
6461 We have the following types of equivalents: SRC itself, a folded
6462 version, a value given in a REG_EQUAL note, or a value related
6463 to a constant.
6465 Each of these equivalents may be part of an additional class
6466 of equivalents (if more than one is in the table, they must be in
6467 the same class; we check for this).
6469 If the source is volatile, we don't do any table lookups.
6471 We note any constant equivalent for possible later use in a
6472 REG_NOTE. */
6474 if (!sets[i].src_volatile)
6475 elt = lookup (src, sets[i].src_hash, mode);
6477 sets[i].src_elt = elt;
6479 if (elt && src_eqv_here && src_eqv_elt)
6481 if (elt->first_same_value != src_eqv_elt->first_same_value)
6483 /* The REG_EQUAL is indicating that two formerly distinct
6484 classes are now equivalent. So merge them. */
6485 merge_equiv_classes (elt, src_eqv_elt);
6486 src_eqv_hash = HASH (src_eqv, elt->mode);
6487 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6490 src_eqv_here = 0;
6493 else if (src_eqv_elt)
6494 elt = src_eqv_elt;
6496 /* Try to find a constant somewhere and record it in `src_const'.
6497 Record its table element, if any, in `src_const_elt'. Look in
6498 any known equivalences first. (If the constant is not in the
6499 table, also set `sets[i].src_const_hash'). */
6500 if (elt)
6501 for (p = elt->first_same_value; p; p = p->next_same_value)
6502 if (p->is_const)
6504 src_const = p->exp;
6505 src_const_elt = elt;
6506 break;
6509 if (src_const == 0
6510 && (CONSTANT_P (src_folded)
6511 /* Consider (minus (label_ref L1) (label_ref L2)) as
6512 "constant" here so we will record it. This allows us
6513 to fold switch statements when an ADDR_DIFF_VEC is used. */
6514 || (GET_CODE (src_folded) == MINUS
6515 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6516 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6517 src_const = src_folded, src_const_elt = elt;
6518 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6519 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6521 /* If we don't know if the constant is in the table, get its
6522 hash code and look it up. */
6523 if (src_const && src_const_elt == 0)
6525 sets[i].src_const_hash = HASH (src_const, mode);
6526 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6529 sets[i].src_const = src_const;
6530 sets[i].src_const_elt = src_const_elt;
6532 /* If the constant and our source are both in the table, mark them as
6533 equivalent. Otherwise, if a constant is in the table but the source
6534 isn't, set ELT to it. */
6535 if (src_const_elt && elt
6536 && src_const_elt->first_same_value != elt->first_same_value)
6537 merge_equiv_classes (elt, src_const_elt);
6538 else if (src_const_elt && elt == 0)
6539 elt = src_const_elt;
6541 /* See if there is a register linearly related to a constant
6542 equivalent of SRC. */
6543 if (src_const
6544 && (GET_CODE (src_const) == CONST
6545 || (src_const_elt && src_const_elt->related_value != 0)))
6547 src_related = use_related_value (src_const, src_const_elt);
6548 if (src_related)
6550 struct table_elt *src_related_elt
6551 = lookup (src_related, HASH (src_related, mode), mode);
6552 if (src_related_elt && elt)
6554 if (elt->first_same_value
6555 != src_related_elt->first_same_value)
6556 /* This can occur when we previously saw a CONST
6557 involving a SYMBOL_REF and then see the SYMBOL_REF
6558 twice. Merge the involved classes. */
6559 merge_equiv_classes (elt, src_related_elt);
6561 src_related = 0;
6562 src_related_elt = 0;
6564 else if (src_related_elt && elt == 0)
6565 elt = src_related_elt;
6569 /* See if we have a CONST_INT that is already in a register in a
6570 wider mode. */
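/* Sketch (hypothetical pseudo): if (reg:SI r) is known to hold
   (const_int 5) and we need (const_int 5) in HImode, the loop below
   finds it and sets src_related to (subreg:HI (reg:SI r) 0), which
   may be cheaper than materializing the constant again.  */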
6572 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6573 && GET_MODE_CLASS (mode) == MODE_INT
6574 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6576 enum machine_mode wider_mode;
6578 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6579 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6580 && src_related == 0;
6581 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6583 struct table_elt *const_elt
6584 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6586 if (const_elt == 0)
6587 continue;
6589 for (const_elt = const_elt->first_same_value;
6590 const_elt; const_elt = const_elt->next_same_value)
6591 if (GET_CODE (const_elt->exp) == REG)
6593 src_related = gen_lowpart_if_possible (mode,
6594 const_elt->exp);
6595 break;
6600 /* Another possibility is that we have an AND with a constant in
6601 a mode narrower than a word. If so, it might have been generated
6602 as part of an "if" which would narrow the AND. If we already
6603 have done the AND in a wider mode, we can use a SUBREG of that
6604 value. */
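/* Sketch (hypothetical pseudos): if (and:SI (reg:SI r1) (const_int 3))
   was already computed into r2, then a narrower (and:HI x (const_int 3)),
   where x is the low part of r1, can instead use
   (subreg:HI (reg:SI r2) 0).  */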
6606 if (flag_expensive_optimizations && ! src_related
6607 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6608 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6610 enum machine_mode tmode;
6611 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6613 for (tmode = GET_MODE_WIDER_MODE (mode);
6614 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6615 tmode = GET_MODE_WIDER_MODE (tmode))
6617 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6618 struct table_elt *larger_elt;
6620 if (inner)
6622 PUT_MODE (new_and, tmode);
6623 XEXP (new_and, 0) = inner;
6624 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6625 if (larger_elt == 0)
6626 continue;
6628 for (larger_elt = larger_elt->first_same_value;
6629 larger_elt; larger_elt = larger_elt->next_same_value)
6630 if (GET_CODE (larger_elt->exp) == REG)
6632 src_related
6633 = gen_lowpart_if_possible (mode, larger_elt->exp);
6634 break;
6637 if (src_related)
6638 break;
6643 #ifdef LOAD_EXTEND_OP
6644 /* See if a MEM has already been loaded with a widening operation;
6645 if it has, we can use a subreg of that. Many CISC machines
6646 also have such operations, but this is only likely to be
6647 beneficial on these machines.  */
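/* Sketch: on a target where QImode loads zero-extend, i.e.
   LOAD_EXTEND_OP (QImode) == ZERO_EXTEND, if
   (zero_extend:SI (mem:QI addr)) already lives in (reg:SI r), a fresh
   (mem:QI addr) can become (subreg:QI (reg:SI r) 0) instead of a load.  */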
6649 if (flag_expensive_optimizations && src_related == 0
6650 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6651 && GET_MODE_CLASS (mode) == MODE_INT
6652 && GET_CODE (src) == MEM && ! do_not_record
6653 && LOAD_EXTEND_OP (mode) != NIL)
6655 enum machine_mode tmode;
6657 /* Set what we are trying to extend and the operation it might
6658 have been extended with. */
6659 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6660 XEXP (memory_extend_rtx, 0) = src;
6662 for (tmode = GET_MODE_WIDER_MODE (mode);
6663 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6664 tmode = GET_MODE_WIDER_MODE (tmode))
6666 struct table_elt *larger_elt;
6668 PUT_MODE (memory_extend_rtx, tmode);
6669 larger_elt = lookup (memory_extend_rtx,
6670 HASH (memory_extend_rtx, tmode), tmode);
6671 if (larger_elt == 0)
6672 continue;
6674 for (larger_elt = larger_elt->first_same_value;
6675 larger_elt; larger_elt = larger_elt->next_same_value)
6676 if (GET_CODE (larger_elt->exp) == REG)
6678 src_related = gen_lowpart_if_possible (mode,
6679 larger_elt->exp);
6680 break;
6683 if (src_related)
6684 break;
6687 #endif /* LOAD_EXTEND_OP */
6689 if (src == src_folded)
6690 src_folded = 0;
6692 /* At this point, ELT, if non-zero, points to a class of expressions
6693 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6694 and SRC_RELATED, if non-zero, each contain additional equivalent
6695 expressions. Prune these latter expressions by deleting expressions
6696 already in the equivalence class.
6698 Check for an equivalent identical to the destination. If found,
6699 this is the preferred equivalent since it will likely lead to
6700 elimination of the insn. Indicate this by placing it in
6701 `src_related'. */
6703 if (elt) elt = elt->first_same_value;
6704 for (p = elt; p; p = p->next_same_value)
6706 enum rtx_code code = GET_CODE (p->exp);
6708 /* If the expression is not valid, ignore it. Then we do not
6709 have to check for validity below. In most cases, we can use
6710 `rtx_equal_p', since canonicalization has already been done. */
6711 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6712 continue;
6714 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6715 src = 0;
6716 else if (src_folded && GET_CODE (src_folded) == code
6717 && rtx_equal_p (src_folded, p->exp))
6718 src_folded = 0;
6719 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6720 && rtx_equal_p (src_eqv_here, p->exp))
6721 src_eqv_here = 0;
6722 else if (src_related && GET_CODE (src_related) == code
6723 && rtx_equal_p (src_related, p->exp))
6724 src_related = 0;
6726 /* If this is the same as the destination of the insn, we want
6727 to prefer it. Copy it to src_related. The code below will
6728 then give it a negative cost. */
6729 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6730 src_related = dest;
6734 /* Find the cheapest valid equivalent, trying all the available
6735 possibilities. Prefer items not in the hash table to ones
6736 that are when they are equal cost. Note that we can never
6737 worsen an insn as the current contents will also succeed.
6738 If we find an equivalent identical to the destination, use it as best,
6739 since this insn will probably be eliminated in that case. */
6740 if (src)
6742 if (rtx_equal_p (src, dest))
6743 src_cost = -1;
6744 else
6745 src_cost = COST (src);
6748 if (src_eqv_here)
6750 if (rtx_equal_p (src_eqv_here, dest))
6751 src_eqv_cost = -1;
6752 else
6753 src_eqv_cost = COST (src_eqv_here);
6756 if (src_folded)
6758 if (rtx_equal_p (src_folded, dest))
6759 src_folded_cost = -1;
6760 else
6761 src_folded_cost = COST (src_folded);
6764 if (src_related)
6766 if (rtx_equal_p (src_related, dest))
6767 src_related_cost = -1;
6768 else
6769 src_related_cost = COST (src_related);
6772 /* If this was an indirect jump insn, a known label will really be
6773 cheaper even though it looks more expensive. */
6774 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6775 src_folded = src_const, src_folded_cost = -1;
6777 /* Terminate loop when replacement made. This must terminate since
6778 the current contents will be tested and will always be valid. */
6779 while (1)
6781 rtx trial;
6783 /* Skip invalid entries. */
6784 while (elt && GET_CODE (elt->exp) != REG
6785 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6786 elt = elt->next_same_value;
6788 if (elt) src_elt_cost = elt->cost;
6790 /* Find cheapest and skip it for the next time. For items
6791 of equal cost, use this order:
6792 src_folded, src, src_eqv, src_related and hash table entry. */
6793 if (src_folded_cost <= src_cost
6794 && src_folded_cost <= src_eqv_cost
6795 && src_folded_cost <= src_related_cost
6796 && src_folded_cost <= src_elt_cost)
6798 trial = src_folded, src_folded_cost = 10000;
6799 if (src_folded_force_flag)
6800 trial = force_const_mem (mode, trial);
6802 else if (src_cost <= src_eqv_cost
6803 && src_cost <= src_related_cost
6804 && src_cost <= src_elt_cost)
6805 trial = src, src_cost = 10000;
6806 else if (src_eqv_cost <= src_related_cost
6807 && src_eqv_cost <= src_elt_cost)
6808 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6809 else if (src_related_cost <= src_elt_cost)
6810 trial = copy_rtx (src_related), src_related_cost = 10000;
6811 else
6813 trial = copy_rtx (elt->exp);
6814 elt = elt->next_same_value;
6815 src_elt_cost = 10000;
6818 /* We don't normally have an insn matching (set (pc) (pc)), so
6819 check for this separately here. We will delete such an
6820 insn below.
6822 Tablejump insns contain a USE of the table, so simply replacing
6823 the operand with the constant won't match. This is simply an
6824 unconditional branch, however, and is therefore valid. Just
6825 insert the substitution here and we will delete and re-emit
6826 the insn later. */
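/* E.g., a casesi jump whose index folded to a constant now targets a
   single table label; its PATTERN still carries a USE of the table, so
   the LABEL_REF is installed here and the insn is re-emitted as a
   plain jump further down.  */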
6828 if (n_sets == 1 && dest == pc_rtx
6829 && (trial == pc_rtx
6830 || (GET_CODE (trial) == LABEL_REF
6831 && ! condjump_p (insn))))
6833 /* If TRIAL is a label in front of a jump table, we are
6834 really falling through the switch (this is how casesi
6835 insns work), so we must branch around the table. */
6836 if (GET_CODE (trial) == CODE_LABEL
6837 && NEXT_INSN (trial) != 0
6838 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6839 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6840 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6842 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6844 SET_SRC (sets[i].rtl) = trial;
6845 cse_jumps_altered = 1;
6846 break;
6849 /* Look for a substitution that makes a valid insn. */
6850 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6852 /* The result of apply_change_group can be ignored; see
6853 canon_reg. */
6855 validate_change (insn, &SET_SRC (sets[i].rtl),
6856 canon_reg (SET_SRC (sets[i].rtl), insn),
1);
6858 apply_change_group ();
6859 break;
6862 /* If we previously found constant pool entries for
6863 constants and this is a constant, try making a
6864 pool entry. Put it in src_folded unless we already have done
6865 this since that is where it likely came from. */
6867 else if (constant_pool_entries_cost
6868 && CONSTANT_P (trial)
6869 && ! (GET_CODE (trial) == CONST
6870 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
6871 && (src_folded == 0
6872 || (GET_CODE (src_folded) != MEM
6873 && ! src_folded_force_flag))
6874 && GET_MODE_CLASS (mode) != MODE_CC)
6876 src_folded_force_flag = 1;
6877 src_folded = trial;
6878 src_folded_cost = constant_pool_entries_cost;
6882 src = SET_SRC (sets[i].rtl);
6884 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6885 However, there is an important exception: If both are registers
6886 that are not the head of their equivalence class, replace SET_SRC
6887 with the head of the class. If we do not do this, we will have
6888 both registers live over a portion of the basic block. This way,
6889 their lifetimes will likely abut instead of overlapping. */
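/* Illustration (hypothetical pseudos, r3 the class head): an insn
   simplified above to `r5 = r5' is rewritten as `r5 = r3', so r5 need
   not stay live alongside r3 through the block; their lifetimes can
   meet end-to-end.  */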
6890 if (GET_CODE (dest) == REG
6891 && REGNO_QTY_VALID_P (REGNO (dest))
6892 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6893 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6894 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6895 /* Don't do this if the original insn had a hard reg as
6896 SET_SRC. */
6897 && (GET_CODE (sets[i].src) != REG
6898 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6899 /* We can't call canon_reg here because it won't do anything if
6900 SRC is a hard register. */
6902 int first = qty_first_reg[reg_qty[REGNO (src)]];
6904 src = SET_SRC (sets[i].rtl)
6905 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6906 : gen_rtx (REG, GET_MODE (src), first);
6908 /* If we had a constant that is cheaper than what we are now
6909 setting SRC to, use that constant. We ignored it when we
6910 thought we could make this into a no-op. */
6911 if (src_const && COST (src_const) < COST (src)
6912 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6913 src = src_const;
6916 /* If we made a change, recompute SRC values. */
6917 if (src != sets[i].src)
6919 do_not_record = 0;
6920 hash_arg_in_memory = 0;
6921 hash_arg_in_struct = 0;
6922 sets[i].src = src;
6923 sets[i].src_hash = HASH (src, mode);
6924 sets[i].src_volatile = do_not_record;
6925 sets[i].src_in_memory = hash_arg_in_memory;
6926 sets[i].src_in_struct = hash_arg_in_struct;
6927 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6930 /* If this is a single SET, we are setting a register, and we have an
6931 equivalent constant, we want to add a REG_NOTE. We don't want
6932 to write a REG_EQUAL note for a constant pseudo since verifying that
6933 that pseudo hasn't been eliminated is a pain. Such a note also
6934 won't help anything. */
6935 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6936 && GET_CODE (src_const) != REG)
6938 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6940 /* Record the actual constant value in a REG_EQUAL note, making
6941 a new one if one does not already exist. */
6942 if (tem)
6943 XEXP (tem, 0) = src_const;
6944 else
6945 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6946 src_const, REG_NOTES (insn));
6948 /* If storing a constant value in a register that
6949 previously held the constant value 0,
6950 record this fact with a REG_WAS_0 note on this insn.
6952 Note that the *register* is required to have previously held 0,
6953 not just any register in the quantity and we must point to the
6954 insn that set that register to zero.
6956 Rather than track each register individually, we just see if
6957 the last set for this quantity was for this register. */
6959 if (REGNO_QTY_VALID_P (REGNO (dest))
6960 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6962 /* See if we previously had a REG_WAS_0 note. */
6963 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6964 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6966 if ((tem = single_set (const_insn)) != 0
6967 && rtx_equal_p (SET_DEST (tem), dest))
6969 if (note)
6970 XEXP (note, 0) = const_insn;
6971 else
6972 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6973 const_insn, REG_NOTES (insn));
6978 /* Now deal with the destination. */
6979 do_not_record = 0;
6980 sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
6982 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6983 to the MEM or REG within it. */
6984 while (GET_CODE (dest) == SIGN_EXTRACT
6985 || GET_CODE (dest) == ZERO_EXTRACT
6986 || GET_CODE (dest) == SUBREG
6987 || GET_CODE (dest) == STRICT_LOW_PART)
6989 sets[i].inner_dest_loc = &XEXP (dest, 0);
6990 dest = XEXP (dest, 0);
6993 sets[i].inner_dest = dest;
6995 if (GET_CODE (dest) == MEM)
6997 dest = fold_rtx (dest, insn);
6999 /* Decide whether we invalidate everything in memory,
7000 or just things at non-fixed places.
7001 Writing a large aggregate must invalidate everything
7002 because we don't know how long it is. */
7003 note_mem_written (dest, &writes_memory);
7006 /* Compute the hash code of the destination now,
7007 before the effects of this instruction are recorded,
7008 since the register values used in the address computation
7009 are those before this instruction. */
7010 sets[i].dest_hash = HASH (dest, mode);
7012 /* Don't enter a bit-field in the hash table
7013 because the value in it after the store
7014 may not equal what was stored, due to truncation. */
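/* E.g., storing (const_int 5) into a 2-bit field leaves the value 1,
   so recording the destination as equal to 5 would be wrong; the
   check below admits only constants that survive the truncation.  */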
7016 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7017 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7019 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7021 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7022 && GET_CODE (width) == CONST_INT
7023 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7024 && ! (INTVAL (src_const)
7025 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7026 /* Exception: if the value is constant,
7027 and it won't be truncated, record it. */
7029 else
7031 /* This is chosen so that the destination will be invalidated
7032 but no new value will be recorded.
7033 We must invalidate because sometimes constant
7034 values can be recorded for bitfields. */
7035 sets[i].src_elt = 0;
7036 sets[i].src_volatile = 1;
7037 src_eqv = 0;
7038 src_eqv_elt = 0;
7042 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7043 the insn. */
7044 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7046 PUT_CODE (insn, NOTE);
7047 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7048 NOTE_SOURCE_FILE (insn) = 0;
7049 cse_jumps_altered = 1;
7050 /* One less use of the label this insn used to jump to. */
7051 --LABEL_NUSES (JUMP_LABEL (insn));
7052 /* No more processing for this set. */
7053 sets[i].rtl = 0;
7056 /* If this SET is now setting PC to a label, we know it used to
7057 be a conditional or computed branch. So we see if we can follow
7058 it. If it was a computed branch, delete it and re-emit. */
7059 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7061 rtx p;
7063 /* If this is not in the format for a simple branch and
7064 we are the only SET in it, re-emit it. */
7065 if (! simplejump_p (insn) && n_sets == 1)
7067 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7068 JUMP_LABEL (new) = XEXP (src, 0);
7069 LABEL_NUSES (XEXP (src, 0))++;
7070 delete_insn (insn);
7071 insn = new;
7073 else
7074 /* Otherwise, force rerecognition, since it probably had
7075 a different pattern before.
7076 This shouldn't really be necessary, since whatever
7077 changed the source value above should have done this.
7078 Until the right place is found, might as well do this here. */
7079 INSN_CODE (insn) = -1;
7081 /* Now that we've converted this jump to an unconditional jump,
7082 there is dead code after it. Delete the dead code until we
7083 reach a BARRIER, the end of the function, or a label. Do
7084 not delete NOTEs except for NOTE_INSN_DELETED since later
7085 phases assume these notes are retained. */
7087 p = insn;
7089 while (NEXT_INSN (p) != 0
7090 && GET_CODE (NEXT_INSN (p)) != BARRIER
7091 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7093 if (GET_CODE (NEXT_INSN (p)) != NOTE
7094 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7095 delete_insn (NEXT_INSN (p));
7096 else
7097 p = NEXT_INSN (p);
7100 /* If we don't have a BARRIER immediately after INSN, put one there.
7101 Much code assumes that there are no NOTEs between a JUMP_INSN and
7102 BARRIER. */
7104 if (NEXT_INSN (insn) == 0
7105 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7106 emit_barrier_before (NEXT_INSN (insn));
7108 /* We might have two BARRIERs separated by notes. Delete the second
7109 one if so. */
7111 if (p != insn && NEXT_INSN (p) != 0
7112 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7113 delete_insn (NEXT_INSN (p));
7115 cse_jumps_altered = 1;
7116 sets[i].rtl = 0;
7119 /* If destination is volatile, invalidate it and then do no further
7120 processing for this assignment. */
7122 else if (do_not_record)
7124 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7125 || GET_CODE (dest) == MEM)
7126 invalidate (dest, VOIDmode);
7127 else if (GET_CODE (dest) == STRICT_LOW_PART
7128 || GET_CODE (dest) == ZERO_EXTRACT)
7129 invalidate (XEXP (dest, 0), GET_MODE (dest));
7130 sets[i].rtl = 0;
7133 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7134 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7136 #ifdef HAVE_cc0
7137 /* If setting CC0, record what it was set to, or a constant, if it
7138 is equivalent to a constant. If it is being set to a floating-point
7139 value, make a COMPARE with the appropriate constant of 0. If we
7140 don't do this, later code can interpret this as a test against
7141 const0_rtx, which can cause problems if we try to put it into an
7142 insn as a floating-point operand. */
7143 if (dest == cc0_rtx)
7145 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7146 this_insn_cc0_mode = mode;
7147 if (FLOAT_MODE_P (mode))
7148 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
7149 CONST0_RTX (mode));
7151 #endif
7154 /* Now enter all non-volatile source expressions in the hash table
7155 if they are not already present.
7156 Record their equivalence classes in src_elt.
7157 This way we can insert the corresponding destinations into
7158 the same classes even if the actual sources are no longer in them
7159 (having been invalidated). */
7161 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7162 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7164 register struct table_elt *elt;
7165 register struct table_elt *classp = sets[0].src_elt;
7166 rtx dest = SET_DEST (sets[0].rtl);
7167 enum machine_mode eqvmode = GET_MODE (dest);
7169 if (GET_CODE (dest) == STRICT_LOW_PART)
7171 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7172 classp = 0;
7174 if (insert_regs (src_eqv, classp, 0))
7176 rehash_using_reg (src_eqv);
7177 src_eqv_hash = HASH (src_eqv, eqvmode);
7179 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7180 elt->in_memory = src_eqv_in_memory;
7181 elt->in_struct = src_eqv_in_struct;
7182 src_eqv_elt = elt;
7184 /* Check to see if src_eqv_elt is the same as a set source which
7185 does not yet have an elt, and if so set the elt of the set source
7186 to src_eqv_elt. */
7187 for (i = 0; i < n_sets; i++)
7188 if (sets[i].rtl && sets[i].src_elt == 0
7189 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7190 sets[i].src_elt = src_eqv_elt;
7193 for (i = 0; i < n_sets; i++)
7194 if (sets[i].rtl && ! sets[i].src_volatile
7195 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7197 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7199 /* REG_EQUAL in setting a STRICT_LOW_PART
7200 gives an equivalent for the entire destination register,
7201 not just for the subreg being stored in now.
7202 This is a more interesting equivalence, so we arrange later
7203 to treat the entire reg as the destination. */
7204 sets[i].src_elt = src_eqv_elt;
7205 sets[i].src_hash = src_eqv_hash;
7207 else
7209 /* Insert source and constant equivalent into hash table, if not
7210 already present. */
7211 register struct table_elt *classp = src_eqv_elt;
7212 register rtx src = sets[i].src;
7213 register rtx dest = SET_DEST (sets[i].rtl);
7214 enum machine_mode mode
7215 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7217 if (sets[i].src_elt == 0)
7219 register struct table_elt *elt;
7221 /* Note that these insert_regs calls cannot remove
7222 any of the src_elt's, because they would have failed to
7223 match if not still valid. */
7224 if (insert_regs (src, classp, 0))
7226 rehash_using_reg (src);
7227 sets[i].src_hash = HASH (src, mode);
7229 elt = insert (src, classp, sets[i].src_hash, mode);
7230 elt->in_memory = sets[i].src_in_memory;
7231 elt->in_struct = sets[i].src_in_struct;
7232 sets[i].src_elt = classp = elt;
7235 if (sets[i].src_const && sets[i].src_const_elt == 0
7236 && src != sets[i].src_const
7237 && ! rtx_equal_p (sets[i].src_const, src))
7238 sets[i].src_elt = insert (sets[i].src_const, classp,
7239 sets[i].src_const_hash, mode);
7242 else if (sets[i].src_elt == 0)
7243 /* If we did not insert the source into the hash table (e.g., it was
7244 volatile), note the equivalence class for the REG_EQUAL value, if any,
7245 so that the destination goes into that class. */
7246 sets[i].src_elt = src_eqv_elt;
7248 invalidate_from_clobbers (&writes_memory, x);
7250 /* Some registers are invalidated by subroutine calls. Memory is
7251 invalidated by non-constant calls. */
7253 if (GET_CODE (insn) == CALL_INSN)
7255 static struct write_data everything = {0, 1, 1, 1};
7257 if (! CONST_CALL_P (insn))
7258 invalidate_memory (&everything);
7259 invalidate_for_call ();
7262 /* Now invalidate everything set by this instruction.
7263 If a SUBREG or other funny destination is being set,
7264 sets[i].rtl is still nonzero, so here we invalidate the reg
7265 a part of which is being set. */
7267 for (i = 0; i < n_sets; i++)
7268 if (sets[i].rtl)
7270 /* We can't use the inner dest, because the mode associated with
7271 a ZERO_EXTRACT is significant. */
7272 register rtx dest = SET_DEST (sets[i].rtl);
7274 /* Needed for registers to remove the register from its
7275 previous quantity's chain.
7276 Needed for memory if this is a nonvarying address, unless
7277 we have just done an invalidate_memory that covers even those. */
7278 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7279 || (GET_CODE (dest) == MEM && ! writes_memory.all
7280 && ! cse_rtx_addr_varies_p (dest)))
7281 invalidate (dest, VOIDmode);
7282 else if (GET_CODE (dest) == STRICT_LOW_PART
7283 || GET_CODE (dest) == ZERO_EXTRACT)
7284 invalidate (XEXP (dest, 0), GET_MODE (dest));
7287 /* Make sure registers mentioned in destinations
7288 are safe for use in an expression to be inserted.
7289 This removes from the hash table
7290 any invalid entry that refers to one of these registers.
7292 We don't care about the return value from mention_regs because
7293 we are going to hash the SET_DEST values unconditionally. */
7295 for (i = 0; i < n_sets; i++)
7296 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7297 mention_regs (SET_DEST (sets[i].rtl));
7299 /* We may have just removed some of the src_elt's from the hash table.
7300 So replace each one with the current head of the same class. */
7302 for (i = 0; i < n_sets; i++)
7303 if (sets[i].rtl)
7305 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7306 /* If elt was removed, find current head of same class,
7307 or 0 if nothing remains of that class. */
7309 register struct table_elt *elt = sets[i].src_elt;
7311 while (elt && elt->prev_same_value)
7312 elt = elt->prev_same_value;
7314 while (elt && elt->first_same_value == 0)
7315 elt = elt->next_same_value;
7316 sets[i].src_elt = elt ? elt->first_same_value : 0;
7320 /* Now insert the destinations into their equivalence classes. */
7322 for (i = 0; i < n_sets; i++)
7323 if (sets[i].rtl)
7325 register rtx dest = SET_DEST (sets[i].rtl);
7326 register struct table_elt *elt;
7328 /* Don't record value if we are not supposed to risk allocating
7329 floating-point values in registers that might be wider than
7330 memory. */
7331 if ((flag_float_store
7332 && GET_CODE (dest) == MEM
7333 && FLOAT_MODE_P (GET_MODE (dest)))
7334 /* Don't record values of destinations set inside a libcall block
7335 since we might delete the libcall. Things should have been set
7336 up so we won't want to reuse such a value, but we play it safe
7337 here. */
7338 || in_libcall_block
7339 /* If we didn't put a REG_EQUAL value or a source into the hash
7340 table, there is no point in recording DEST.  */
7341 || sets[i].src_elt == 0
7342 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7343 or SIGN_EXTEND, don't record DEST since it can cause
7344 some tracking to be wrong.
7346 ??? Think about this more later. */
7347 || (GET_CODE (dest) == SUBREG
7348 && (GET_MODE_SIZE (GET_MODE (dest))
7349 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7350 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7351 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7352 continue;
7354 /* STRICT_LOW_PART isn't part of the value BEING set,
7355 and neither is the SUBREG inside it.
7356 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7357 if (GET_CODE (dest) == STRICT_LOW_PART)
7358 dest = SUBREG_REG (XEXP (dest, 0));
7360 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7361 /* Registers must also be inserted into chains for quantities. */
7362 if (insert_regs (dest, sets[i].src_elt, 1))
7364 /* If `insert_regs' changes something, the hash code must be
7365 recalculated. */
7366 rehash_using_reg (dest);
7367 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7370 elt = insert (dest, sets[i].src_elt,
7371 sets[i].dest_hash, GET_MODE (dest));
7372 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7373 && ! RTX_UNCHANGING_P (sets[i].inner_dest));
7375 if (elt->in_memory)
7377 /* This implicitly assumes a whole struct
7378 need not have MEM_IN_STRUCT_P.
7379 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7380 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7381 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7384 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7385 narrower than M2, and both M1 and M2 are the same number of words,
7386 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7387 make that equivalence as well.
7389 However, BAR may have equivalences for which gen_lowpart_if_possible
7390 will produce a simpler value than it does when applied directly to
7391 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7392 BAR's equivalences. If we don't get a simplified form, make
7393 the SUBREG. It will not be used in an equivalence, but will
7394 cause two similar assignments to be detected.
7396 Note the loop below will find SUBREG_REG (DEST) since we have
7397 already entered SRC and DEST of the SET in the table. */
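/* An illustrative sketch (hypothetical modes and registers, not taken
   from any particular target): on a machine with 64-bit words,

	(set (subreg:DI (reg:SI 100) 0) (bar:DI))

   has SImode and DImode occupying one word each, with DImode no
   narrower, so we would also record the equivalence

	(set (reg:SI 100) (subreg:SI (bar:DI) 0)).  */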
7399 if (GET_CODE (dest) == SUBREG
7400 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7401 / UNITS_PER_WORD)
7402 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7403 && (GET_MODE_SIZE (GET_MODE (dest))
7404 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7405 && sets[i].src_elt != 0)
7407 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7408 struct table_elt *elt, *classp = 0;
7410 for (elt = sets[i].src_elt->first_same_value; elt;
7411 elt = elt->next_same_value)
7413 rtx new_src = 0;
7414 unsigned src_hash;
7415 struct table_elt *src_elt;
7417 /* Ignore invalid entries. */
7418 if (GET_CODE (elt->exp) != REG
7419 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7420 continue;
7422 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7423 if (new_src == 0)
7424 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
7426 src_hash = HASH (new_src, new_mode);
7427 src_elt = lookup (new_src, src_hash, new_mode);
7429 /* Put the new source in the hash table if it isn't
7430 already. */
7431 if (src_elt == 0)
7433 if (insert_regs (new_src, classp, 0))
7435 rehash_using_reg (new_src);
7436 src_hash = HASH (new_src, new_mode);
7438 src_elt = insert (new_src, classp, src_hash, new_mode);
7439 src_elt->in_memory = elt->in_memory;
7440 src_elt->in_struct = elt->in_struct;
7442 else if (classp && classp != src_elt->first_same_value)
7443 /* Show that two things that we've seen before are
7444 actually the same. */
7445 merge_equiv_classes (src_elt, classp);
7447 classp = src_elt->first_same_value;
7452 /* Special handling for (set REG0 REG1)
7453 where REG0 is the "cheapest", cheaper than REG1.
7454 After cse, REG1 will probably not be used in the sequel,
7455 so (if easily done) change this insn to (set REG1 REG0) and
7456 replace REG1 with REG0 in the previous insn that computed their value.
7457 Then REG1 will become a dead store and won't cloud the situation
7458 for later optimizations.
7460 Do not make this change if REG1 is a hard register, because it will
7461 then be used in the sequel and we may be changing a two-operand insn
7462 into a three-operand insn.
7464 Also do not do this if we are operating on a copy of INSN. */
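/* A sketch of the transformation (hypothetical pseudo registers), where
   (reg 99) is the cheapest register in its class:

	(set (reg 101) (plus (reg 100) (const_int 4)))
	(set (reg 99) (reg 101))

   would become

	(set (reg 99) (plus (reg 100) (const_int 4)))
	(set (reg 101) (reg 99))

   leaving the second insn as a likely dead store.  */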
7466 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7467 && NEXT_INSN (PREV_INSN (insn)) == insn
7468 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7469 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7470 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7471 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7472 == REGNO (SET_DEST (sets[0].rtl))))
7474 rtx prev = PREV_INSN (insn);
7475 while (prev && GET_CODE (prev) == NOTE)
7476 prev = PREV_INSN (prev);
7478 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7479 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7481 rtx dest = SET_DEST (sets[0].rtl);
7482 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7484 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7485 validate_change (insn, & SET_DEST (sets[0].rtl),
7486 SET_SRC (sets[0].rtl), 1);
7487 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7488 apply_change_group ();
7490 /* If REG1 was equivalent to a constant, REG0 is not. */
7491 if (note)
7492 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7494 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7495 any REG_WAS_0 note on INSN to PREV. */
7496 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7497 if (note)
7498 remove_note (prev, note);
7500 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7501 if (note)
7503 remove_note (insn, note);
7504 XEXP (note, 1) = REG_NOTES (prev);
7505 REG_NOTES (prev) = note;
7508 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7509 then we must delete it, because the value in REG0 has changed. */
7510 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7511 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7512 remove_note (insn, note);
7516 /* If this is a conditional jump insn, record any known equivalences due to
7517 the condition being tested. */
7519 last_jump_equiv_class = 0;
7520 if (GET_CODE (insn) == JUMP_INSN
7521 && n_sets == 1 && GET_CODE (x) == SET
7522 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7523 record_jump_equiv (insn, 0);
7525 #ifdef HAVE_cc0
7526 /* If the previous insn set CC0 and this insn no longer references CC0,
7527 delete the previous insn. Here we use the fact that nothing expects CC0
7528 to be valid over an insn, which is true until the final pass. */
7529 if (prev_insn && GET_CODE (prev_insn) == INSN
7530 && (tem = single_set (prev_insn)) != 0
7531 && SET_DEST (tem) == cc0_rtx
7532 && ! reg_mentioned_p (cc0_rtx, x))
7534 PUT_CODE (prev_insn, NOTE);
7535 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7536 NOTE_SOURCE_FILE (prev_insn) = 0;
7539 prev_insn_cc0 = this_insn_cc0;
7540 prev_insn_cc0_mode = this_insn_cc0_mode;
7541 #endif
7543 prev_insn = insn;
7546 /* Store 1 in *WRITES_PTR for those categories of memory ref
7547 that must be invalidated when the expression WRITTEN is stored in.
7548 If WRITTEN is null, say everything must be invalidated. */
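/* A sketch of the categories, deduced from the uses below: a push such
   as (mem (pre_dec (reg sp))) sets only WRITES_PTR->sp; a BLKmode
   store or (mem (scratch)) invalidates everything; a store through a
   varying address such as (mem:SI (plus (reg 100) (reg 101))) sets
   ->nonscalar, and also ->all when it could alias scalars (e.g. a
   QImode store); a store through a fixed address sets ->var.  */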
7550 static void
7551 note_mem_written (written, writes_ptr)
7552 rtx written;
7553 struct write_data *writes_ptr;
7555 static struct write_data everything = {0, 1, 1, 1};
7557 if (written == 0)
7558 *writes_ptr = everything;
7559 else if (GET_CODE (written) == MEM)
7561 /* Pushing or popping the stack invalidates just the stack pointer. */
7562 rtx addr = XEXP (written, 0);
7563 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7564 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7565 && GET_CODE (XEXP (addr, 0)) == REG
7566 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7568 writes_ptr->sp = 1;
7569 return;
7571 else if (GET_MODE (written) == BLKmode)
7572 *writes_ptr = everything;
7573 /* (mem (scratch)) means clobber everything. */
7574 else if (GET_CODE (addr) == SCRATCH)
7575 *writes_ptr = everything;
7576 else if (cse_rtx_addr_varies_p (written))
7578 /* A varying address that is a sum indicates an array element,
7579 and that's just as good as a structure element
7580 in implying that we need not invalidate scalar variables.
7581 However, we must allow QImode aliasing of scalars, because the
7582 ANSI C standard allows character pointers to alias anything. */
7583 if (! ((MEM_IN_STRUCT_P (written)
7584 || GET_CODE (XEXP (written, 0)) == PLUS)
7585 && GET_MODE (written) != QImode))
7586 writes_ptr->all = 1;
7587 writes_ptr->nonscalar = 1;
7589 writes_ptr->var = 1;
7593 /* Perform invalidation on the basis of everything about an insn
7594 except for invalidating the actual places that are SET in it.
7595 This includes the places CLOBBERed, and anything that might
7596 alias with something that is SET or CLOBBERed.
7598 W points to the writes_memory for this insn, a struct write_data
7599 saying which kinds of memory references must be invalidated.
7600 X is the pattern of the insn. */
7602 static void
7603 invalidate_from_clobbers (w, x)
7604 struct write_data *w;
7605 rtx x;
7607 /* If W->var is not set, W specifies no action.
7608 If W->all is set, this step gets all memory refs
7609 so they can be ignored in the rest of this function. */
7610 if (w->var)
7611 invalidate_memory (w);
7613 if (w->sp)
7615 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7616 reg_tick[STACK_POINTER_REGNUM]++;
7618 /* This should be *very* rare. */
7619 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7620 invalidate (stack_pointer_rtx, VOIDmode);
7623 if (GET_CODE (x) == CLOBBER)
7625 rtx ref = XEXP (x, 0);
7626 if (ref)
7628 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7629 || (GET_CODE (ref) == MEM && ! w->all))
7630 invalidate (ref, VOIDmode);
7631 else if (GET_CODE (ref) == STRICT_LOW_PART
7632 || GET_CODE (ref) == ZERO_EXTRACT)
7633 invalidate (XEXP (ref, 0), GET_MODE (ref));
7636 else if (GET_CODE (x) == PARALLEL)
7638 register int i;
7639 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7641 register rtx y = XVECEXP (x, 0, i);
7642 if (GET_CODE (y) == CLOBBER)
7644 rtx ref = XEXP (y, 0);
7645 if (ref)
7647 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7648 || (GET_CODE (ref) == MEM && !w->all))
7649 invalidate (ref, VOIDmode);
7650 else if (GET_CODE (ref) == STRICT_LOW_PART
7651 || GET_CODE (ref) == ZERO_EXTRACT)
7652 invalidate (XEXP (ref, 0), GET_MODE (ref));
7659 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7660 and replace any registers in them with either an equivalent constant
7661 or the canonical form of the register. If we are inside an address,
7662 only do this if the address remains valid.
7664 OBJECT is 0 except when within a MEM in which case it is the MEM.
7666 Return the replacement for X. */
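/* A sketch of the effect (hypothetical registers): if (reg 100) is
   known to hold (const_int 4), then in a note

	(expr_list:REG_EQUAL (plus:SI (reg:SI 100) (reg:SI 101)) ...)

   the (reg 100) is replaced by (const_int 4), while (reg 101) is
   merely replaced by the canonical register of its class.  */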
7668 static rtx
7669 cse_process_notes (x, object)
7670 rtx x;
7671 rtx object;
7673 enum rtx_code code = GET_CODE (x);
7674 char *fmt = GET_RTX_FORMAT (code);
7675 int i;
7677 switch (code)
7679 case CONST_INT:
7680 case CONST:
7681 case SYMBOL_REF:
7682 case LABEL_REF:
7683 case CONST_DOUBLE:
7684 case PC:
7685 case CC0:
7686 case LO_SUM:
7687 return x;
7689 case MEM:
7690 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7691 return x;
7693 case EXPR_LIST:
7694 case INSN_LIST:
7695 if (REG_NOTE_KIND (x) == REG_EQUAL)
7696 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7697 if (XEXP (x, 1))
7698 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7699 return x;
7701 case SIGN_EXTEND:
7702 case ZERO_EXTEND:
7704 rtx new = cse_process_notes (XEXP (x, 0), object);
7705 /* We don't substitute VOIDmode constants into these rtx,
7706 since they would impede folding. */
7707 if (GET_MODE (new) != VOIDmode)
7708 validate_change (object, &XEXP (x, 0), new, 0);
7709 return x;
7712 case REG:
7713 i = reg_qty[REGNO (x)];
7715 /* Return a constant or a constant register. */
7716 if (REGNO_QTY_VALID_P (REGNO (x))
7717 && qty_const[i] != 0
7718 && (CONSTANT_P (qty_const[i])
7719 || GET_CODE (qty_const[i]) == REG))
7721 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7722 if (new)
7723 return new;
7726 /* Otherwise, canonicalize this register. */
7727 return canon_reg (x, NULL_RTX);
7730 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7731 if (fmt[i] == 'e')
7732 validate_change (object, &XEXP (x, i),
7733 cse_process_notes (XEXP (x, i), object), 0);
7735 return x;
7738 /* Find common subexpressions between the end test of a loop and the beginning
7739 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7741 Often we have a loop where an expression in the exit test is used
7742 in the body of the loop. For example "while (*p) *q++ = *p++;".
7743 Because of the way we duplicate the loop exit test in front of the loop,
7744 however, we don't detect that common subexpression. This will be caught
7745 when global cse is implemented, but this is quite a common case.
7747 This function handles the most common cases of these common expressions.
7748 It is called after we have processed the basic block ending with the
7749 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7750 jumps to a label used only once. */
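/* Schematically, a sketch of the layout jump.c produces:

	   <copy of exit test; conditional jump past the loop>
	L: <loop body>
	   <exit test; conditional jump back to L>

   Expressions computed in the copied exit test are thus available at
   the head of the body, which is what this function exploits.  */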
7752 static void
7753 cse_around_loop (loop_start)
7754 rtx loop_start;
7756 rtx insn;
7757 int i;
7758 struct table_elt *p;
7760 /* If the jump at the end of the loop doesn't go to the start, we don't
7761 do anything. */
7762 for (insn = PREV_INSN (loop_start);
7763 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7764 insn = PREV_INSN (insn))
7767 if (insn == 0
7768 || GET_CODE (insn) != NOTE
7769 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7770 return;
7772 /* If the last insn of the loop (the end test) was an NE comparison,
7773 we will interpret it as an EQ comparison, since we fell through
7774 the loop. Any equivalences resulting from that comparison are
7775 therefore not valid and must be invalidated. */
7776 if (last_jump_equiv_class)
7777 for (p = last_jump_equiv_class->first_same_value; p;
7778 p = p->next_same_value)
7779 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7780 || (GET_CODE (p->exp) == SUBREG
7781 && GET_CODE (SUBREG_REG (p->exp)) == REG))
7782 invalidate (p->exp, VOIDmode);
7783 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7784 || GET_CODE (p->exp) == ZERO_EXTRACT)
7785 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
7787 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7788 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7790 The only thing we do with SET_DEST is invalidate entries, so we
7791 can safely process each SET in order. It is slightly less efficient
7792 to do so, but we only want to handle the most common cases. */
7794 for (insn = NEXT_INSN (loop_start);
7795 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7796 && ! (GET_CODE (insn) == NOTE
7797 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7798 insn = NEXT_INSN (insn))
7800 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7801 && (GET_CODE (PATTERN (insn)) == SET
7802 || GET_CODE (PATTERN (insn)) == CLOBBER))
7803 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7804 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7805 && GET_CODE (PATTERN (insn)) == PARALLEL)
7806 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7807 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7808 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7809 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7810 loop_start);
7814 /* Variable used for communications between the next two routines. */
7816 static struct write_data skipped_writes_memory;
7818 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7819 since they are done elsewhere. This function is called via note_stores. */
7821 static void
7822 invalidate_skipped_set (dest, set)
7823 rtx set;
7824 rtx dest;
7826 if (GET_CODE (set) == CLOBBER
7827 #ifdef HAVE_cc0
7828 || dest == cc0_rtx
7829 #endif
7830 || dest == pc_rtx)
7831 return;
7833 if (GET_CODE (dest) == MEM)
7834 note_mem_written (dest, &skipped_writes_memory);
7836 /* There are times when an address can appear varying and be a PLUS
7837 during this scan when it would be a fixed address were we to know
7838 the proper equivalences. So promote "nonscalar" to be "all". */
7839 if (skipped_writes_memory.nonscalar)
7840 skipped_writes_memory.all = 1;
7842 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7843 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7844 invalidate (dest, VOIDmode);
7845 else if (GET_CODE (dest) == STRICT_LOW_PART
7846 || GET_CODE (dest) == ZERO_EXTRACT)
7847 invalidate (XEXP (dest, 0), GET_MODE (dest));
7850 /* Invalidate all insns from START up to the end of the function or the
7851 next label. This is called when we wish to CSE around a block that is
7852 conditionally executed. */
7854 static void
7855 invalidate_skipped_block (start)
7856 rtx start;
7858 rtx insn;
7859 static struct write_data init = {0, 0, 0, 0};
7860 static struct write_data everything = {0, 1, 1, 1};
7862 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7863 insn = NEXT_INSN (insn))
7865 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7866 continue;
7868 skipped_writes_memory = init;
7870 if (GET_CODE (insn) == CALL_INSN)
7872 invalidate_for_call ();
7873 skipped_writes_memory = everything;
7876 note_stores (PATTERN (insn), invalidate_skipped_set);
7877 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7881 /* Used for communication between the following two routines; contains a
7882 value to be checked for modification. */
7884 static rtx cse_check_loop_start_value;
7886 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7887 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7889 static void
7890 cse_check_loop_start (x, set)
7891 rtx x;
7892 rtx set;
7894 if (cse_check_loop_start_value == 0
7895 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7896 return;
7898 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7899 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7900 cse_check_loop_start_value = 0;
7903 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7904 a loop that starts with the label at LOOP_START.
7906 If X is a SET, we see if its SET_SRC is currently in our hash table.
7907 If so, we see if it has a value equal to some register used only in the
7908 loop exit code (as marked by jump.c).
7910 If those two conditions are true, we search backwards from the start of
7911 the loop to see if that same value was loaded into a register that still
7912 retains its value at the start of the loop.
7914 If so, we insert an insn after the load to copy the destination of that
7915 load into the equivalent register and (try to) replace our SET_SRC with that
7916 register.
7918 In any event, we invalidate whatever this SET or CLOBBER modifies. */
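/* An illustrative sketch (hypothetical registers): suppose the exit
   test uses (reg 70), marked REG_LOOP_TEST_P, and near the loop start
   we see

	(set (reg 80) (mem X))

   where (mem X) is known equivalent to (reg 70).  If an insn

	(set (reg 60) (mem X))

   before the loop still holds at the loop start, we emit
   (set (reg 70) (reg 60)) after it and replace our SET_SRC with
   (reg 70).  */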
7920 static void
7921 cse_set_around_loop (x, insn, loop_start)
7922 rtx x;
7923 rtx insn;
7924 rtx loop_start;
7926 struct table_elt *src_elt;
7927 static struct write_data init = {0, 0, 0, 0};
7928 struct write_data writes_memory;
7930 writes_memory = init;
7932 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7933 are setting PC or CC0 or whose SET_SRC is already a register. */
7934 if (GET_CODE (x) == SET
7935 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7936 && GET_CODE (SET_SRC (x)) != REG)
7938 src_elt = lookup (SET_SRC (x),
7939 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7940 GET_MODE (SET_DEST (x)));
7942 if (src_elt)
7943 for (src_elt = src_elt->first_same_value; src_elt;
7944 src_elt = src_elt->next_same_value)
7945 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7946 && COST (src_elt->exp) < COST (SET_SRC (x)))
7948 rtx p, set;
7950 /* Look for an insn in front of LOOP_START that sets
7951 something in the desired mode to SET_SRC (x) before we hit
7952 a label or CALL_INSN. */
7954 for (p = prev_nonnote_insn (loop_start);
7955 p && GET_CODE (p) != CALL_INSN
7956 && GET_CODE (p) != CODE_LABEL;
7957 p = prev_nonnote_insn (p))
7958 if ((set = single_set (p)) != 0
7959 && GET_CODE (SET_DEST (set)) == REG
7960 && GET_MODE (SET_DEST (set)) == src_elt->mode
7961 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7963 /* We now have to ensure that nothing between P
7964 and LOOP_START modified anything referenced in
7965 SET_SRC (x). We know that nothing within the loop
7966 can modify it, or we would have invalidated it in
7967 the hash table. */
7968 rtx q;
7970 cse_check_loop_start_value = SET_SRC (x);
7971 for (q = p; q != loop_start; q = NEXT_INSN (q))
7972 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7973 note_stores (PATTERN (q), cse_check_loop_start);
7975 /* If nothing was changed and we can replace our
7976 SET_SRC, add an insn after P to copy its destination
7977 to what we will be replacing SET_SRC with. */
7978 if (cse_check_loop_start_value
7979 && validate_change (insn, &SET_SRC (x),
7980 src_elt->exp, 0))
7981 emit_insn_after (gen_move_insn (src_elt->exp,
7982 SET_DEST (set)),
7983 p);
7984 break;
7989 /* Now invalidate anything modified by X. */
7990 note_mem_written (SET_DEST (x), &writes_memory);
7992 if (writes_memory.var)
7993 invalidate_memory (&writes_memory);
7995 /* See comment on similar code in cse_insn for explanation of these tests. */
7996 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7997 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7998 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7999 invalidate (SET_DEST (x), VOIDmode);
8000 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8001 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
8002 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
8005 /* Find the end of INSN's basic block and return its range,
8006 the total number of SETs in all the insns of the block, the last insn of the
8007 block, and the branch path.
8009 The branch path indicates which branches should be followed. If a non-zero
8010 path size is specified, the block should be rescanned and a different set
8011 of branches will be taken. The branch path is only used if
8012 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
8014 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8015 used to describe the block. It is filled in with the information about
8016 the current block. The incoming structure's branch path, if any, is used
8017 to construct the output branch path. */
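/* A sketch of how the path evolves: a conditional branch we decide to
   follow is recorded as TAKEN, one that merely skips a block as
   AROUND.  On each rescan the last entry not already NOT_TAKEN is
   flipped to NOT_TAKEN (entries past it are dropped), so successive
   scans explore different ways through the branches.  */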
8019 void
8020 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
8021 rtx insn;
8022 struct cse_basic_block_data *data;
8023 int follow_jumps;
8024 int after_loop;
8025 int skip_blocks;
8027 rtx p = insn, q;
8028 int nsets = 0;
8029 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
8030 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
8031 int path_size = data->path_size;
8032 int path_entry = 0;
8033 int i;
8035 /* Update the previous branch path, if any. If the last branch was
8036 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
8037 shorten the path by one and look at the previous branch. We know that
8038 at least one branch must have been taken if PATH_SIZE is non-zero. */
8039 while (path_size > 0)
8041 if (data->path[path_size - 1].status != NOT_TAKEN)
8043 data->path[path_size - 1].status = NOT_TAKEN;
8044 break;
8046 else
8047 path_size--;
8050 /* Scan to end of this basic block. */
8051 while (p && GET_CODE (p) != CODE_LABEL)
8053 /* Don't cse out the end of a loop. This makes a difference
8054 only for the unusual loops that always execute at least once;
8055 all other loops have labels there so we will stop in any case.
8056 Cse'ing out the end of the loop is dangerous because it
8057 might cause an invariant expression inside the loop
8058 to be reused after the end of the loop. This would make it
8059 hard to move the expression out of the loop in loop.c,
8060 especially if it is one of several equivalent expressions
8061 and loop.c would like to eliminate it.
8063 If we are running after loop.c has finished, we can ignore
8064 the NOTE_INSN_LOOP_END. */
8066 if (! after_loop && GET_CODE (p) == NOTE
8067 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8068 break;
8070 /* Don't cse over a call to setjmp; on some machines (e.g., VAX)
8071 the regs restored by the longjmp come from
8072 a later time than the setjmp. */
8073 if (GET_CODE (p) == NOTE
8074 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8075 break;
8077 /* A PARALLEL can have lots of SETs in it,
8078 especially if it is really an ASM_OPERANDS. */
8079 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8080 && GET_CODE (PATTERN (p)) == PARALLEL)
8081 nsets += XVECLEN (PATTERN (p), 0);
8082 else if (GET_CODE (p) != NOTE)
8083 nsets += 1;
8085 /* Ignore insns made by CSE; they cannot affect the boundaries of
8086 the basic block. */
8088 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8089 high_cuid = INSN_CUID (p);
8090 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8091 low_cuid = INSN_CUID (p);
8093 /* See if this insn is in our branch path. If it is and we are to
8094 take it, do so. */
8095 if (path_entry < path_size && data->path[path_entry].branch == p)
8097 if (data->path[path_entry].status != NOT_TAKEN)
8098 p = JUMP_LABEL (p);
8100 /* Point to next entry in path, if any. */
8101 path_entry++;
8104 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8105 was specified, we haven't reached our maximum path length, there are
8106 insns following the target of the jump, this is the only use of the
8107 jump label, and the target label is preceded by a BARRIER.
8109 Alternatively, we can follow the jump if it branches around a
8110 block of code and there are no other branches into the block.
8111 In this case invalidate_skipped_block will be called to invalidate any
8112 registers set in the block when following the jump. */
8114 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8115 && GET_CODE (p) == JUMP_INSN
8116 && GET_CODE (PATTERN (p)) == SET
8117 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8118 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8119 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8121 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8122 if ((GET_CODE (q) != NOTE
8123 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8124 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8125 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8126 break;
8128 /* If we ran into a BARRIER, this code is an extension of the
8129 basic block when the branch is taken. */
8130 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8132 /* Don't allow ourselves to keep walking around an
8133 always-executed loop. */
8134 if (next_real_insn (q) == next)
8136 p = NEXT_INSN (p);
8137 continue;
8140 /* Similarly, don't put a branch in our path more than once. */
8141 for (i = 0; i < path_entry; i++)
8142 if (data->path[i].branch == p)
8143 break;
8145 if (i != path_entry)
8146 break;
8148 data->path[path_entry].branch = p;
8149 data->path[path_entry++].status = TAKEN;
8151 /* This branch now ends our path. It was possible that we
8152 didn't see this branch the last time around (when the
8153 insn in front of the target was a JUMP_INSN that was
8154 turned into a no-op). */
8155 path_size = path_entry;
8157 p = JUMP_LABEL (p);
8158 /* Mark block so we won't scan it again later. */
8159 PUT_MODE (NEXT_INSN (p), QImode);
8161 /* Detect a branch around a block of code. */
8162 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8164 register rtx tmp;
8166 if (next_real_insn (q) == next)
8168 p = NEXT_INSN (p);
8169 continue;
8172 for (i = 0; i < path_entry; i++)
8173 if (data->path[i].branch == p)
8174 break;
8176 if (i != path_entry)
8177 break;
8179 /* This is no_labels_between_p (p, q) with an added check for
8180 reaching the end of a function (in case Q precedes P). */
8181 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8182 if (GET_CODE (tmp) == CODE_LABEL)
8183 break;
8185 if (tmp == q)
8187 data->path[path_entry].branch = p;
8188 data->path[path_entry++].status = AROUND;
8190 path_size = path_entry;
8192 p = JUMP_LABEL (p);
8193 /* Mark block so we won't scan it again later. */
8194 PUT_MODE (NEXT_INSN (p), QImode);
8198 p = NEXT_INSN (p);
8201 data->low_cuid = low_cuid;
8202 data->high_cuid = high_cuid;
8203 data->nsets = nsets;
8204 data->last = p;
8206 /* If no jump in the path was taken, set our path length to zero
8207 so a rescan won't be done. */
8208 for (i = path_size - 1; i >= 0; i--)
8209 if (data->path[i].status != NOT_TAKEN)
8210 break;
8212 if (i == -1)
8213 data->path_size = 0;
8214 else
8215 data->path_size = path_size;
8217 /* End the current branch path. */
8218 data->path[path_size].branch = 0;
8221 /* Perform cse on the instructions of a function.
8222 F is the first instruction.
8223 NREGS is one plus the highest pseudo-reg number used in the insns.
8225 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8226 (only if -frerun-cse-after-loop).
8228 Returns 1 if jump_optimize should be redone due to simplifications
8229 in conditional jump instructions. */
8231 int
8232 cse_main (f, nregs, after_loop, file)
8233 rtx f;
8234 int nregs;
8235 int after_loop;
8236 FILE *file;
8238 struct cse_basic_block_data val;
8239 register rtx insn = f;
8240 register int i;
8242 cse_jumps_altered = 0;
8243 recorded_label_ref = 0;
8244 constant_pool_entries_cost = 0;
8245 val.path_size = 0;
8247 init_recog ();
8249 max_reg = nregs;
8251 all_minus_one = (int *) alloca (nregs * sizeof (int));
8252 consec_ints = (int *) alloca (nregs * sizeof (int));
8254 for (i = 0; i < nregs; i++)
8256 all_minus_one[i] = -1;
8257 consec_ints[i] = i;
8260 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8261 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8262 reg_qty = (int *) alloca (nregs * sizeof (int));
8263 reg_in_table = (int *) alloca (nregs * sizeof (int));
8264 reg_tick = (int *) alloca (nregs * sizeof (int));
8266 #ifdef LOAD_EXTEND_OP
8268 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8269 and change the code and mode as appropriate. */
8270 memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, 0);
8271 #endif
8273 /* Discard all the free elements of the previous function
8274 since they are allocated on the temporary obstack. */
8275 bzero ((char *) table, sizeof table);
8276 free_element_chain = 0;
8277 n_elements_made = 0;
8279 /* Find the largest uid. */
8281 max_uid = get_max_uid ();
8282 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8283 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8285 /* Compute the mapping from uids to cuids.
8286 CUIDs are numbers assigned to insns, like uids,
8287 except that cuids increase monotonically through the code.
8288 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8289 between two insns is not affected by -g. */
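/* For example (hypothetical uids): insns with uids 40, 7 and 23
   appearing in that order receive cuids 1, 2 and 3, and a line-number
   note between them shares the cuid of the preceding insn.  */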
8291 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8293 if (GET_CODE (insn) != NOTE
8294 || NOTE_LINE_NUMBER (insn) < 0)
8295 INSN_CUID (insn) = ++i;
8296 else
8297 /* Give a line number note the same cuid as preceding insn. */
8298 INSN_CUID (insn) = i;
8301 /* Initialize which registers are clobbered by calls. */
8303 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8305 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8306 if ((call_used_regs[i]
8307 /* Used to check !fixed_regs[i] here, but that isn't safe;
8308 fixed regs are still call-clobbered, and sched can get
8309 confused if they can "live across calls".
8311 The frame pointer is always preserved across calls. The arg
8312 pointer is if it is fixed. The stack pointer usually is, unless
8313 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8314 will be present. If we are generating PIC code, the PIC offset
8315 table register is preserved across calls. */
8317 && i != STACK_POINTER_REGNUM
8318 && i != FRAME_POINTER_REGNUM
8319 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8320 && i != HARD_FRAME_POINTER_REGNUM
8321 #endif
8322 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8323 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8324 #endif
8325 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8326 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8327 #endif
8329 || global_regs[i])
8330 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8332 /* Loop over basic blocks.
8333 Compute the maximum number of qty's needed for each basic block
8334 (which is 2 for each SET). */
8335 insn = f;
8336 while (insn)
8338 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8339 flag_cse_skip_blocks);
8341 /* If this basic block was already processed or has no sets, skip it. */
8342 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8344 PUT_MODE (insn, VOIDmode);
8345 insn = (val.last ? NEXT_INSN (val.last) : 0);
8346 val.path_size = 0;
8347 continue;
8350 cse_basic_block_start = val.low_cuid;
8351 cse_basic_block_end = val.high_cuid;
8352 max_qty = val.nsets * 2;
8354 if (file)
8355 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8356 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8357 val.nsets);
8359 /* Make MAX_QTY bigger to give us room to optimize
8360 past the end of this basic block, if that should prove useful. */
8361 if (max_qty < 500)
8362 max_qty = 500;
8364 max_qty += max_reg;
8366 /* If this basic block is being extended by following certain jumps,
8367 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8368 Otherwise, we start after this basic block. */
8369 if (val.path_size > 0)
8370 cse_basic_block (insn, val.last, val.path, 0);
8371 else
8373 int old_cse_jumps_altered = cse_jumps_altered;
8374 rtx temp;
8376 /* When cse changes a conditional jump to an unconditional
8377 jump, we want to reprocess the block, since it will give
8378 us a new branch path to investigate. */
8379 cse_jumps_altered = 0;
8380 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8381 if (cse_jumps_altered == 0
8382 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8383 insn = temp;
8385 cse_jumps_altered |= old_cse_jumps_altered;
8388 #ifdef USE_C_ALLOCA
8389 alloca (0);
8390 #endif
8393 /* Tell refers_to_mem_p that qty_const info is not available. */
8394 qty_const = 0;
8396 if (max_elements_made < n_elements_made)
8397 max_elements_made = n_elements_made;
8399 return cse_jumps_altered || recorded_label_ref;
8402 /* Process a single basic block. FROM and TO are the limits of the basic
8403 block. NEXT_BRANCH points to the branch path when following jumps or
8404 a null path when not following jumps.
8406 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8407 loop. This is true when we are being called for the last time on a
8408 block and this CSE pass is before loop.c. */
8410 static rtx
8411 cse_basic_block (from, to, next_branch, around_loop)
8412 register rtx from, to;
8413 struct branch_path *next_branch;
8414 int around_loop;
8416 register rtx insn;
8417 int to_usage = 0;
8418 int in_libcall_block = 0;
8420 /* Each of these arrays is undefined before max_reg, so only allocate
8421 the space actually needed and adjust the start below. */
8423 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8424 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8425 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8426 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8427 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8428 qty_comparison_code
8429 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8430 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8431 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8433 qty_first_reg -= max_reg;
8434 qty_last_reg -= max_reg;
8435 qty_mode -= max_reg;
8436 qty_const -= max_reg;
8437 qty_const_insn -= max_reg;
8438 qty_comparison_code -= max_reg;
8439 qty_comparison_qty -= max_reg;
8440 qty_comparison_const -= max_reg;
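/* After the adjustments above each qty_... pointer behaves as if it
   addressed an array of max_qty elements, although storage exists only
   for the valid indices max_reg .. max_qty - 1; e.g. qty_const[max_reg]
   refers to the first alloca'd slot.  */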
8442 new_basic_block ();
8444 /* TO might be a label. If so, protect it from being deleted. */
8445 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8446 ++LABEL_NUSES (to);
8448 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8450 register enum rtx_code code;
8452 /* See if this is a branch that is part of the path. If so, and it is
8453 to be taken, do so. */
8454 if (next_branch->branch == insn)
8456 enum taken status = next_branch++->status;
8457 if (status != NOT_TAKEN)
8459 if (status == TAKEN)
8460 record_jump_equiv (insn, 1);
8461 else
8462 invalidate_skipped_block (NEXT_INSN (insn));
8464 /* Set the last insn as the jump insn; it doesn't affect cc0.
8465 Then follow this branch. */
8466 #ifdef HAVE_cc0
8467 prev_insn_cc0 = 0;
8468 #endif
8469 prev_insn = insn;
8470 insn = JUMP_LABEL (insn);
8471 continue;
8475 code = GET_CODE (insn);
8476 if (GET_MODE (insn) == QImode)
8477 PUT_MODE (insn, VOIDmode);
8479 if (GET_RTX_CLASS (code) == 'i')
8481 /* Process notes first so we have all notes in canonical forms when
8482 looking for duplicate operations. */
8484 if (REG_NOTES (insn))
8485 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8487 /* Track when we are inside a LIBCALL block. Inside such a block,
8488 we do not want to record destinations. The last insn of a
8489 LIBCALL block is not considered to be part of the block, since
8490 its destination is the result of the block and hence should be
8491 recorded. */
8493 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8494 in_libcall_block = 1;
8495 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8496 in_libcall_block = 0;
8498 cse_insn (insn, in_libcall_block);
8501 /* If INSN is now an unconditional jump, skip to the end of our
8502 basic block by pretending that we just did the last insn in the
8503 basic block. If we are jumping to the end of our block, show
8504 that we can have one usage of TO. */
8506 if (simplejump_p (insn))
8508 if (to == 0)
8509 return 0;
8511 if (JUMP_LABEL (insn) == to)
8512 to_usage = 1;
8514 /* Maybe TO was deleted because the jump is unconditional.
8515 If so, there is nothing left in this basic block. */
8516 /* ??? Perhaps it would be smarter to set TO
8517 to whatever follows this insn,
8518 and pretend the basic block had always ended here. */
8519 if (INSN_DELETED_P (to))
8520 break;
8522 insn = PREV_INSN (to);
8525 /* See if it is ok to keep on going past the label
8526 which used to end our basic block. Remember that we incremented
8527 the count of that label, so we decrement it here. If we made
8528 a jump unconditional, TO_USAGE will be one; in that case, we don't
8529 want to count the use in that jump. */
8531 if (to != 0 && NEXT_INSN (insn) == to
8532 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8534 struct cse_basic_block_data val;
8535 rtx prev;
8537 insn = NEXT_INSN (to);
8539 if (LABEL_NUSES (to) == 0)
8540 insn = delete_insn (to);
8542 /* If TO was the last insn in the function, we are done. */
8543 if (insn == 0)
8544 return 0;
8546 /* If TO was preceded by a BARRIER we are done with this block
8547 because it has no continuation. */
8548 prev = prev_nonnote_insn (to);
8549 if (prev && GET_CODE (prev) == BARRIER)
8550 return insn;
8552 /* Find the end of the following block. Note that we won't be
8553 following branches in this case. */
8554 to_usage = 0;
8555 val.path_size = 0;
8556 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8558 /* If the tables we allocated have enough space left
8559 to handle all the SETs in the next basic block,
8560 continue through it. Otherwise, return,
8561 and that block will be scanned individually. */
8562 if (val.nsets * 2 + next_qty > max_qty)
8563 break;
8565 cse_basic_block_start = val.low_cuid;
8566 cse_basic_block_end = val.high_cuid;
8567 to = val.last;
8569 /* Prevent TO from being deleted if it is a label. */
8570 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8571 ++LABEL_NUSES (to);
8573 /* Back up so we process the first insn in the extension. */
8574 insn = PREV_INSN (insn);
8578 if (next_qty > max_qty)
8579 abort ();
8581 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8582 the previous insn is the only insn that branches to the head of a loop,
8583 we can cse into the loop. Don't do this if we changed the jump
8584 structure of a loop unless we aren't going to be following jumps. */
8586 if ((cse_jumps_altered == 0
8587 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8588 && around_loop && to != 0
8589 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8590 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8591 && JUMP_LABEL (PREV_INSN (to)) != 0
8592 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8593 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8595 return to ? NEXT_INSN (to) : 0;
8598 /* Count the number of times registers are used (not set) in X.
8599 COUNTS is an array in which we accumulate the count, INCR is how much
8600 we count each register usage.
8602 Don't count a usage of DEST, which is the SET_DEST of a SET which
8603 contains X in its SET_SRC. This is because such a SET does not
8604 modify the liveness of DEST. */
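/* Thus, for example, in (set (reg 100) (plus (reg 100) (reg 101)))
   only the use of (reg 101) is counted; if nothing else uses (reg 100),
   the insn can be deleted as dead even though its source mentions that
   register.  */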
8606 static void
8607 count_reg_usage (x, counts, dest, incr)
8608 rtx x;
8609 int *counts;
8610 rtx dest;
8611 int incr;
8613 enum rtx_code code;
8614 char *fmt;
8615 int i, j;
8617 if (x == 0)
8618 return;
8620 switch (code = GET_CODE (x))
8622 case REG:
8623 if (x != dest)
8624 counts[REGNO (x)] += incr;
8625 return;
8627 case PC:
8628 case CC0:
8629 case CONST:
8630 case CONST_INT:
8631 case CONST_DOUBLE:
8632 case SYMBOL_REF:
8633 case LABEL_REF:
8634 case CLOBBER:
8635 return;
8637 case SET:
8638 /* Unless we are setting a REG, count everything in SET_DEST. */
8639 if (GET_CODE (SET_DEST (x)) != REG)
8640 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8642 /* If SRC has side-effects, then we can't delete this insn, so the
8643 usage of SET_DEST inside SRC counts.
8645 ??? Strictly speaking, we might be preserving this insn
8646 because some other SET has side-effects, but that's hard
8647 to do and can't happen now. */
8648 count_reg_usage (SET_SRC (x), counts,
8649 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8650 incr);
8651 return;
8653 case CALL_INSN:
8654 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8656 /* ... falls through ... */
8657 case INSN:
8658 case JUMP_INSN:
8659 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8661 /* Things used in a REG_EQUAL note aren't dead, since loop.c may try to
8662 use them. */
8664 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8665 return;
8667 case EXPR_LIST:
8668 case INSN_LIST:
8669 if (REG_NOTE_KIND (x) == REG_EQUAL
8670 || GET_CODE (XEXP (x,0)) == USE)
8671 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8672 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8673 return;
8676 fmt = GET_RTX_FORMAT (code);
8677 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8679 if (fmt[i] == 'e')
8680 count_reg_usage (XEXP (x, i), counts, dest, incr);
8681 else if (fmt[i] == 'E')
8682 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8683 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8687 /* Scan all the insns and delete any that are dead; i.e., they store a register
8688 that is never used or they copy a register to itself.
8690 This is used to remove insns made obviously dead by cse. It improves the
8691 heuristics in loop since it won't try to move dead invariants out of loops
8692 or make givs for dead quantities. The remaining passes of the compilation
8693 are also sped up. */
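/* For example, once cse has replaced every use of (reg 105) by an
   equivalent register, an insn (set (reg 105) (plus ...)) whose
   destination is now never used is deleted here, as is a self-copy
   such as (set (reg 106) (reg 106)).  */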
8695 void
8696 delete_dead_from_cse (insns, nreg)
8697 rtx insns;
8698 int nreg;
8700 int *counts = (int *) alloca (nreg * sizeof (int));
8701 rtx insn, prev;
8702 rtx tem;
8703 int i;
8704 int in_libcall = 0;
8706 /* First count the number of times each register is used. */
8707 bzero ((char *) counts, sizeof (int) * nreg);
8708 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8709 count_reg_usage (insn, counts, NULL_RTX, 1);
8711 /* Go from the last insn to the first and delete insns that only set unused
8712 registers or copy a register to itself. As we delete an insn, remove
8713 usage counts for registers it uses. */
8714 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8716 int live_insn = 0;
8718 prev = prev_real_insn (insn);
8720 /* Don't delete any insns that are part of a libcall block.
8721 Flow or loop might get confused if we did that. Remember
8722 that we are scanning backwards. */
8723 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8724 in_libcall = 1;
8726 if (in_libcall)
8727 live_insn = 1;
8728 else if (GET_CODE (PATTERN (insn)) == SET)
8730 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8731 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8732 ;
8734 #ifdef HAVE_cc0
8735 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8736 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8737 && ((tem = next_nonnote_insn (insn)) == 0
8738 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8739 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8740 ;
8741 #endif
8742 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8743 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8744 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8745 || side_effects_p (SET_SRC (PATTERN (insn))))
8746 live_insn = 1;
8748 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8749 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8751 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8753 if (GET_CODE (elt) == SET)
8755 if (GET_CODE (SET_DEST (elt)) == REG
8756 && SET_DEST (elt) == SET_SRC (elt))
8757 ;
8759 #ifdef HAVE_cc0
8760 else if (GET_CODE (SET_DEST (elt)) == CC0
8761 && ! side_effects_p (SET_SRC (elt))
8762 && ((tem = next_nonnote_insn (insn)) == 0
8763 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8764 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8765 ;
8766 #endif
8767 else if (GET_CODE (SET_DEST (elt)) != REG
8768 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8769 || counts[REGNO (SET_DEST (elt))] != 0
8770 || side_effects_p (SET_SRC (elt)))
8771 live_insn = 1;
8773 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8774 live_insn = 1;
8776 else
8777 live_insn = 1;
8779 /* If this is a dead insn, delete it and show registers in it aren't
8780 being used. */
8782 if (! live_insn)
8784 count_reg_usage (insn, counts, NULL_RTX, -1);
8785 delete_insn (insn);
8788 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8789 in_libcall = 0;