/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"

#include <setjmp.h>

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.
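
   For illustration (register numbers here are hypothetical; suppose
   max_reg == 100, so the first quantity allocated is 100):

	(set (reg 103) (reg 101))    copy: reg_qty[103] = reg_qty[101]
	(set (reg 101) (mem ...))    load: reg_qty[101] = next_qty++

   After the first insn, registers 101 and 103 share one quantity and
   are known to be equivalent; after the second, register 101 gets a
   fresh quantity while register 103 keeps the old one.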

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   that quantity's `qty_mode' must be in the hash table for both
   registers, and they must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number, if the mode of one register's
   quantity is not the same as the mode of those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as the address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
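
   For example (a hypothetical case), if (const_int 1) is moved both
   into an SImode register and into a DImode register, it is entered
   in the hash table twice, once with mode SImode and once with mode
   DImode, and a lookup under the wrong mode will not match it.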

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.
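
   For example (hypothetical register numbers), if reg_qty[101] ==
   reg_qty[105], then (plus (reg 101) (const_int 4)) and
   (plus (reg 105) (const_int 4)) receive the same hash code, so a
   lookup of the second expression finds the entry made for the first.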

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match makes the entries be ignored if anyone tries to match them.
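
   For illustration (hypothetical), suppose (plus (reg 101) (reg 102))
   is entered while reg_tick[101] == 3, so reg_in_table[101] becomes 3.
   A later store into register 101 makes reg_tick[101] == 4; the stale
   entry then simply fails to match.  Only when a new expression
   mentioning register 101 is about to be entered do we pay the cost of
   scanning the table to remove the stale references.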

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
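
/* For example (hypothetical), entering (const (plus (symbol_ref "x")
   (const_int 8))) also enters (symbol_ref "x"), and the two elements
   are chained together as related values.  If some register is later
   found to hold "x", use_related_value can rewrite the sum as that
   register plus 8.  */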

/* One plus largest register number used in this function.  */

static int max_reg;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints:
   one holding max_reg copies of -1; the other holding max_reg + 500 (an
   approximation for max_qty) elements where element i contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */
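
/* For illustration (hypothetical register numbers): after the insn
   (set (reg 101) (mem (reg 102))), the elements for (reg 101) and
   (mem (reg 102)) form one value class, linked through
   `next_same_value' with `first_same_value' pointing at the cheaper
   (reg 101).  Elements that merely collide in the same hash bucket,
   whatever their values, are linked through `next_same_hash'.  */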

struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)						\
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
  : canon_hash (X, M) % NBUCKETS)

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)					\
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N])				\
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)					\
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)						\
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM	\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER				\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N)						\
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER)	\
   || CHEAP_REGNO (REGNO (N)))

#define COST(X)							\
  (GET_CODE (X) == REG						\
   ? (CHEAP_REG (X) ? 0						\
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1			\
      : 2)							\
   : notreg_cost (X))

/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == arg_pointer_rtx					\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))
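
/* For example, a typical stack-slot address such as
   (plus frame_pointer_rtx (const_int -8)) satisfies FIXED_BASE_PLUS_P
   (and hence NONZERO_BASE_PLUS_P), while (plus stack_pointer_rtx
   (const_int 16)) satisfies only NONZERO_BASE_PLUS_P.  */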

static int notreg_cost PROTO((rtx));
static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate PROTO((rtx, enum machine_mode));
static int cse_rtx_varies_p PROTO((rtx));
static void remove_invalid_refs PROTO((int));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((void));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
				 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
				      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
				    rtx, rtx, int));
static void cse_insn PROTO((rtx, int));
static int note_mem_written PROTO((rtx));
static void invalidate_from_clobbers PROTO((rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));

extern int rtx_equal_function_value_matters;

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
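
/* Under this scheme a fast reg-reg insn totals 4: the operation's own
   cost of COSTS_N_INSNS (1) == 2 plus 1 for each of two pseudo-register
   operands (see COST above).  COSTS_N_INSNS (5), as used for MULT
   below, is 18, so a multiply weighs five times a simple move.  */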

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
      CONST_COSTS (x, code, outer_code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero ((char *) reg_tick, max_reg * sizeof (int));

  bcopy ((char *) all_minus_one, (char *) reg_in_table,
	 max_reg * sizeof (int));
  bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity that is not held in any
   register before this, and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = reg_qty[reg];
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[reg_qty[regno]] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update the table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]]
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}

/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory,
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X.  This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */
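
/* For example (hypothetical), after a store through (mem (plus
   frame_pointer_rtx (const_int -8))), `invalidate' removes only table
   entries for memory that may overlap those bytes; a store through
   (mem (reg 101)), whose address may vary, instead makes
   `invalidate_memory' drop every element with `in_memory' set.  */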

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* Because a register can be referenced in more than one mode,
	     we might have to remove more than one table entry.  */

	  struct table_elt *elt;

	  while (elt = lookup_for_remove (x, hash, GET_MODE (x)))
	    remove_from_table (elt, hash);
	}
      else
	{
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  /* Invalidate ASM_OPERANDS which reference memory (this is easier
	     than checking all the aliases).  */
	  if (p->in_memory
	      && (GET_CODE (p->exp) != MEM
		  || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
	    remove_from_table (p, i);
	}
    }
}

/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (regno)
     int regno;
{
  register int i;
  register struct table_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG
	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
	  remove_from_table (p, i);
      }
}

/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (x)
     rtx x;
{
  int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (GET_CODE (x) != REG
      || reg_in_table[REGNO (x)] < 0
      || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  We can skip
     objects that are registers, since they are handled specially.  */

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, 0)
	    && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
	  {
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
1696 /* Remove from the hash table any expression that is a call-clobbered
1697 register. Also update their TICK values. */
1699 static void
1700 invalidate_for_call ()
1702 int regno, endregno;
1703 int i;
1704 unsigned hash;
1705 struct table_elt *p, *next;
1706 int in_table = 0;
1708 /* Go through all the hard registers. For each that is clobbered in
1709 a CALL_INSN, remove the register from quantity chains and update
1710 reg_tick if defined. Also see if any of these registers is currently
1711 in the table. */
1713 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1714 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1716 delete_reg_equiv (regno);
1717 if (reg_tick[regno] >= 0)
1718 reg_tick[regno]++;
1720 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1723 /* In the case where we have no call-clobbered hard registers in the
1724 table, we are done. Otherwise, scan the table and remove any
1725 entry that overlaps a call-clobbered register. */
1727 if (in_table)
1728 for (hash = 0; hash < NBUCKETS; hash++)
1729 for (p = table[hash]; p; p = next)
1731 next = p->next_same_hash;
1733 if (p->in_memory)
1735 remove_from_table (p, hash);
1736 continue;
1739 if (GET_CODE (p->exp) != REG
1740 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1741 continue;
1743 regno = REGNO (p->exp);
1744 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1746 for (i = regno; i < endregno; i++)
1747 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1749 remove_from_table (p, hash);
1750 break;
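/* Illustrative sketch (not from the original source): a multi-word
   hard register occupies REGNO through REGNO + NREGS - 1, so an entry
   must be removed if *any* register in that range is call-clobbered.
   A stand-alone version of the overlap test, using a plain
   char-per-register bitset in place of the real HARD_REG_SET: */
#if 0 /* illustrative only */
static int
range_clobbered (const char *clobbered, int regno, int nregs)
{
  int i;
  for (i = regno; i < regno + nregs; i++)
    if (clobbered[i])
      return 1;
  return 0;
}
#endif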
1755 /* Given an expression X of type CONST,
1756 and ELT which is its table entry (or 0 if it
1757 is not in the hash table),
1758 return an alternate expression for X as a register plus integer.
1759 If none can be found, return 0. */
1761 static rtx
1762 use_related_value (x, elt)
1763 rtx x;
1764 struct table_elt *elt;
1766 register struct table_elt *relt = 0;
1767 register struct table_elt *p, *q;
1768 HOST_WIDE_INT offset;
1770 /* First, is there anything related known?
1771 If we have a table element, we can tell from that.
1772 Otherwise, must look it up. */
1774 if (elt != 0 && elt->related_value != 0)
1775 relt = elt;
1776 else if (elt == 0 && GET_CODE (x) == CONST)
1778 rtx subexp = get_related_value (x);
1779 if (subexp != 0)
1780 relt = lookup (subexp,
1781 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1782 GET_MODE (subexp));
1785 if (relt == 0)
1786 return 0;
1788 /* Search all related table entries for one that has an
1789 equivalent register. */
1791 p = relt;
1792 while (1)
1794 /* This loop is strange in that it is executed in two different cases.
1795 The first is when X is already in the table. Then it is searching
1796 the RELATED_VALUE list of X's class (RELT). The second case is when
1797 X is not in the table. Then RELT points to a class for the related
1798 value.
1800 Ensure that, whatever case we are in, we ignore classes that have
1801 the same value as X. */
1803 if (rtx_equal_p (x, p->exp))
1804 q = 0;
1805 else
1806 for (q = p->first_same_value; q; q = q->next_same_value)
1807 if (GET_CODE (q->exp) == REG)
1808 break;
1810 if (q)
1811 break;
1813 p = p->related_value;
1815 /* We went all the way around, so there is nothing to be found.
1816 Alternatively, perhaps RELT was in the table for some other reason
1817 and it has no related values recorded. */
1818 if (p == relt || p == 0)
1819 break;
1822 if (q == 0)
1823 return 0;
1825 offset = (get_integer_term (x) - get_integer_term (p->exp));
1826 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1827 return plus_constant (q->exp, offset);
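/* Illustrative sketch (not from the original source): the
   related-value chain is circular, so the walk above must stop when
   it comes back to where it started (or hits a null link). For the
   offset arithmetic: if X is (const (plus SYM 8)) and the class of
   (const (plus SYM 4)) contains register R, the result is (plus R 4).
   The circular walk, stripped of the rtx details — `has_reg_equiv'
   stands in for scanning the class for a REG: */
#if 0 /* illustrative only */
struct class_elt { struct class_elt *related; int has_reg_equiv; };

/* Return the first element on START's circular RELATED ring with a
   register equivalent, or 0 after one full lap. */
static struct class_elt *
find_reg_equiv (struct class_elt *start)
{
  struct class_elt *p = start;
  do
    {
      if (p->has_reg_equiv)
        return p;
      p = p->related;
    }
  while (p != 0 && p != start);
  return 0;
}
#endif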
1830 /* Hash an rtx. We are careful to make sure the value is never negative.
1831 Equivalent registers hash identically.
1832 MODE is used in hashing for CONST_INTs only;
1833 otherwise the mode of X is used.
1835 Store 1 in do_not_record if any subexpression is volatile.
1837 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1838 which does not have the RTX_UNCHANGING_P bit set.
1839 In this case, also store 1 in hash_arg_in_struct
1840 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1842 Note that cse_insn knows that the hash code of a MEM expression
1843 is just (int) MEM plus the hash code of the address. */
1845 static unsigned
1846 canon_hash (x, mode)
1847 rtx x;
1848 enum machine_mode mode;
1850 register int i, j;
1851 register unsigned hash = 0;
1852 register enum rtx_code code;
1853 register char *fmt;
1855 /* repeat is used to turn tail-recursion into iteration. */
1856 repeat:
1857 if (x == 0)
1858 return hash;
1860 code = GET_CODE (x);
1861 switch (code)
1863 case REG:
1865 register int regno = REGNO (x);
1867 /* On some machines, we can't record any non-fixed hard register,
1868 because extending its life will cause reload problems. We
1869 consider ap, fp, and sp to be fixed for this purpose.
1870 On all machines, we can't record any global registers. */
1872 if (regno < FIRST_PSEUDO_REGISTER
1873 && (global_regs[regno]
1874 #ifdef SMALL_REGISTER_CLASSES
1875 || (SMALL_REGISTER_CLASSES
1876 && ! fixed_regs[regno]
1877 && regno != FRAME_POINTER_REGNUM
1878 && regno != HARD_FRAME_POINTER_REGNUM
1879 && regno != ARG_POINTER_REGNUM
1880 && regno != STACK_POINTER_REGNUM)
1881 #endif
1884 do_not_record = 1;
1885 return 0;
1887 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1888 return hash;
1891 case CONST_INT:
1893 unsigned HOST_WIDE_INT tem = INTVAL (x);
1894 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1895 return hash;
1898 case CONST_DOUBLE:
1899 /* This is like the general case, except that it only counts
1900 the integers representing the constant. */
1901 hash += (unsigned) code + (unsigned) GET_MODE (x);
1902 if (GET_MODE (x) != VOIDmode)
1903 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1905 unsigned tem = XINT (x, i);
1906 hash += tem;
1908 else
1909 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1910 + (unsigned) CONST_DOUBLE_HIGH (x));
1911 return hash;
1913 /* Assume there is only one rtx object for any given label. */
1914 case LABEL_REF:
1915 hash
1916 += ((unsigned) LABEL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1917 return hash;
1919 case SYMBOL_REF:
1920 hash
1921 += ((unsigned) SYMBOL_REF << 7) + (unsigned HOST_WIDE_INT) XSTR (x, 0);
1922 return hash;
1924 case MEM:
1925 if (MEM_VOLATILE_P (x))
1927 do_not_record = 1;
1928 return 0;
1930 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
1932 hash_arg_in_memory = 1;
1933 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1935 /* Now that we have already found this special case,
1936 might as well speed it up as much as possible. */
1937 hash += (unsigned) MEM;
1938 x = XEXP (x, 0);
1939 goto repeat;
1941 case PRE_DEC:
1942 case PRE_INC:
1943 case POST_DEC:
1944 case POST_INC:
1945 case PC:
1946 case CC0:
1947 case CALL:
1948 case UNSPEC_VOLATILE:
1949 do_not_record = 1;
1950 return 0;
1952 case ASM_OPERANDS:
1953 if (MEM_VOLATILE_P (x))
1955 do_not_record = 1;
1956 return 0;
1960 i = GET_RTX_LENGTH (code) - 1;
1961 hash += (unsigned) code + (unsigned) GET_MODE (x);
1962 fmt = GET_RTX_FORMAT (code);
1963 for (; i >= 0; i--)
1965 if (fmt[i] == 'e')
1967 rtx tem = XEXP (x, i);
1969 /* If we are about to do the last recursive call
1970 needed at this level, change it into iteration.
1971 This function is called enough to be worth it. */
1972 if (i == 0)
1974 x = tem;
1975 goto repeat;
1977 hash += canon_hash (tem, 0);
1979 else if (fmt[i] == 'E')
1980 for (j = 0; j < XVECLEN (x, i); j++)
1981 hash += canon_hash (XVECEXP (x, i, j), 0);
1982 else if (fmt[i] == 's')
1984 register unsigned char *p = (unsigned char *) XSTR (x, i);
1985 if (p)
1986 while (*p)
1987 hash += *p++;
1989 else if (fmt[i] == 'i')
1991 register unsigned tem = XINT (x, i);
1992 hash += tem;
1994 else
1995 abort ();
1997 return hash;
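/* Illustrative sketch (not from the original source): the `goto
   repeat' above converts the final recursive call into iteration, so
   a deep right-leaning expression hashes in constant stack space.
   The same shape on a toy binary tree: */
#if 0 /* illustrative only */
struct tree { unsigned value; struct tree *left, *right; };

static unsigned
tree_hash (struct tree *t)
{
  unsigned hash = 0;
 repeat:
  if (t == 0)
    return hash;
  hash += t->value;
  hash += tree_hash (t->left);  /* genuine recursion for one child */
  t = t->right;                 /* iteration for the last one */
  goto repeat;
}
#endif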
2000 /* Like canon_hash but with no side effects. */
2002 static unsigned
2003 safe_hash (x, mode)
2004 rtx x;
2005 enum machine_mode mode;
2007 int save_do_not_record = do_not_record;
2008 int save_hash_arg_in_memory = hash_arg_in_memory;
2009 int save_hash_arg_in_struct = hash_arg_in_struct;
2010 unsigned hash = canon_hash (x, mode);
2011 hash_arg_in_memory = save_hash_arg_in_memory;
2012 hash_arg_in_struct = save_hash_arg_in_struct;
2013 do_not_record = save_do_not_record;
2014 return hash;
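/* Illustrative sketch (not from the original source): safe_hash is
   the usual save/restore wrapper that lets a routine with global side
   effects be called from contexts (lookups, equivalence tests) that
   must not disturb those globals. In outline, with `some_flag'
   standing in for do_not_record and friends: */
#if 0 /* illustrative only */
extern int some_flag;
extern unsigned noisy_hash (void *);    /* may set some_flag as it runs */

static unsigned
quiet_hash (void *x)
{
  int save = some_flag;
  unsigned h = noisy_hash (x);
  some_flag = save;             /* caller never sees the side effect */
  return h;
}
#endif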
2017 /* Return 1 iff X and Y would canonicalize into the same thing,
2018 without actually constructing the canonicalization of either one.
2019 If VALIDATE is nonzero,
2020 we assume X is an expression being processed from the rtl
2021 and Y was found in the hash table. We check register refs
2022 in Y for being marked as valid.
2024 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2025 that is known to be in the register. Ordinarily, we don't allow them
2026 to match, because letting them match would cause unpredictable results
2027 in all the places that search a hash table chain for an equivalent
2028 for a given value. A possible equivalent that has different structure
2029 has its hash code computed from different data. Whether the hash code
2030 is the same as that of the given value is pure luck. */
2032 static int
2033 exp_equiv_p (x, y, validate, equal_values)
2034 rtx x, y;
2035 int validate;
2036 int equal_values;
2038 register int i, j;
2039 register enum rtx_code code;
2040 register char *fmt;
2042 /* Note: it is incorrect to assume an expression is equivalent to itself
2043 if VALIDATE is nonzero. */
2044 if (x == y && !validate)
2045 return 1;
2046 if (x == 0 || y == 0)
2047 return x == y;
2049 code = GET_CODE (x);
2050 if (code != GET_CODE (y))
2052 if (!equal_values)
2053 return 0;
2055 /* If X is a constant and Y is a register or vice versa, they may be
2056 equivalent. We only have to validate if Y is a register. */
2057 if (CONSTANT_P (x) && GET_CODE (y) == REG
2058 && REGNO_QTY_VALID_P (REGNO (y))
2059 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2060 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2061 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2062 return 1;
2064 if (CONSTANT_P (y) && code == REG
2065 && REGNO_QTY_VALID_P (REGNO (x))
2066 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2067 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2068 return 1;
2070 return 0;
2073 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2074 if (GET_MODE (x) != GET_MODE (y))
2075 return 0;
2077 switch (code)
2079 case PC:
2080 case CC0:
2081 return x == y;
2083 case CONST_INT:
2084 return INTVAL (x) == INTVAL (y);
2086 case LABEL_REF:
2087 return XEXP (x, 0) == XEXP (y, 0);
2089 case SYMBOL_REF:
2090 return XSTR (x, 0) == XSTR (y, 0);
2092 case REG:
2094 int regno = REGNO (y);
2095 int endregno
2096 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2097 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2098 int i;
2100 /* If the quantities are not the same, the expressions are not
2101 equivalent. If they are and we are not to validate, they
2102 are equivalent. Otherwise, ensure all regs are up-to-date. */
2104 if (reg_qty[REGNO (x)] != reg_qty[regno])
2105 return 0;
2107 if (! validate)
2108 return 1;
2110 for (i = regno; i < endregno; i++)
2111 if (reg_in_table[i] != reg_tick[i])
2112 return 0;
2114 return 1;
2117 /* For commutative operations, check both orders. */
2118 case PLUS:
2119 case MULT:
2120 case AND:
2121 case IOR:
2122 case XOR:
2123 case NE:
2124 case EQ:
2125 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2126 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2127 validate, equal_values))
2128 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2129 validate, equal_values)
2130 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2131 validate, equal_values)));
2134 /* Compare the elements. If any pair of corresponding elements
2135 fails to match, return 0 for the whole thing. */
2137 fmt = GET_RTX_FORMAT (code);
2138 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2140 switch (fmt[i])
2142 case 'e':
2143 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2144 return 0;
2145 break;
2147 case 'E':
2148 if (XVECLEN (x, i) != XVECLEN (y, i))
2149 return 0;
2150 for (j = 0; j < XVECLEN (x, i); j++)
2151 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2152 validate, equal_values))
2153 return 0;
2154 break;
2156 case 's':
2157 if (strcmp (XSTR (x, i), XSTR (y, i)))
2158 return 0;
2159 break;
2161 case 'i':
2162 if (XINT (x, i) != XINT (y, i))
2163 return 0;
2164 break;
2166 case 'w':
2167 if (XWINT (x, i) != XWINT (y, i))
2168 return 0;
2169 break;
2171 case '0':
2172 break;
2174 default:
2175 abort ();
2179 return 1;
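/* Illustrative sketch (not from the original source): for commutative
   codes the function above accepts either operand order; everything
   else is compared positionally. The same idea on a toy expression
   type: */
#if 0 /* illustrative only */
struct expr { int op; struct expr *a, *b; };

static int is_commutative (int op);
static int equal (struct expr *x, struct expr *y);

static int
is_commutative (int op)
{
  return op == '+' || op == '*';  /* PLUS, MULT, ... in the real code */
}

static int
equal (struct expr *x, struct expr *y)
{
  if (x == y)
    return 1;
  if (x == 0 || y == 0 || x->op != y->op)
    return 0;
  if (equal (x->a, y->a) && equal (x->b, y->b))
    return 1;
  return (is_commutative (x->op)
          && equal (x->a, y->b) && equal (x->b, y->a));
}
#endif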
2182 /* Return 1 iff any subexpression of X matches Y.
2183 Here we do not require that X or Y be valid (for registers referred to)
2184 for being in the hash table. */
2186 static int
2187 refers_to_p (x, y)
2188 rtx x, y;
2190 register int i;
2191 register enum rtx_code code;
2192 register char *fmt;
2194 repeat:
2195 if (x == y)
2196 return 1;
2197 if (x == 0 || y == 0)
2198 return 0;
2200 code = GET_CODE (x);
2201 /* If X as a whole has the same code as Y, they may match.
2202 If so, return 1. */
2203 if (code == GET_CODE (y))
2205 if (exp_equiv_p (x, y, 0, 1))
2206 return 1;
2209 /* X does not match, so try its subexpressions. */
2211 fmt = GET_RTX_FORMAT (code);
2212 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2213 if (fmt[i] == 'e')
2215 if (i == 0)
2217 x = XEXP (x, 0);
2218 goto repeat;
2220 else
2221 if (refers_to_p (XEXP (x, i), y))
2222 return 1;
2224 else if (fmt[i] == 'E')
2226 int j;
2227 for (j = 0; j < XVECLEN (x, i); j++)
2228 if (refers_to_p (XVECEXP (x, i, j), y))
2229 return 1;
2232 return 0;
2235 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2236 set PBASE, PSTART, and PEND which correspond to the base of the address,
2237 the starting offset, and ending offset respectively.
2239 ADDR is known to be a nonvarying address. */
2241 /* ??? Despite what the comments say, this function is in fact frequently
2242 passed varying addresses. This does not appear to cause any problems. */
2244 static void
2245 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2246 rtx addr;
2247 int size;
2248 rtx *pbase;
2249 HOST_WIDE_INT *pstart, *pend;
2251 rtx base;
2252 HOST_WIDE_INT start, end;
2254 base = addr;
2255 start = 0;
2256 end = 0;
2258 /* Registers with nonvarying addresses usually have constant equivalents;
2259 but the frame pointer register is also possible. */
2260 if (GET_CODE (base) == REG
2261 && qty_const != 0
2262 && REGNO_QTY_VALID_P (REGNO (base))
2263 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2264 && qty_const[reg_qty[REGNO (base)]] != 0)
2265 base = qty_const[reg_qty[REGNO (base)]];
2266 else if (GET_CODE (base) == PLUS
2267 && GET_CODE (XEXP (base, 1)) == CONST_INT
2268 && GET_CODE (XEXP (base, 0)) == REG
2269 && qty_const != 0
2270 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2271 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2272 == GET_MODE (XEXP (base, 0)))
2273 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2275 start = INTVAL (XEXP (base, 1));
2276 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2278 /* This can happen as the result of virtual register instantiation,
2279 if the initial offset is too large to be a valid address. */
2280 else if (GET_CODE (base) == PLUS
2281 && GET_CODE (XEXP (base, 0)) == REG
2282 && GET_CODE (XEXP (base, 1)) == REG
2283 && qty_const != 0
2284 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2285 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2286 == GET_MODE (XEXP (base, 0)))
2287 && qty_const[reg_qty[REGNO (XEXP (base, 0))]]
2288 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2289 && (qty_mode[reg_qty[REGNO (XEXP (base, 1))]]
2290 == GET_MODE (XEXP (base, 1)))
2291 && qty_const[reg_qty[REGNO (XEXP (base, 1))]])
2293 rtx tem = qty_const[reg_qty[REGNO (XEXP (base, 1))]];
2294 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2296 /* One of the two values must be a constant. */
2297 if (GET_CODE (base) != CONST_INT)
2299 if (GET_CODE (tem) != CONST_INT)
2300 abort ();
2301 start = INTVAL (tem);
2303 else
2305 start = INTVAL (base);
2306 base = tem;
2310 /* Handle everything that we can find inside an address that has been
2311 viewed as constant. */
2313 while (1)
2315 /* If no part of this switch does a "continue", the code outside
2316 will exit this loop. */
2318 switch (GET_CODE (base))
2320 case LO_SUM:
2321 /* By definition, operand1 of a LO_SUM is the associated constant
2322 address. Use the associated constant address as the base
2323 instead. */
2324 base = XEXP (base, 1);
2325 continue;
2327 case CONST:
2328 /* Strip off CONST. */
2329 base = XEXP (base, 0);
2330 continue;
2332 case PLUS:
2333 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2335 start += INTVAL (XEXP (base, 1));
2336 base = XEXP (base, 0);
2337 continue;
2339 break;
2341 case AND:
2342 /* Handle the case of an AND which is the negative of a power of
2343 two. This is used to represent unaligned memory operations. */
2344 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2345 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2347 set_nonvarying_address_components (XEXP (base, 0), size,
2348 pbase, pstart, pend);
2350 /* Assume the worst misalignment. START is affected, but not
2351 END, so compensate by adjusting SIZE. Don't lose any
2352 constant we already had. */
2354 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2355 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2356 end += *pend;
2357 base = *pbase;
2359 break;
2362 break;
2365 if (GET_CODE (base) == CONST_INT)
2367 start += INTVAL (base);
2368 base = const0_rtx;
2371 end = start + size;
2373 /* Set the return values. */
2374 *pbase = base;
2375 *pstart = start;
2376 *pend = end;
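/* Illustrative sketch (not from the original source): an address of
   the form (and ADDR (const_int -N)), N a power of two, rounds ADDR
   down to an N-byte boundary, so the access may actually begin up to
   N - 1 bytes below ADDR. That is why the AND case above widens the
   extent by -INTVAL (mask) - 1 bytes (INTVAL is negative).
   Numerically, for mask -8: */
#if 0 /* illustrative only */
#include <assert.h>

int
main (void)
{
  long addr = 0x1005, mask = -8;        /* exact_log2 (-mask) == 3 */
  long aligned = addr & mask;           /* rounded down to 0x1000 */
  assert (aligned == 0x1000);
  assert (-mask - 1 == 7);              /* worst-case slop added to SIZE */
  return 0;
}
#endif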
2379 /* Return 1 if X has a value that can vary even between two
2380 executions of the program. 0 means X can be compared reliably
2381 against certain constants or near-constants. */
2383 static int
2384 cse_rtx_varies_p (x)
2385 register rtx x;
2387 /* We need not check for X and the equivalence class being of the same
2388 mode because if X is equivalent to a constant in some mode, it
2389 doesn't vary in any mode. */
2391 if (GET_CODE (x) == REG
2392 && REGNO_QTY_VALID_P (REGNO (x))
2393 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2394 && qty_const[reg_qty[REGNO (x)]] != 0)
2395 return 0;
2397 if (GET_CODE (x) == PLUS
2398 && GET_CODE (XEXP (x, 1)) == CONST_INT
2399 && GET_CODE (XEXP (x, 0)) == REG
2400 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2401 && (GET_MODE (XEXP (x, 0))
2402 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2403 && qty_const[reg_qty[REGNO (XEXP (x, 0))]])
2404 return 0;
2406 /* This can happen as the result of virtual register instantiation, if
2407 the initial constant is too large to be a valid address. This gives
2408 us a three instruction sequence, load large offset into a register,
2409 load fp minus a constant into a register, then a MEM which is the
2410 sum of the two `constant' registers. */
2411 if (GET_CODE (x) == PLUS
2412 && GET_CODE (XEXP (x, 0)) == REG
2413 && GET_CODE (XEXP (x, 1)) == REG
2414 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2415 && (GET_MODE (XEXP (x, 0))
2416 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2417 && qty_const[reg_qty[REGNO (XEXP (x, 0))]]
2418 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2419 && (GET_MODE (XEXP (x, 1))
2420 == qty_mode[reg_qty[REGNO (XEXP (x, 1))]])
2421 && qty_const[reg_qty[REGNO (XEXP (x, 1))]])
2422 return 0;
2424 return rtx_varies_p (x);
2427 /* Canonicalize an expression:
2428 replace each register reference inside it
2429 with the "oldest" equivalent register.
2431 If INSN is non-zero and we are replacing a pseudo with a hard register
2432 or vice versa, validate_change is used to ensure that INSN remains valid
2433 after we make our substitution. The calls are made with IN_GROUP non-zero
2434 so apply_change_group must be called upon the outermost return from this
2435 function (unless INSN is zero). The result of apply_change_group can
2436 generally be discarded since the changes we are making are optional. */
2438 static rtx
2439 canon_reg (x, insn)
2440 rtx x;
2441 rtx insn;
2443 register int i;
2444 register enum rtx_code code;
2445 register char *fmt;
2447 if (x == 0)
2448 return x;
2450 code = GET_CODE (x);
2451 switch (code)
2453 case PC:
2454 case CC0:
2455 case CONST:
2456 case CONST_INT:
2457 case CONST_DOUBLE:
2458 case SYMBOL_REF:
2459 case LABEL_REF:
2460 case ADDR_VEC:
2461 case ADDR_DIFF_VEC:
2462 return x;
2464 case REG:
2466 register int first;
2468 /* Never replace a hard reg, because hard regs can appear
2469 in more than one machine mode, and we must preserve the mode
2470 of each occurrence. Also, some hard regs appear in
2471 MEMs that are shared and mustn't be altered. Don't try to
2472 replace any reg that maps to a reg of class NO_REGS. */
2473 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2474 || ! REGNO_QTY_VALID_P (REGNO (x)))
2475 return x;
2477 first = qty_first_reg[reg_qty[REGNO (x)]];
2478 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2479 : REGNO_REG_CLASS (first) == NO_REGS ? x
2480 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2484 fmt = GET_RTX_FORMAT (code);
2485 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2487 register int j;
2489 if (fmt[i] == 'e')
2491 rtx new = canon_reg (XEXP (x, i), insn);
2492 int insn_code;
2494 /* If replacing pseudo with hard reg or vice versa, ensure the
2495 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2496 if (insn != 0 && new != 0
2497 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2498 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2499 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2500 || (insn_code = recog_memoized (insn)) < 0
2501 || insn_n_dups[insn_code] > 0))
2502 validate_change (insn, &XEXP (x, i), new, 1);
2503 else
2504 XEXP (x, i) = new;
2506 else if (fmt[i] == 'E')
2507 for (j = 0; j < XVECLEN (x, i); j++)
2508 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2511 return x;
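/* Illustrative sketch (not from the original source): because
   canon_reg queues its replacements with IN_GROUP nonzero, a typical
   caller validates the whole batch at once and ignores the verdict —
   each replacement was optional. Schematically (validate_change and
   apply_change_group are the real recog.c entry points; the
   surrounding usage here is a placeholder): */
#if 0 /* illustrative only */
canon_reg (PATTERN (insn), insn);       /* queues validate_change calls */
apply_change_group ();                  /* commit or roll back as a unit */
#endif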
2514 /* LOC is a location within INSN that is an operand address (the contents of
2515 a MEM). Find the best equivalent address to use that is valid for this
2516 insn.
2518 On most CISC machines, complicated address modes are costly, and rtx_cost
2519 is a good approximation for that cost. However, most RISC machines have
2520 only a few (usually only one) memory reference formats. If an address is
2521 valid at all, it is often just as cheap as any other address. Hence, for
2522 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2523 costs of various addresses. For two addresses of equal cost, choose the one
2524 with the highest `rtx_cost' value as that has the potential of eliminating
2525 the most insns. When both costs are equal, we choose the first in the equivalence
2526 class. Note that we ignore the fact that pseudo registers are cheaper
2527 than hard registers here because we would also prefer the pseudo registers.
2530 static void
2531 find_best_addr (insn, loc)
2532 rtx insn;
2533 rtx *loc;
2535 struct table_elt *elt, *p;
2536 rtx addr = *loc;
2537 int our_cost;
2538 int found_better = 1;
2539 int save_do_not_record = do_not_record;
2540 int save_hash_arg_in_memory = hash_arg_in_memory;
2541 int save_hash_arg_in_struct = hash_arg_in_struct;
2542 int addr_volatile;
2543 int regno;
2544 unsigned hash;
2546 /* Do not try to replace constant addresses or addresses of local and
2547 argument slots. These MEM expressions are made only once and inserted
2548 in many instructions, as well as being used to control symbol table
2549 output. It is not safe to clobber them.
2551 There are some uncommon cases where the address is already in a register
2552 for some reason, but we cannot take advantage of that because we have
2553 no easy way to unshare the MEM. In addition, looking up all stack
2554 addresses is costly. */
2555 if ((GET_CODE (addr) == PLUS
2556 && GET_CODE (XEXP (addr, 0)) == REG
2557 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2558 && (regno = REGNO (XEXP (addr, 0)),
2559 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2560 || regno == ARG_POINTER_REGNUM))
2561 || (GET_CODE (addr) == REG
2562 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2563 || regno == HARD_FRAME_POINTER_REGNUM
2564 || regno == ARG_POINTER_REGNUM))
2565 || CONSTANT_ADDRESS_P (addr))
2566 return;
2568 /* If this address is not simply a register, try to fold it. This will
2569 sometimes simplify the expression. Many simplifications
2570 will not be valid, but some, usually applying the associative rule, will
2571 be valid and produce better code. */
2572 if (GET_CODE (addr) != REG)
2574 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2576 if (1
2577 #ifdef ADDRESS_COST
2578 && (ADDRESS_COST (folded) < ADDRESS_COST (addr)
2579 || (ADDRESS_COST (folded) == ADDRESS_COST (addr)
2580 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2581 #else
2582 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2583 #endif
2584 && validate_change (insn, loc, folded, 0))
2585 addr = folded;
2588 /* If this address is not in the hash table, we can't look for equivalences
2589 of the whole address. Also, ignore if volatile. */
2591 do_not_record = 0;
2592 hash = HASH (addr, Pmode);
2593 addr_volatile = do_not_record;
2594 do_not_record = save_do_not_record;
2595 hash_arg_in_memory = save_hash_arg_in_memory;
2596 hash_arg_in_struct = save_hash_arg_in_struct;
2598 if (addr_volatile)
2599 return;
2601 elt = lookup (addr, hash, Pmode);
2603 #ifndef ADDRESS_COST
2604 if (elt)
2606 our_cost = elt->cost;
2608 /* Find the lowest cost below ours that works. */
2609 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2610 if (elt->cost < our_cost
2611 && (GET_CODE (elt->exp) == REG
2612 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2613 && validate_change (insn, loc,
2614 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2615 return;
2617 #else
2619 if (elt)
2621 /* We need to find the best (under the criteria documented above) entry
2622 in the class that is valid. We use the `flag' field to indicate
2623 choices that were invalid and iterate until we can't find a better
2624 one that hasn't already been tried. */
2626 for (p = elt->first_same_value; p; p = p->next_same_value)
2627 p->flag = 0;
2629 while (found_better)
2631 int best_addr_cost = ADDRESS_COST (*loc);
2632 int best_rtx_cost = (elt->cost + 1) >> 1;
2633 struct table_elt *best_elt = elt;
2635 found_better = 0;
2636 for (p = elt->first_same_value; p; p = p->next_same_value)
2637 if (! p->flag
2638 && (GET_CODE (p->exp) == REG
2639 || exp_equiv_p (p->exp, p->exp, 1, 0))
2640 && (ADDRESS_COST (p->exp) < best_addr_cost
2641 || (ADDRESS_COST (p->exp) == best_addr_cost
2642 && (p->cost + 1) >> 1 > best_rtx_cost)))
2644 found_better = 1;
2645 best_addr_cost = ADDRESS_COST (p->exp);
2646 best_rtx_cost = (p->cost + 1) >> 1;
2647 best_elt = p;
2650 if (found_better)
2652 if (validate_change (insn, loc,
2653 canon_reg (copy_rtx (best_elt->exp),
2654 NULL_RTX), 0))
2655 return;
2656 else
2657 best_elt->flag = 1;
2662 /* If the address is a binary operation with the first operand a register
2663 and the second a constant, do the same as above, but looking for
2664 equivalences of the register. Then try to simplify before checking for
2665 the best address to use. This catches a few cases: First is when we
2666 have REG+const and the register is another REG+const. We can often merge
2667 the constants and eliminate one insn and one register. It may also be
2668 that a machine has a cheap REG+REG+const. Finally, this improves the
2669 code on the Alpha for unaligned byte stores. */
2671 if (flag_expensive_optimizations
2672 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2673 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2674 && GET_CODE (XEXP (*loc, 0)) == REG
2675 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2677 rtx c = XEXP (*loc, 1);
2679 do_not_record = 0;
2680 hash = HASH (XEXP (*loc, 0), Pmode);
2681 do_not_record = save_do_not_record;
2682 hash_arg_in_memory = save_hash_arg_in_memory;
2683 hash_arg_in_struct = save_hash_arg_in_struct;
2685 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2686 if (elt == 0)
2687 return;
2689 /* We need to find the best (under the criteria documented above) entry
2690 in the class that is valid. We use the `flag' field to indicate
2691 choices that were invalid and iterate until we can't find a better
2692 one that hasn't already been tried. */
2694 for (p = elt->first_same_value; p; p = p->next_same_value)
2695 p->flag = 0;
2697 while (found_better)
2699 int best_addr_cost = ADDRESS_COST (*loc);
2700 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2701 struct table_elt *best_elt = elt;
2702 rtx best_rtx = *loc;
2703 int count;
2705 /* This is at worst case an O(n^2) algorithm, so limit our search
2706 to the first 32 elements on the list. This avoids trouble
2707 compiling code with very long basic blocks that can easily
2708 call cse_gen_binary so many times that we run out of memory. */
2710 found_better = 0;
2711 for (p = elt->first_same_value, count = 0;
2712 p && count < 32;
2713 p = p->next_same_value, count++)
2714 if (! p->flag
2715 && (GET_CODE (p->exp) == REG
2716 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2718 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2720 if ((ADDRESS_COST (new) < best_addr_cost
2721 || (ADDRESS_COST (new) == best_addr_cost
2722 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2724 found_better = 1;
2725 best_addr_cost = ADDRESS_COST (new);
2726 best_rtx_cost = (COST (new) + 1) >> 1;
2727 best_elt = p;
2728 best_rtx = new;
2732 if (found_better)
2734 if (validate_change (insn, loc,
2735 canon_reg (copy_rtx (best_rtx),
2736 NULL_RTX), 0))
2737 return;
2738 else
2739 best_elt->flag = 1;
2743 #endif
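/* Illustrative sketch (not from the original source): both loops
   above are instances of "repeatedly pick the cheapest untried
   candidate; if it fails validation, flag it and try the next best".
   Stripped of the rtx details: */
#if 0 /* illustrative only */
struct cand { int cost; int tried; struct cand *next; };

extern int try_candidate (struct cand *);   /* like validate_change */

static void
pick_best (struct cand *list, int current_cost)
{
  int found_better = 1;

  while (found_better)
    {
      struct cand *p, *best = 0;
      int best_cost = current_cost;

      found_better = 0;
      for (p = list; p; p = p->next)
        if (! p->tried && p->cost < best_cost)
          {
            found_better = 1;
            best_cost = p->cost;
            best = p;
          }

      if (found_better)
        {
          if (try_candidate (best))
            return;             /* committed */
          best->tried = 1;      /* invalid: skip it from now on */
        }
    }
}
#endif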
2746 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2747 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2748 find what values are being compared.
2750 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2751 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2752 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2753 compared to produce cc0.
2755 The return value is the comparison operator and is either CODE itself
2756 or the code corresponding to the inverse of the comparison. */
2758 static enum rtx_code
2759 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2760 enum rtx_code code;
2761 rtx *parg1, *parg2;
2762 enum machine_mode *pmode1, *pmode2;
2764 rtx arg1, arg2;
2766 arg1 = *parg1, arg2 = *parg2;
2768 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2770 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2772 /* Set non-zero when we find something of interest. */
2773 rtx x = 0;
2774 int reverse_code = 0;
2775 struct table_elt *p = 0;
2777 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2778 On machines with CC0, this is the only case that can occur, since
2779 fold_rtx will return the COMPARE or item being compared with zero
2780 when given CC0. */
2782 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2783 x = arg1;
2785 /* If ARG1 is a comparison operator and CODE is testing for
2786 STORE_FLAG_VALUE, get the inner arguments. */
2788 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2790 if (code == NE
2791 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2792 && code == LT && STORE_FLAG_VALUE == -1)
2793 #ifdef FLOAT_STORE_FLAG_VALUE
2794 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2795 && FLOAT_STORE_FLAG_VALUE < 0)
2796 #endif
2798 x = arg1;
2799 else if (code == EQ
2800 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2801 && code == GE && STORE_FLAG_VALUE == -1)
2802 #ifdef FLOAT_STORE_FLAG_VALUE
2803 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2804 && FLOAT_STORE_FLAG_VALUE < 0)
2805 #endif
2807 x = arg1, reverse_code = 1;
2810 /* ??? We could also check for
2812 (ne (and (eq (...) (const_int 1))) (const_int 0))
2814 and related forms, but let's wait until we see them occurring. */
2816 if (x == 0)
2817 /* Look up ARG1 in the hash table and see if it has an equivalence
2818 that lets us see what is being compared. */
2819 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2820 GET_MODE (arg1));
2821 if (p) p = p->first_same_value;
2823 for (; p; p = p->next_same_value)
2825 enum machine_mode inner_mode = GET_MODE (p->exp);
2827 /* If the entry isn't valid, skip it. */
2828 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2829 continue;
2831 if (GET_CODE (p->exp) == COMPARE
2832 /* Another possibility is that this machine has a compare insn
2833 that includes the comparison code. In that case, ARG1 would
2834 be equivalent to a comparison operation that would set ARG1 to
2835 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2836 ORIG_CODE is the actual comparison being done; if it is an EQ,
2837 we must reverse ORIG_CODE. On machines with a negative value
2838 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2839 || ((code == NE
2840 || (code == LT
2841 && GET_MODE_CLASS (inner_mode) == MODE_INT
2842 && (GET_MODE_BITSIZE (inner_mode)
2843 <= HOST_BITS_PER_WIDE_INT)
2844 && (STORE_FLAG_VALUE
2845 & ((HOST_WIDE_INT) 1
2846 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2847 #ifdef FLOAT_STORE_FLAG_VALUE
2848 || (code == LT
2849 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2850 && FLOAT_STORE_FLAG_VALUE < 0)
2851 #endif
2853 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2855 x = p->exp;
2856 break;
2858 else if ((code == EQ
2859 || (code == GE
2860 && GET_MODE_CLASS (inner_mode) == MODE_INT
2861 && (GET_MODE_BITSIZE (inner_mode)
2862 <= HOST_BITS_PER_WIDE_INT)
2863 && (STORE_FLAG_VALUE
2864 & ((HOST_WIDE_INT) 1
2865 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2866 #ifdef FLOAT_STORE_FLAG_VALUE
2867 || (code == GE
2868 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2869 && FLOAT_STORE_FLAG_VALUE < 0)
2870 #endif
2872 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2874 reverse_code = 1;
2875 x = p->exp;
2876 break;
2879 /* If this is fp + constant, the equivalent is a better operand since
2880 it may let us predict the value of the comparison. */
2881 else if (NONZERO_BASE_PLUS_P (p->exp))
2883 arg1 = p->exp;
2884 continue;
2888 /* If we didn't find a useful equivalence for ARG1, we are done.
2889 Otherwise, set up for the next iteration. */
2890 if (x == 0)
2891 break;
2893 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2894 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2895 code = GET_CODE (x);
2897 if (reverse_code)
2898 code = reverse_condition (code);
2901 /* Return our results. Return the modes from before fold_rtx
2902 because fold_rtx might produce const_int, and then it's too late. */
2903 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2904 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2906 return code;
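/* Illustrative sketch (not from the original source): when the
   equivalence found above computes the opposite of the condition
   being tested, the code is flipped with reverse_condition (defined
   in jump.c). Its effect on the integer codes is the obvious
   pairing: */
#if 0 /* illustrative only */
static enum rtx_code
reverse_condition_sketch (enum rtx_code code)
{
  switch (code)
    {
    case EQ:  return NE;        case NE:  return EQ;
    case LT:  return GE;        case GE:  return LT;
    case GT:  return LE;        case LE:  return GT;
    case LTU: return GEU;       case GEU: return LTU;
    case GTU: return LEU;       case LEU: return GTU;
    default:  abort ();
    }
}
#endif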
2909 /* Try to simplify a unary operation CODE whose output mode is to be
2910 MODE with input operand OP whose mode was originally OP_MODE.
2911 Return zero if no simplification can be made. */
2914 simplify_unary_operation (code, mode, op, op_mode)
2915 enum rtx_code code;
2916 enum machine_mode mode;
2917 rtx op;
2918 enum machine_mode op_mode;
2920 register int width = GET_MODE_BITSIZE (mode);
2922 /* The order of these tests is critical so that, for example, we don't
2923 check the wrong mode (input vs. output) for a conversion operation,
2924 such as FIX. At some point, this should be simplified. */
2926 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2928 if (code == FLOAT && GET_MODE (op) == VOIDmode
2929 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2931 HOST_WIDE_INT hv, lv;
2932 REAL_VALUE_TYPE d;
2934 if (GET_CODE (op) == CONST_INT)
2935 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2936 else
2937 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2939 #ifdef REAL_ARITHMETIC
2940 REAL_VALUE_FROM_INT (d, lv, hv, mode);
2941 #else
2942 if (hv < 0)
2944 d = (double) (~ hv);
2945 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2946 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2947 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
2948 d = (- d - 1.0);
2950 else
2952 d = (double) hv;
2953 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2954 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2955 d += (double) (unsigned HOST_WIDE_INT) lv;
2957 #endif /* REAL_ARITHMETIC */
2958 d = real_value_truncate (mode, d);
2959 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2961 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
2962 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2964 HOST_WIDE_INT hv, lv;
2965 REAL_VALUE_TYPE d;
2967 if (GET_CODE (op) == CONST_INT)
2968 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2969 else
2970 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2972 if (op_mode == VOIDmode)
2974 /* We don't know how to interpret negative-looking numbers in
2975 this case, so don't try to fold those. */
2976 if (hv < 0)
2977 return 0;
2979 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
2981 else
2982 hv = 0, lv &= GET_MODE_MASK (op_mode);
2984 #ifdef REAL_ARITHMETIC
2985 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
2986 #else
2988 d = (double) (unsigned HOST_WIDE_INT) hv;
2989 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2990 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2991 d += (double) (unsigned HOST_WIDE_INT) lv;
2992 #endif /* REAL_ARITHMETIC */
2993 d = real_value_truncate (mode, d);
2994 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2996 #endif
2998 if (GET_CODE (op) == CONST_INT
2999 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3001 register HOST_WIDE_INT arg0 = INTVAL (op);
3002 register HOST_WIDE_INT val;
3004 switch (code)
3006 case NOT:
3007 val = ~ arg0;
3008 break;
3010 case NEG:
3011 val = - arg0;
3012 break;
3014 case ABS:
3015 val = (arg0 >= 0 ? arg0 : - arg0);
3016 break;
3018 case FFS:
3019 /* Don't use ffs here. Instead, get low order bit and then its
3020 number. If arg0 is zero, this will return 0, as desired. */
3021 arg0 &= GET_MODE_MASK (mode);
3022 val = exact_log2 (arg0 & (- arg0)) + 1;
3023 break;
3025 case TRUNCATE:
3026 val = arg0;
3027 break;
3029 case ZERO_EXTEND:
3030 if (op_mode == VOIDmode)
3031 op_mode = mode;
3032 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3034 /* If we were really extending the mode,
3035 we would have to distinguish between zero-extension
3036 and sign-extension. */
3037 if (width != GET_MODE_BITSIZE (op_mode))
3038 abort ();
3039 val = arg0;
3041 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3042 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3043 else
3044 return 0;
3045 break;
3047 case SIGN_EXTEND:
3048 if (op_mode == VOIDmode)
3049 op_mode = mode;
3050 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3052 /* If we were really extending the mode,
3053 we would have to distinguish between zero-extension
3054 and sign-extension. */
3055 if (width != GET_MODE_BITSIZE (op_mode))
3056 abort ();
3057 val = arg0;
3059 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3061 val
3062 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3063 if (val
3064 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3065 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3067 else
3068 return 0;
3069 break;
3071 case SQRT:
3072 return 0;
3074 default:
3075 abort ();
3078 /* Clear the bits that don't belong in our mode,
3079 unless they and our sign bit are all one.
3080 So we get either a reasonable negative value or a reasonable
3081 unsigned value for this mode. */
3082 if (width < HOST_BITS_PER_WIDE_INT
3083 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3084 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3085 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3087 return GEN_INT (val);
3090 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3091 for a DImode operation on a CONST_INT. */
3092 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3093 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3095 HOST_WIDE_INT l1, h1, lv, hv;
3097 if (GET_CODE (op) == CONST_DOUBLE)
3098 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3099 else
3100 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3102 switch (code)
3104 case NOT:
3105 lv = ~ l1;
3106 hv = ~ h1;
3107 break;
3109 case NEG:
3110 neg_double (l1, h1, &lv, &hv);
3111 break;
3113 case ABS:
3114 if (h1 < 0)
3115 neg_double (l1, h1, &lv, &hv);
3116 else
3117 lv = l1, hv = h1;
3118 break;
3120 case FFS:
3121 hv = 0;
3122 if (l1 == 0)
3123 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3124 else
3125 lv = exact_log2 (l1 & (-l1)) + 1;
3126 break;
3128 case TRUNCATE:
3129 /* This is just a change-of-mode, so do nothing. */
3130 lv = l1, hv = h1;
3131 break;
3133 case ZERO_EXTEND:
3134 if (op_mode == VOIDmode
3135 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3136 return 0;
3138 hv = 0;
3139 lv = l1 & GET_MODE_MASK (op_mode);
3140 break;
3142 case SIGN_EXTEND:
3143 if (op_mode == VOIDmode
3144 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3145 return 0;
3146 else
3148 lv = l1 & GET_MODE_MASK (op_mode);
3149 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3150 && (lv & ((HOST_WIDE_INT) 1
3151 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3152 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3154 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3156 break;
3158 case SQRT:
3159 return 0;
3161 default:
3162 return 0;
3165 return immed_double_const (lv, hv, mode);
3168 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3169 else if (GET_CODE (op) == CONST_DOUBLE
3170 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3172 REAL_VALUE_TYPE d;
3173 jmp_buf handler;
3174 rtx x;
3176 if (setjmp (handler))
3177 /* There used to be a warning here, but that is inadvisable.
3178 People may want to cause traps, and the natural way
3179 to do it should not get a warning. */
3180 return 0;
3182 set_float_handler (handler);
3184 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3186 switch (code)
3188 case NEG:
3189 d = REAL_VALUE_NEGATE (d);
3190 break;
3192 case ABS:
3193 if (REAL_VALUE_NEGATIVE (d))
3194 d = REAL_VALUE_NEGATE (d);
3195 break;
3197 case FLOAT_TRUNCATE:
3198 d = real_value_truncate (mode, d);
3199 break;
3201 case FLOAT_EXTEND:
3202 /* All this does is change the mode. */
3203 break;
3205 case FIX:
3206 d = REAL_VALUE_RNDZINT (d);
3207 break;
3209 case UNSIGNED_FIX:
3210 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3211 break;
3213 case SQRT:
3214 return 0;
3216 default:
3217 abort ();
3220 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3221 set_float_handler (NULL_PTR);
3222 return x;
3225 else if (GET_CODE (op) == CONST_DOUBLE
3226 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3227 && GET_MODE_CLASS (mode) == MODE_INT
3228 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3230 REAL_VALUE_TYPE d;
3231 jmp_buf handler;
3232 HOST_WIDE_INT val;
3234 if (setjmp (handler))
3235 return 0;
3237 set_float_handler (handler);
3239 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3241 switch (code)
3243 case FIX:
3244 val = REAL_VALUE_FIX (d);
3245 break;
3247 case UNSIGNED_FIX:
3248 val = REAL_VALUE_UNSIGNED_FIX (d);
3249 break;
3251 default:
3252 abort ();
3255 set_float_handler (NULL_PTR);
3257 /* Clear the bits that don't belong in our mode,
3258 unless they and our sign bit are all one.
3259 So we get either a reasonable negative value or a reasonable
3260 unsigned value for this mode. */
3261 if (width < HOST_BITS_PER_WIDE_INT
3262 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3263 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3264 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3266 /* If this would be an entire word for the target, but is not for
3267 the host, then sign-extend on the host so that the number will look
3268 the same on the host as it would on the target.
3270 For example, when building a 64 bit alpha hosted 32 bit sparc
3271 targeted compiler, then we want the 32 bit unsigned value -1 to be
3272 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3273 The latter confuses the sparc backend. */
3275 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3276 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3277 val |= ((HOST_WIDE_INT) (-1) << width);
3279 return GEN_INT (val);
3281 #endif
3282 /* This was formerly used only for non-IEEE float.
3283 eggert@twinsun.com says it is safe for IEEE also. */
3284 else
3286 /* There are some simplifications we can do even if the operands
3287 aren't constant. */
3288 switch (code)
3290 case NEG:
3291 case NOT:
3292 /* (not (not X)) == X, similarly for NEG. */
3293 if (GET_CODE (op) == code)
3294 return XEXP (op, 0);
3295 break;
3297 case SIGN_EXTEND:
3298 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3299 becomes just the MINUS if its mode is MODE. This allows
3300 folding switch statements on machines using casesi (such as
3301 the Vax). */
3302 if (GET_CODE (op) == TRUNCATE
3303 && GET_MODE (XEXP (op, 0)) == mode
3304 && GET_CODE (XEXP (op, 0)) == MINUS
3305 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3306 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3307 return XEXP (op, 0);
3309 #ifdef POINTERS_EXTEND_UNSIGNED
3310 if (! POINTERS_EXTEND_UNSIGNED
3311 && mode == Pmode && GET_MODE (op) == ptr_mode
3312 && CONSTANT_P (op))
3313 return convert_memory_address (Pmode, op);
3314 #endif
3315 break;
3317 #ifdef POINTERS_EXTEND_UNSIGNED
3318 case ZERO_EXTEND:
3319 if (POINTERS_EXTEND_UNSIGNED
3320 && mode == Pmode && GET_MODE (op) == ptr_mode
3321 && CONSTANT_P (op))
3322 return convert_memory_address (Pmode, op);
3323 break;
3324 #endif
3327 return 0;
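/* Illustrative sketch (not from the original source): the
   SIGN_EXTEND/ZERO_EXTEND folding above is plain mask-and-adjust
   arithmetic on a host integer. For an 8-bit source mode: */
#if 0 /* illustrative only */
#include <assert.h>

int
main (void)
{
  long arg = 0xABCDEF, bits = 8;
  long mask = ~(-1L << bits);           /* 0xFF, like GET_MODE_MASK */
  long zext = arg & mask;               /* zero-extend: 0xEF */
  long sext = zext;

  if (sext & (1L << (bits - 1)))        /* narrow mode's sign bit set? */
    sext -= 1L << bits;                 /* then sign-extend: -17 */

  assert (zext == 0xEF && sext == -17);
  return 0;
}
#endif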
3331 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3332 and OP1. Return 0 if no simplification is possible.
3334 Don't use this for relational operations such as EQ or LT.
3335 Use simplify_relational_operation instead. */
3338 simplify_binary_operation (code, mode, op0, op1)
3339 enum rtx_code code;
3340 enum machine_mode mode;
3341 rtx op0, op1;
3343 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3344 HOST_WIDE_INT val;
3345 int width = GET_MODE_BITSIZE (mode);
3346 rtx tem;
3348 /* Relational operations don't work here. We must know the mode
3349 of the operands in order to do the comparison correctly.
3350 Assuming a full word can give incorrect results.
3351 Consider comparing 128 with -128 in QImode. */
3353 if (GET_RTX_CLASS (code) == '<')
3354 abort ();
3356 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3357 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3358 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3359 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3361 REAL_VALUE_TYPE f0, f1, value;
3362 jmp_buf handler;
3364 if (setjmp (handler))
3365 return 0;
3367 set_float_handler (handler);
3369 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3370 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3371 f0 = real_value_truncate (mode, f0);
3372 f1 = real_value_truncate (mode, f1);
3374 #ifdef REAL_ARITHMETIC
3375 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3376 #else
3377 switch (code)
3379 case PLUS:
3380 value = f0 + f1;
3381 break;
3382 case MINUS:
3383 value = f0 - f1;
3384 break;
3385 case MULT:
3386 value = f0 * f1;
3387 break;
3388 case DIV:
3389 #ifndef REAL_INFINITY
3390 if (f1 == 0)
3391 return 0;
3392 #endif
3393 value = f0 / f1;
3394 break;
3395 case SMIN:
3396 value = MIN (f0, f1);
3397 break;
3398 case SMAX:
3399 value = MAX (f0, f1);
3400 break;
3401 default:
3402 abort ();
3404 #endif
3406 value = real_value_truncate (mode, value);
3407 set_float_handler (NULL_PTR);
3408 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3410 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3412 /* We can fold some multi-word operations. */
3413 if (GET_MODE_CLASS (mode) == MODE_INT
3414 && width == HOST_BITS_PER_WIDE_INT * 2
3415 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3416 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3418 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3420 if (GET_CODE (op0) == CONST_DOUBLE)
3421 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3422 else
3423 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3425 if (GET_CODE (op1) == CONST_DOUBLE)
3426 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3427 else
3428 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3430 switch (code)
3432 case MINUS:
3433 /* A - B == A + (-B). */
3434 neg_double (l2, h2, &lv, &hv);
3435 l2 = lv, h2 = hv;
3437 /* ... fall through ... */
3439 case PLUS:
3440 add_double (l1, h1, l2, h2, &lv, &hv);
3441 break;
3443 case MULT:
3444 mul_double (l1, h1, l2, h2, &lv, &hv);
3445 break;
3447 case DIV: case MOD: case UDIV: case UMOD:
3448 /* We'd need to include tree.h to do this and it doesn't seem worth
3449 it. */
3450 return 0;
3452 case AND:
3453 lv = l1 & l2, hv = h1 & h2;
3454 break;
3456 case IOR:
3457 lv = l1 | l2, hv = h1 | h2;
3458 break;
3460 case XOR:
3461 lv = l1 ^ l2, hv = h1 ^ h2;
3462 break;
3464 case SMIN:
3465 if (h1 < h2
3466 || (h1 == h2
3467 && ((unsigned HOST_WIDE_INT) l1
3468 < (unsigned HOST_WIDE_INT) l2)))
3469 lv = l1, hv = h1;
3470 else
3471 lv = l2, hv = h2;
3472 break;
3474 case SMAX:
3475 if (h1 > h2
3476 || (h1 == h2
3477 && ((unsigned HOST_WIDE_INT) l1
3478 > (unsigned HOST_WIDE_INT) l2)))
3479 lv = l1, hv = h1;
3480 else
3481 lv = l2, hv = h2;
3482 break;
3484 case UMIN:
3485 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3486 || (h1 == h2
3487 && ((unsigned HOST_WIDE_INT) l1
3488 < (unsigned HOST_WIDE_INT) l2)))
3489 lv = l1, hv = h1;
3490 else
3491 lv = l2, hv = h2;
3492 break;
3494 case UMAX:
3495 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3496 || (h1 == h2
3497 && ((unsigned HOST_WIDE_INT) l1
3498 > (unsigned HOST_WIDE_INT) l2)))
3499 lv = l1, hv = h1;
3500 else
3501 lv = l2, hv = h2;
3502 break;
3504 case LSHIFTRT: case ASHIFTRT:
3505 case ASHIFT:
3506 case ROTATE: case ROTATERT:
3507 #ifdef SHIFT_COUNT_TRUNCATED
3508 if (SHIFT_COUNT_TRUNCATED)
3509 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3510 #endif
3512 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3513 return 0;
3515 if (code == LSHIFTRT || code == ASHIFTRT)
3516 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3517 code == ASHIFTRT);
3518 else if (code == ASHIFT)
3519 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3520 else if (code == ROTATE)
3521 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3522 else /* code == ROTATERT */
3523 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3524 break;
3526 default:
3527 return 0;
3530 return immed_double_const (lv, hv, mode);
3533 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3534 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3536 /* Even if we can't compute a constant result,
3537 there are some cases worth simplifying. */
3539 switch (code)
3541 case PLUS:
3542 /* In IEEE floating point, x+0 is not the same as x. Similarly
3543 for the other optimizations below. */
3544 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3545 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3546 break;
3548 if (op1 == CONST0_RTX (mode))
3549 return op0;
3551 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3552 if (GET_CODE (op0) == NEG)
3553 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3554 else if (GET_CODE (op1) == NEG)
3555 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3557 /* Handle both-operands-constant cases. We can only add
3558 CONST_INTs to constants since the sum of relocatable symbols
3559 can't be handled by most assemblers. Don't add CONST_INT
3560 to CONST_INT since overflow won't be computed properly if wider
3561 than HOST_BITS_PER_WIDE_INT. */
3563 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3564 && GET_CODE (op1) == CONST_INT)
3565 return plus_constant (op0, INTVAL (op1));
3566 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3567 && GET_CODE (op0) == CONST_INT)
3568 return plus_constant (op1, INTVAL (op0));
3570 /* See if this is something like X * C - X or vice versa or
3571 if the multiplication is written as a shift. If so, we can
3572 distribute and make a new multiply, shift, or maybe just
3573 have X (if C is 2 in the example above). But don't make a
3574 real multiply if we didn't have one before. */
3576 if (! FLOAT_MODE_P (mode))
3578 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3579 rtx lhs = op0, rhs = op1;
3580 int had_mult = 0;
3582 if (GET_CODE (lhs) == NEG)
3583 coeff0 = -1, lhs = XEXP (lhs, 0);
3584 else if (GET_CODE (lhs) == MULT
3585 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3587 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3588 had_mult = 1;
3590 else if (GET_CODE (lhs) == ASHIFT
3591 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3592 && INTVAL (XEXP (lhs, 1)) >= 0
3593 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3595 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3596 lhs = XEXP (lhs, 0);
3599 if (GET_CODE (rhs) == NEG)
3600 coeff1 = -1, rhs = XEXP (rhs, 0);
3601 else if (GET_CODE (rhs) == MULT
3602 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3604 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3605 had_mult = 1;
3607 else if (GET_CODE (rhs) == ASHIFT
3608 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3609 && INTVAL (XEXP (rhs, 1)) >= 0
3610 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3612 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3613 rhs = XEXP (rhs, 0);
3616 if (rtx_equal_p (lhs, rhs))
3618 tem = cse_gen_binary (MULT, mode, lhs,
3619 GEN_INT (coeff0 + coeff1));
3620 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3624 /* If one of the operands is a PLUS or a MINUS, see if we can
3625 simplify this by the associative law.
3626 Don't use the associative law for floating point.
3627 The inaccuracy makes it nonassociative,
3628 and subtle programs can break if operations are associated. */
3630 if (INTEGRAL_MODE_P (mode)
3631 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3632 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3633 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3634 return tem;
3635 break;
3637 case COMPARE:
3638 #ifdef HAVE_cc0
3639 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3640 using cc0, in which case we want to leave it as a COMPARE
3641 so we can distinguish it from a register-register-copy.
3643 In IEEE floating point, x-0 is not the same as x. */
3645 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3646 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3647 && op1 == CONST0_RTX (mode))
3648 return op0;
3649 #else
3650 /* Do nothing here. */
3651 #endif
3652 break;
3654 case MINUS:
3655 /* None of these optimizations can be done for IEEE
3656 floating point. */
3657 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3658 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3659 break;
3661 /* We can't assume x-x is 0 even with non-IEEE floating point,
3662 but since it is zero except in very strange circumstances, we
3663 will treat it as zero with -ffast-math. */
3664 if (rtx_equal_p (op0, op1)
3665 && ! side_effects_p (op0)
3666 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3667 return CONST0_RTX (mode);
3669 /* Change subtraction from zero into negation. */
3670 if (op0 == CONST0_RTX (mode))
3671 return gen_rtx (NEG, mode, op1);
3673 /* (-1 - a) is ~a. */
3674 if (op0 == constm1_rtx)
3675 return gen_rtx (NOT, mode, op1);
3677 /* Subtracting 0 has no effect. */
3678 if (op1 == CONST0_RTX (mode))
3679 return op0;
3681 /* See if this is something like X * C - X or vice versa or
3682 if the multiplication is written as a shift. If so, we can
3683 distribute and make a new multiply, shift, or maybe just
3684 have X (if C is 2 in the example above). But don't make a
3685 real multiply if we didn't have one before. */
3687 if (! FLOAT_MODE_P (mode))
3689 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3690 rtx lhs = op0, rhs = op1;
3691 int had_mult = 0;
3693 if (GET_CODE (lhs) == NEG)
3694 coeff0 = -1, lhs = XEXP (lhs, 0);
3695 else if (GET_CODE (lhs) == MULT
3696 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3698 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3699 had_mult = 1;
3701 else if (GET_CODE (lhs) == ASHIFT
3702 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3703 && INTVAL (XEXP (lhs, 1)) >= 0
3704 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3706 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3707 lhs = XEXP (lhs, 0);
3710 if (GET_CODE (rhs) == NEG)
3711 coeff1 = - 1, rhs = XEXP (rhs, 0);
3712 else if (GET_CODE (rhs) == MULT
3713 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3715 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3716 had_mult = 1;
3718 else if (GET_CODE (rhs) == ASHIFT
3719 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3720 && INTVAL (XEXP (rhs, 1)) >= 0
3721 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3723 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3724 rhs = XEXP (rhs, 0);
3727 if (rtx_equal_p (lhs, rhs))
3729 tem = cse_gen_binary (MULT, mode, lhs,
3730 GEN_INT (coeff0 - coeff1));
3731 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3735 /* (a - (-b)) -> (a + b). */
3736 if (GET_CODE (op1) == NEG)
3737 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3739 /* If one of the operands is a PLUS or a MINUS, see if we can
3740 simplify this by the associative law.
3741 Don't use the associative law for floating point.
3742 The inaccuracy makes it nonassociative,
3743 and subtle programs can break if operations are associated. */
3745 if (INTEGRAL_MODE_P (mode)
3746 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3747 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3748 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3749 return tem;
3751 /* Don't let a relocatable value get a negative coeff. */
3752 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3753 return plus_constant (op0, - INTVAL (op1));
3755 /* (x - (x & y)) -> (x & ~y) */
3756 if (GET_CODE (op1) == AND)
3758 if (rtx_equal_p (op0, XEXP (op1, 0)))
3759 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 1)));
3760 if (rtx_equal_p (op0, XEXP (op1, 1)))
3761 return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 0)));
3763 break;
3765 case MULT:
3766 if (op1 == constm1_rtx)
3768 tem = simplify_unary_operation (NEG, mode, op0, mode);
3770 return tem ? tem : gen_rtx (NEG, mode, op0);
3773 /* In IEEE floating point, x*0 is not always 0. */
3774 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3775 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3776 && op1 == CONST0_RTX (mode)
3777 && ! side_effects_p (op0))
3778 return op1;
3780 /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
3781 However, ANSI says we can drop signals,
3782 so we can do this anyway. */
3783 if (op1 == CONST1_RTX (mode))
3784 return op0;
3786 /* Convert multiply by constant power of two into shift unless
3787 we are still generating RTL. This test is a kludge. */
3788 if (GET_CODE (op1) == CONST_INT
3789 && (val = exact_log2 (INTVAL (op1))) >= 0
3790 /* If the mode is larger than the host word size, and the
3791 uppermost bit is set, then this isn't a power of two due
3792 to implicit sign extension. */
3793 && (width <= HOST_BITS_PER_WIDE_INT
3794 || val != HOST_BITS_PER_WIDE_INT - 1)
3795 && ! rtx_equal_function_value_matters)
3796 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
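/* For instance, with op1 == (const_int 8), exact_log2 returns 3 and
x * 8 becomes (ashift x 3). A minimal sketch of the test exact_log2
performs (on the bit pattern of a HOST_WIDE_INT; the real definition
lives elsewhere in the compiler):

static int
exact_log2_sketch (unsigned HOST_WIDE_INT x)
{
int log = 0;
if (x == 0 || (x & (x - 1)) != 0)
return -1; /- zero or more than one bit set -/
while ((x >>= 1) != 0)
log++;
return log;
}

The width test above guards the case this misses: in a mode wider
than a host word, a CONST_INT with only the host sign bit set
denotes a sign-extended negative value, not a power of two. */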
3798 if (GET_CODE (op1) == CONST_DOUBLE
3799 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3801 REAL_VALUE_TYPE d;
3802 jmp_buf handler;
3803 int op1is2, op1ism1;
3805 if (setjmp (handler))
3806 return 0;
3808 set_float_handler (handler);
3809 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3810 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3811 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3812 set_float_handler (NULL_PTR);
3814 /* x*2 is x+x and x*(-1) is -x */
3815 if (op1is2 && GET_MODE (op0) == mode)
3816 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3818 else if (op1ism1 && GET_MODE (op0) == mode)
3819 return gen_rtx (NEG, mode, op0);
3821 break;
3823 case IOR:
3824 if (op1 == const0_rtx)
3825 return op0;
3826 if (GET_CODE (op1) == CONST_INT
3827 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3828 return op1;
3829 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3830 return op0;
3831 /* A | (~A) -> -1 */
3832 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3833 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3834 && ! side_effects_p (op0)
3835 && GET_MODE_CLASS (mode) != MODE_CC)
3836 return constm1_rtx;
3837 break;
3839 case XOR:
3840 if (op1 == const0_rtx)
3841 return op0;
3842 if (GET_CODE (op1) == CONST_INT
3843 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3844 return gen_rtx (NOT, mode, op0);
3845 if (op0 == op1 && ! side_effects_p (op0)
3846 && GET_MODE_CLASS (mode) != MODE_CC)
3847 return const0_rtx;
3848 break;
3850 case AND:
3851 if (op1 == const0_rtx && ! side_effects_p (op0))
3852 return const0_rtx;
3853 if (GET_CODE (op1) == CONST_INT
3854 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3855 return op0;
3856 if (op0 == op1 && ! side_effects_p (op0)
3857 && GET_MODE_CLASS (mode) != MODE_CC)
3858 return op0;
3859 /* A & (~A) -> 0 */
3860 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3861 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3862 && ! side_effects_p (op0)
3863 && GET_MODE_CLASS (mode) != MODE_CC)
3864 return const0_rtx;
3865 break;
3867 case UDIV:
3868 /* Convert divide by power of two into shift (divide by 1 handled
3869 below). */
3870 if (GET_CODE (op1) == CONST_INT
3871 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3872 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3874 /* ... fall through ... */
3876 case DIV:
3877 if (op1 == CONST1_RTX (mode))
3878 return op0;
3880 /* In IEEE floating point, 0/x is not always 0. */
3881 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3882 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3883 && op0 == CONST0_RTX (mode)
3884 && ! side_effects_p (op1))
3885 return op0;
3887 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3888 /* Change division by a constant into multiplication. Only do
3889 this with -ffast-math until an expert says it is safe in
3890 general. */
3891 else if (GET_CODE (op1) == CONST_DOUBLE
3892 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3893 && op1 != CONST0_RTX (mode)
3894 && flag_fast_math)
3896 REAL_VALUE_TYPE d;
3897 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3899 if (! REAL_VALUES_EQUAL (d, dconst0))
3901 #if defined (REAL_ARITHMETIC)
3902 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3903 return gen_rtx (MULT, mode, op0,
3904 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3905 #else
3906 return gen_rtx (MULT, mode, op0,
3907 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3908 #endif
3911 #endif
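/* For instance, with -ffast-math x / 2.0 becomes x * 0.5 exactly,
because 0.5 is representable. For a divisor like 3.0 the computed
reciprocal is rounded, so x * (1.0/3.0) can differ from x / 3.0 in
the last bit; that is why this is fenced behind flag_fast_math. */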
3912 break;
3914 case UMOD:
3915 /* Handle modulus by power of two (mod with 1 handled below). */
3916 if (GET_CODE (op1) == CONST_INT
3917 && exact_log2 (INTVAL (op1)) > 0)
3918 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
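/* For example, x % 16 becomes (and x 15) here. This is valid only
because UMOD is unsigned; a signed MOD of a negative operand does
not reduce to a mask, so that case is left alone. */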
3920 /* ... fall through ... */
3922 case MOD:
3923 if ((op0 == const0_rtx || op1 == const1_rtx)
3924 && ! side_effects_p (op0) && ! side_effects_p (op1))
3925 return const0_rtx;
3926 break;
3928 case ROTATERT:
3929 case ROTATE:
3930 /* Rotating ~0 always results in ~0. */
3931 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3932 && INTVAL (op0) == GET_MODE_MASK (mode)
3933 && ! side_effects_p (op1))
3934 return op0;
3936 /* ... fall through ... */
3938 case ASHIFT:
3939 case ASHIFTRT:
3940 case LSHIFTRT:
3941 if (op1 == const0_rtx)
3942 return op0;
3943 if (op0 == const0_rtx && ! side_effects_p (op1))
3944 return op0;
3945 break;
3947 case SMIN:
3948 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3949 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3950 && ! side_effects_p (op0))
3951 return op1;
3952 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3953 return op0;
3954 break;
3956 case SMAX:
3957 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3958 && (INTVAL (op1)
3959 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
3960 && ! side_effects_p (op0))
3961 return op1;
3962 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3963 return op0;
3964 break;
3966 case UMIN:
3967 if (op1 == const0_rtx && ! side_effects_p (op0))
3968 return op1;
3969 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3970 return op0;
3971 break;
3973 case UMAX:
3974 if (op1 == constm1_rtx && ! side_effects_p (op0))
3975 return op1;
3976 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3977 return op0;
3978 break;
3980 default:
3981 abort ();
3984 return 0;
3987 /* Get the integer argument values in two forms:
3988 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3990 arg0 = INTVAL (op0);
3991 arg1 = INTVAL (op1);
3993 if (width < HOST_BITS_PER_WIDE_INT)
3995 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3996 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3998 arg0s = arg0;
3999 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4000 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4002 arg1s = arg1;
4003 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4004 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4006 else
4008 arg0s = arg0;
4009 arg1s = arg1;
4012 /* Compute the value of the arithmetic. */
4014 switch (code)
4016 case PLUS:
4017 val = arg0s + arg1s;
4018 break;
4020 case MINUS:
4021 val = arg0s - arg1s;
4022 break;
4024 case MULT:
4025 val = arg0s * arg1s;
4026 break;
4028 case DIV:
4029 if (arg1s == 0)
4030 return 0;
4031 val = arg0s / arg1s;
4032 break;
4034 case MOD:
4035 if (arg1s == 0)
4036 return 0;
4037 val = arg0s % arg1s;
4038 break;
4040 case UDIV:
4041 if (arg1 == 0)
4042 return 0;
4043 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4044 break;
4046 case UMOD:
4047 if (arg1 == 0)
4048 return 0;
4049 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4050 break;
4052 case AND:
4053 val = arg0 & arg1;
4054 break;
4056 case IOR:
4057 val = arg0 | arg1;
4058 break;
4060 case XOR:
4061 val = arg0 ^ arg1;
4062 break;
4064 case LSHIFTRT:
4065 /* If shift count is undefined, don't fold it; let the machine do
4066 what it wants. But truncate it if the machine will do that. */
4067 if (arg1 < 0)
4068 return 0;
4070 #ifdef SHIFT_COUNT_TRUNCATED
4071 if (SHIFT_COUNT_TRUNCATED)
4072 arg1 %= width;
4073 #endif
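/* On a machine that uses only the low-order bits of the count
(SHIFT_COUNT_TRUNCATED), a count of 33 in a 32 bit mode acts as a
count of 1, so reducing arg1 modulo the width is safe; otherwise an
out-of-range count is left for the target to interpret. */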
4075 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4076 break;
4078 case ASHIFT:
4079 if (arg1 < 0)
4080 return 0;
4082 #ifdef SHIFT_COUNT_TRUNCATED
4083 if (SHIFT_COUNT_TRUNCATED)
4084 arg1 %= width;
4085 #endif
4087 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4088 break;
4090 case ASHIFTRT:
4091 if (arg1 < 0)
4092 return 0;
4094 #ifdef SHIFT_COUNT_TRUNCATED
4095 if (SHIFT_COUNT_TRUNCATED)
4096 arg1 %= width;
4097 #endif
4099 val = arg0s >> arg1;
4101 /* The bootstrap compiler may not have sign-extended the right shift.
4102 Manually extend the sign to ensure the bootstrap cc matches gcc. */
4103 if (arg0s < 0 && arg1 > 0)
4104 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4106 break;
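/* Equivalently, a host-independent arithmetic right shift could be
written as in this sketch (assuming two's complement; illustrative
only, not the code used here):

static HOST_WIDE_INT
arith_shift_right (HOST_WIDE_INT x, int n)
{
HOST_WIDE_INT r = x >> n;
if (x < 0 && n > 0)
r |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - n);
return r;
}

which performs exactly the manual extension done above. */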
4108 case ROTATERT:
4109 if (arg1 < 0)
4110 return 0;
4112 arg1 %= width;
4113 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4114 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4115 break;
4117 case ROTATE:
4118 if (arg1 < 0)
4119 return 0;
4121 arg1 %= width;
4122 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4123 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4124 break;
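/* A worked case: rotating the 8 bit value 0xb1 left by 4 yields
(0xb1 << 4) | (0xb1 >> 4), i.e. 0xb1b, and the masking to the
mode's width in the common exit code below reduces it to 0x1b. */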
4126 case COMPARE:
4127 /* Do nothing here. */
4128 return 0;
4130 case SMIN:
4131 val = arg0s <= arg1s ? arg0s : arg1s;
4132 break;
4134 case UMIN:
4135 val = ((unsigned HOST_WIDE_INT) arg0
4136 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4137 break;
4139 case SMAX:
4140 val = arg0s > arg1s ? arg0s : arg1s;
4141 break;
4143 case UMAX:
4144 val = ((unsigned HOST_WIDE_INT) arg0
4145 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4146 break;
4148 default:
4149 abort ();
4152 /* Clear the bits that don't belong in our mode, unless they and our sign
4153 bit are all one. So we get either a reasonable negative value or a
4154 reasonable unsigned value for this mode. */
4155 if (width < HOST_BITS_PER_WIDE_INT
4156 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4157 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4158 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4160 /* If this would be an entire word for the target, but is not for
4161 the host, then sign-extend on the host so that the number will look
4162 the same way on the host that it would on the target.
4164 For example, when building a 32 bit sparc targeted compiler hosted
4165 on a 64 bit alpha, we want the 32 bit unsigned value -1 to be
4166 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4167 The latter confuses the sparc backend. */
4169 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4170 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4171 val |= ((HOST_WIDE_INT) (-1) << width);
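/* Concretely, on a 64 bit host targeting a 32 bit word machine, a
32 bit result with bit 31 set, say 0xffffffff, is widened here to
the 64 bit value -1 rather than left as 0x00000000ffffffff. */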
4173 return GEN_INT (val);
4176 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4177 PLUS or MINUS.
4179 Rather than test for specific cases, we do this by a brute-force method
4180 and do all possible simplifications until no more changes occur. Then
4181 we rebuild the operation. */
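/* A worked example (illustrative only): simplifying
(minus (plus x 5) (plus x 2)) expands into the operands x (+),
x (-), 5 (+) and 2 (-). The first pass combines the constants,
5 - 2 = 3; a later pass cancels x against -x, leaving just
(const_int 3). */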
4183 static rtx
4184 simplify_plus_minus (code, mode, op0, op1)
4185 enum rtx_code code;
4186 enum machine_mode mode;
4187 rtx op0, op1;
4189 rtx ops[8];
4190 int negs[8];
4191 rtx result, tem;
4192 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4193 int first = 1, negate = 0, changed;
4194 int i, j;
4196 bzero ((char *) ops, sizeof ops);
4198 /* Set up the two operands and then expand them until nothing has been
4199 changed. If we run out of room in our array, give up; this should
4200 almost never happen. */
4202 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4204 changed = 1;
4205 while (changed)
4207 changed = 0;
4209 for (i = 0; i < n_ops; i++)
4210 switch (GET_CODE (ops[i]))
4212 case PLUS:
4213 case MINUS:
4214 if (n_ops == 7)
4215 return 0;
4217 ops[n_ops] = XEXP (ops[i], 1);
4218 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4219 ops[i] = XEXP (ops[i], 0);
4220 input_ops++;
4221 changed = 1;
4222 break;
4224 case NEG:
4225 ops[i] = XEXP (ops[i], 0);
4226 negs[i] = ! negs[i];
4227 changed = 1;
4228 break;
4230 case CONST:
4231 ops[i] = XEXP (ops[i], 0);
4232 input_consts++;
4233 changed = 1;
4234 break;
4236 case NOT:
4237 /* ~a -> (-a - 1) */
4238 if (n_ops != 7)
4240 ops[n_ops] = constm1_rtx;
4241 negs[n_ops++] = negs[i];
4242 ops[i] = XEXP (ops[i], 0);
4243 negs[i] = ! negs[i];
4244 changed = 1;
4246 break;
4248 case CONST_INT:
4249 if (negs[i])
4250 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4251 break;
4255 /* If we only have two operands, we can't do anything. */
4256 if (n_ops <= 2)
4257 return 0;
4259 /* Now simplify each pair of operands until nothing changes. The first
4260 time through just simplify constants against each other. */
4262 changed = 1;
4263 while (changed)
4265 changed = first;
4267 for (i = 0; i < n_ops - 1; i++)
4268 for (j = i + 1; j < n_ops; j++)
4269 if (ops[i] != 0 && ops[j] != 0
4270 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4272 rtx lhs = ops[i], rhs = ops[j];
4273 enum rtx_code ncode = PLUS;
4275 if (negs[i] && ! negs[j])
4276 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4277 else if (! negs[i] && negs[j])
4278 ncode = MINUS;
4280 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4281 if (tem)
4283 ops[i] = tem, ops[j] = 0;
4284 negs[i] = negs[i] && negs[j];
4285 if (GET_CODE (tem) == NEG)
4286 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4288 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4289 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4290 changed = 1;
4294 first = 0;
4297 /* Pack all the operands to the lower-numbered entries and give up if
4298 we didn't reduce the number of operands we had. Make sure we
4299 count a CONST as two operands. If we have the same number of
4300 operands, but have made more CONSTs than we had, this is also
4301 an improvement, so accept it. */
4303 for (i = 0, j = 0; j < n_ops; j++)
4304 if (ops[j] != 0)
4306 ops[i] = ops[j], negs[i++] = negs[j];
4307 if (GET_CODE (ops[j]) == CONST)
4308 n_consts++;
4311 if (i + n_consts > input_ops
4312 || (i + n_consts == input_ops && n_consts <= input_consts))
4313 return 0;
4315 n_ops = i;
4317 /* If we have a CONST_INT, put it last. */
4318 for (i = 0; i < n_ops - 1; i++)
4319 if (GET_CODE (ops[i]) == CONST_INT)
4321 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
4322 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4325 /* Put a non-negated operand first. If there aren't any, make all
4326 operands positive and negate the whole thing later. */
4327 for (i = 0; i < n_ops && negs[i]; i++)
4330 if (i == n_ops)
4332 for (i = 0; i < n_ops; i++)
4333 negs[i] = 0;
4334 negate = 1;
4336 else if (i != 0)
4338 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4339 j = negs[0], negs[0] = negs[i], negs[i] = j;
4342 /* Now make the result by performing the requested operations. */
4343 result = ops[0];
4344 for (i = 1; i < n_ops; i++)
4345 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4347 return negate ? gen_rtx (NEG, mode, result) : result;
4350 /* Make a binary operation by properly ordering the operands and
4351 seeing if the expression folds. */
4353 static rtx
4354 cse_gen_binary (code, mode, op0, op1)
4355 enum rtx_code code;
4356 enum machine_mode mode;
4357 rtx op0, op1;
4359 rtx tem;
4361 /* Put complex operands first and constants second if commutative. */
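/* For instance, (plus (const_int 4) (reg)) is reordered here to
(plus (reg) (const_int 4)), and (plus (reg) (mult ...)) to
(plus (mult ...) (reg)), so that equivalent expressions always
hash and compare the same way. */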
4362 if (GET_RTX_CLASS (code) == 'c'
4363 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4364 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4365 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4366 || (GET_CODE (op0) == SUBREG
4367 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4368 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4369 tem = op0, op0 = op1, op1 = tem;
4371 /* If this simplifies, do it. */
4372 tem = simplify_binary_operation (code, mode, op0, op1);
4374 if (tem)
4375 return tem;
4377 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4378 just form the operation. */
4380 if (code == PLUS && GET_CODE (op1) == CONST_INT
4381 && GET_MODE (op0) != VOIDmode)
4382 return plus_constant (op0, INTVAL (op1));
4383 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4384 && GET_MODE (op0) != VOIDmode)
4385 return plus_constant (op0, - INTVAL (op1));
4386 else
4387 return gen_rtx (code, mode, op0, op1);
4390 /* Like simplify_binary_operation except used for relational operators.
4391 MODE is the mode of the operands, not that of the result. If MODE
4392 is VOIDmode, both operands must also be VOIDmode and we compare the
4393 operands in "infinite precision".
4395 If no simplification is possible, this function returns zero. Otherwise,
4396 it returns either const_true_rtx or const0_rtx. */
4399 simplify_relational_operation (code, mode, op0, op1)
4400 enum rtx_code code;
4401 enum machine_mode mode;
4402 rtx op0, op1;
4404 int equal, op0lt, op0ltu, op1lt, op1ltu;
4405 rtx tem;
4407 /* If op0 is a compare, extract the comparison arguments from it. */
4408 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4409 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4411 /* We can't simplify MODE_CC values since we don't know what the
4412 actual comparison is. */
4413 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4414 #ifdef HAVE_cc0
4415 || op0 == cc0_rtx
4416 #endif
4418 return 0;
4420 /* For integer comparisons of A and B maybe we can simplify A - B and can
4421 then simplify a comparison of that with zero. If A and B are both either
4422 a register or a CONST_INT, this can't help; testing for these cases will
4423 prevent infinite recursion here and speed things up.
4425 If CODE is an unsigned comparison, then we can never do this optimization,
4426 because it gives an incorrect result if the subtraction wraps around zero.
4427 ANSI C defines unsigned operations such that they never overflow, and
4428 thus such cases cannot be ignored. */
4430 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4431 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4432 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4433 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4434 && code != GTU && code != GEU && code != LTU && code != LEU)
4435 return simplify_relational_operation (signed_condition (code),
4436 mode, tem, const0_rtx);
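/* Why unsigned comparisons are excluded: with 32 bit values,
(ltu 1 0xffffffff) is true, yet 1 - 0xffffffff wraps around to 2,
and a signed test of 2 against zero would wrongly say "not less".
Once the subtraction wraps, the sign of A - B says nothing about
the unsigned order of A and B. */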
4438 /* For non-IEEE floating-point, if the two operands are equal, we know the
4439 result. */
4440 if (rtx_equal_p (op0, op1)
4441 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4442 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4443 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4445 /* If the operands are floating-point constants, see if we can fold
4446 the result. */
4447 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4448 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4449 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4451 REAL_VALUE_TYPE d0, d1;
4452 jmp_buf handler;
4454 if (setjmp (handler))
4455 return 0;
4457 set_float_handler (handler);
4458 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4459 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4460 equal = REAL_VALUES_EQUAL (d0, d1);
4461 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4462 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4463 set_float_handler (NULL_PTR);
4465 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4467 /* Otherwise, see if the operands are both integers. */
4468 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4469 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4470 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4472 int width = GET_MODE_BITSIZE (mode);
4473 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4474 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4476 /* Get the two words comprising each integer constant. */
4477 if (GET_CODE (op0) == CONST_DOUBLE)
4479 l0u = l0s = CONST_DOUBLE_LOW (op0);
4480 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4482 else
4484 l0u = l0s = INTVAL (op0);
4485 h0u = 0, h0s = l0s < 0 ? -1 : 0;
4488 if (GET_CODE (op1) == CONST_DOUBLE)
4490 l1u = l1s = CONST_DOUBLE_LOW (op1);
4491 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4493 else
4495 l1u = l1s = INTVAL (op1);
4496 h1u = 0, h1s = l1s < 0 ? -1 : 0;
4499 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4500 we have to sign or zero-extend the values. */
4501 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4502 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4504 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4506 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4507 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4509 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4510 l0s |= ((HOST_WIDE_INT) (-1) << width);
4512 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4513 l1s |= ((HOST_WIDE_INT) (-1) << width);
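/* For example, with width == 16 and op0 == (const_int 0xffff), the
unsigned form stays l0u == 0xffff while the signed form becomes
l0s == -1; the value then compares equal to -1 for the signed
tests but is the largest 16 bit value for the unsigned ones. */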
4516 equal = (h0u == h1u && l0u == l1u);
4517 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4518 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4519 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4520 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4523 /* Otherwise, there are some code-specific tests we can make. */
4524 else
4526 switch (code)
4528 case EQ:
4529 /* References to the frame plus a constant or labels cannot
4530 be zero, but a SYMBOL_REF can due to #pragma weak. */
4531 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4532 || GET_CODE (op0) == LABEL_REF)
4533 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4534 /* On some machines, the ap reg can sometimes be 0. */
4535 && op0 != arg_pointer_rtx
4536 #endif
4538 return const0_rtx;
4539 break;
4541 case NE:
4542 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4543 || GET_CODE (op0) == LABEL_REF)
4544 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4545 && op0 != arg_pointer_rtx
4546 #endif
4548 return const_true_rtx;
4549 break;
4551 case GEU:
4552 /* Unsigned values are never negative. */
4553 if (op1 == const0_rtx)
4554 return const_true_rtx;
4555 break;
4557 case LTU:
4558 if (op1 == const0_rtx)
4559 return const0_rtx;
4560 break;
4562 case LEU:
4563 /* Unsigned values are never greater than the largest
4564 unsigned value. */
4565 if (GET_CODE (op1) == CONST_INT
4566 && INTVAL (op1) == GET_MODE_MASK (mode)
4567 && INTEGRAL_MODE_P (mode))
4568 return const_true_rtx;
4569 break;
4571 case GTU:
4572 if (GET_CODE (op1) == CONST_INT
4573 && INTVAL (op1) == GET_MODE_MASK (mode)
4574 && INTEGRAL_MODE_P (mode))
4575 return const0_rtx;
4576 break;
4579 return 0;
4582 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4583 as appropriate. */
4584 switch (code)
4586 case EQ:
4587 return equal ? const_true_rtx : const0_rtx;
4588 case NE:
4589 return ! equal ? const_true_rtx : const0_rtx;
4590 case LT:
4591 return op0lt ? const_true_rtx : const0_rtx;
4592 case GT:
4593 return op1lt ? const_true_rtx : const0_rtx;
4594 case LTU:
4595 return op0ltu ? const_true_rtx : const0_rtx;
4596 case GTU:
4597 return op1ltu ? const_true_rtx : const0_rtx;
4598 case LE:
4599 return equal || op0lt ? const_true_rtx : const0_rtx;
4600 case GE:
4601 return equal || op1lt ? const_true_rtx : const0_rtx;
4602 case LEU:
4603 return equal || op0ltu ? const_true_rtx : const0_rtx;
4604 case GEU:
4605 return equal || op1ltu ? const_true_rtx : const0_rtx;
4608 abort ();
4611 /* Simplify CODE, an operation with result mode MODE and three operands,
4612 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4613 a constant. Return 0 if no simplification is possible. */
4616 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4617 enum rtx_code code;
4618 enum machine_mode mode, op0_mode;
4619 rtx op0, op1, op2;
4621 int width = GET_MODE_BITSIZE (mode);
4623 /* VOIDmode means "infinite" precision. */
4624 if (width == 0)
4625 width = HOST_BITS_PER_WIDE_INT;
4627 switch (code)
4629 case SIGN_EXTRACT:
4630 case ZERO_EXTRACT:
4631 if (GET_CODE (op0) == CONST_INT
4632 && GET_CODE (op1) == CONST_INT
4633 && GET_CODE (op2) == CONST_INT
4634 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4635 && width <= HOST_BITS_PER_WIDE_INT)
4637 /* Extracting a bit-field from a constant */
4638 HOST_WIDE_INT val = INTVAL (op0);
4640 if (BITS_BIG_ENDIAN)
4641 val >>= (GET_MODE_BITSIZE (op0_mode)
4642 - INTVAL (op2) - INTVAL (op1));
4643 else
4644 val >>= INTVAL (op2);
4646 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4648 /* First zero-extend. */
4649 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4650 /* If desired, propagate sign bit. */
4651 if (code == SIGN_EXTRACT
4652 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4653 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4656 /* Clear the bits that don't belong in our mode,
4657 unless they and our sign bit are all one.
4658 So we get either a reasonable negative value or a reasonable
4659 unsigned value for this mode. */
4660 if (width < HOST_BITS_PER_WIDE_INT
4661 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4662 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4663 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4665 return GEN_INT (val);
4667 break;
4669 case IF_THEN_ELSE:
4670 if (GET_CODE (op0) == CONST_INT)
4671 return op0 != const0_rtx ? op1 : op2;
4672 break;
4674 default:
4675 abort ();
4678 return 0;
4681 /* If X is a nontrivial arithmetic operation on an argument
4682 for which a constant value can be determined, return
4683 the result of operating on that value, as a constant.
4684 Otherwise, return X, possibly with one or more operands
4685 modified by recursive calls to this function.
4687 If X is a register whose contents are known, we do NOT
4688 return those contents here. equiv_constant is called to
4689 perform that task.
4691 INSN is the insn that we may be modifying. If it is 0, make a copy
4692 of X before modifying it. */
4694 static rtx
4695 fold_rtx (x, insn)
4696 rtx x;
4697 rtx insn;
4699 register enum rtx_code code;
4700 register enum machine_mode mode;
4701 register char *fmt;
4702 register int i;
4703 rtx new = 0;
4704 int copied = 0;
4705 int must_swap = 0;
4707 /* Folded equivalents of first two operands of X. */
4708 rtx folded_arg0;
4709 rtx folded_arg1;
4711 /* Constant equivalents of first three operands of X;
4712 0 when no such equivalent is known. */
4713 rtx const_arg0;
4714 rtx const_arg1;
4715 rtx const_arg2;
4717 /* The mode of the first operand of X. We need this for sign and zero
4718 extends. */
4719 enum machine_mode mode_arg0;
4721 if (x == 0)
4722 return x;
4724 mode = GET_MODE (x);
4725 code = GET_CODE (x);
4726 switch (code)
4728 case CONST:
4729 case CONST_INT:
4730 case CONST_DOUBLE:
4731 case SYMBOL_REF:
4732 case LABEL_REF:
4733 case REG:
4734 /* No use simplifying an EXPR_LIST
4735 since they are used only for lists of args
4736 in a function call's REG_EQUAL note. */
4737 case EXPR_LIST:
4738 return x;
4740 #ifdef HAVE_cc0
4741 case CC0:
4742 return prev_insn_cc0;
4743 #endif
4745 case PC:
4746 /* If the next insn is a CODE_LABEL followed by a jump table,
4747 PC's value is a LABEL_REF pointing to that label. That
4748 lets us fold switch statements on the Vax. */
4749 if (insn && GET_CODE (insn) == JUMP_INSN)
4751 rtx next = next_nonnote_insn (insn);
4753 if (next && GET_CODE (next) == CODE_LABEL
4754 && NEXT_INSN (next) != 0
4755 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4756 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4757 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4758 return gen_rtx (LABEL_REF, Pmode, next);
4760 break;
4762 case SUBREG:
4763 /* See if we previously assigned a constant value to this SUBREG. */
4764 if ((new = lookup_as_function (x, CONST_INT)) != 0
4765 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4766 return new;
4768 /* If this is a paradoxical SUBREG, we have no idea what value the
4769 extra bits would have. However, if the operand is equivalent
4770 to a SUBREG whose inner operand has our mode, and all the
4771 modes are within a word, we can just use the inner operand
4772 because these SUBREGs just say how to treat the register.
4774 Similarly if we find an integer constant. */
4776 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4778 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4779 struct table_elt *elt;
4781 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4782 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4783 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4784 imode)) != 0)
4785 for (elt = elt->first_same_value;
4786 elt; elt = elt->next_same_value)
4788 if (CONSTANT_P (elt->exp)
4789 && GET_MODE (elt->exp) == VOIDmode)
4790 return elt->exp;
4792 if (GET_CODE (elt->exp) == SUBREG
4793 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4794 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4795 return copy_rtx (SUBREG_REG (elt->exp));
4798 return x;
4801 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4802 We might be able to if the SUBREG is extracting a single word in an
4803 integral mode or extracting the low part. */
4805 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4806 const_arg0 = equiv_constant (folded_arg0);
4807 if (const_arg0)
4808 folded_arg0 = const_arg0;
4810 if (folded_arg0 != SUBREG_REG (x))
4812 new = 0;
4814 if (GET_MODE_CLASS (mode) == MODE_INT
4815 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4816 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4817 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4818 GET_MODE (SUBREG_REG (x)));
4819 if (new == 0 && subreg_lowpart_p (x))
4820 new = gen_lowpart_if_possible (mode, folded_arg0);
4821 if (new)
4822 return new;
4825 /* If this is a narrowing SUBREG and our operand is a REG, see if
4826 we can find an equivalence for REG that is an arithmetic operation
4827 in a wider mode where both operands are paradoxical SUBREGs
4828 from objects of our result mode. In that case, we couldn't report
4829 an equivalent value for that operation, since we don't know what the
4830 extra bits will be. But we can find an equivalence for this SUBREG
4831 by folding that operation in the narrow mode. This allows us to
4832 fold arithmetic in narrow modes when the machine only supports
4833 word-sized arithmetic.
4835 Also look for a case where we have a SUBREG whose operand is the
4836 same as our result. If both modes are smaller than a word, we
4837 are simply interpreting a register in different modes and we
4838 can use the inner value. */
4840 if (GET_CODE (folded_arg0) == REG
4841 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4842 && subreg_lowpart_p (x))
4844 struct table_elt *elt;
4846 /* We can use HASH here since we know that canon_hash won't be
4847 called. */
4848 elt = lookup (folded_arg0,
4849 HASH (folded_arg0, GET_MODE (folded_arg0)),
4850 GET_MODE (folded_arg0));
4852 if (elt)
4853 elt = elt->first_same_value;
4855 for (; elt; elt = elt->next_same_value)
4857 enum rtx_code eltcode = GET_CODE (elt->exp);
4859 /* Just check for unary and binary operations. */
4860 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4861 && GET_CODE (elt->exp) != SIGN_EXTEND
4862 && GET_CODE (elt->exp) != ZERO_EXTEND
4863 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4864 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4866 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4868 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4869 op0 = fold_rtx (op0, NULL_RTX);
4871 op0 = equiv_constant (op0);
4872 if (op0)
4873 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4874 op0, mode);
4876 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4877 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4878 && eltcode != DIV && eltcode != MOD
4879 && eltcode != UDIV && eltcode != UMOD
4880 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4881 && eltcode != ROTATE && eltcode != ROTATERT
4882 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4883 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4884 == mode))
4885 || CONSTANT_P (XEXP (elt->exp, 0)))
4886 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4887 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4888 == mode))
4889 || CONSTANT_P (XEXP (elt->exp, 1))))
4891 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4892 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4894 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4895 op0 = fold_rtx (op0, NULL_RTX);
4897 if (op0)
4898 op0 = equiv_constant (op0);
4900 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4901 op1 = fold_rtx (op1, NULL_RTX);
4903 if (op1)
4904 op1 = equiv_constant (op1);
4906 /* If we are looking for the low SImode part of
4907 (ashift:DI c (const_int 32)), it doesn't work
4908 to compute that in SImode, because a 32-bit shift
4909 in SImode is unpredictable. We know the value is 0. */
4910 if (op0 && op1
4911 && GET_CODE (elt->exp) == ASHIFT
4912 && GET_CODE (op1) == CONST_INT
4913 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
4915 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
4917 /* If the count fits in the inner mode's width,
4918 but exceeds the outer mode's width,
4919 the value will get truncated to 0
4920 by the subreg. */
4921 new = const0_rtx;
4922 else
4923 /* If the count exceeds even the inner mode's width,
4924 don't fold this expression. */
4925 new = 0;
4927 else if (op0 && op1)
4928 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4929 op0, op1);
4932 else if (GET_CODE (elt->exp) == SUBREG
4933 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4934 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4935 <= UNITS_PER_WORD)
4936 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4937 new = copy_rtx (SUBREG_REG (elt->exp));
4939 if (new)
4940 return new;
4944 return x;
4946 case NOT:
4947 case NEG:
4948 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4949 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4950 new = lookup_as_function (XEXP (x, 0), code);
4951 if (new)
4952 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4953 break;
4955 case MEM:
4956 /* If we are not actually processing an insn, don't try to find the
4957 best address. Not only don't we care, but we could modify the
4958 MEM in an invalid way since we have no insn to validate against. */
4959 if (insn != 0)
4960 find_best_addr (insn, &XEXP (x, 0));
4963 /* Even if we don't fold in the insn itself,
4964 we can safely do so here, in hopes of getting a constant. */
4965 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
4966 rtx base = 0;
4967 HOST_WIDE_INT offset = 0;
4969 if (GET_CODE (addr) == REG
4970 && REGNO_QTY_VALID_P (REGNO (addr))
4971 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4972 && qty_const[reg_qty[REGNO (addr)]] != 0)
4973 addr = qty_const[reg_qty[REGNO (addr)]];
4975 /* If address is constant, split it into a base and integer offset. */
4976 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4977 base = addr;
4978 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4979 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4981 base = XEXP (XEXP (addr, 0), 0);
4982 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4984 else if (GET_CODE (addr) == LO_SUM
4985 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4986 base = XEXP (addr, 1);
4988 /* If this is a constant pool reference, we can fold it into its
4989 constant to allow better value tracking. */
4990 if (base && GET_CODE (base) == SYMBOL_REF
4991 && CONSTANT_POOL_ADDRESS_P (base))
4993 rtx constant = get_pool_constant (base);
4994 enum machine_mode const_mode = get_pool_mode (base);
4995 rtx new;
4997 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4998 constant_pool_entries_cost = COST (constant);
5000 /* If we are loading the full constant, we have an equivalence. */
5001 if (offset == 0 && mode == const_mode)
5002 return constant;
5004 /* If this actually isn't a constant (weird!), we can't do
5005 anything. Otherwise, handle the two most common cases:
5006 extracting a word from a multi-word constant, and extracting
5007 the low-order bits. Other cases don't seem common enough to
5008 worry about. */
5009 if (! CONSTANT_P (constant))
5010 return x;
5012 if (GET_MODE_CLASS (mode) == MODE_INT
5013 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5014 && offset % UNITS_PER_WORD == 0
5015 && (new = operand_subword (constant,
5016 offset / UNITS_PER_WORD,
5017 0, const_mode)) != 0)
5018 return new;
5020 if (((BYTES_BIG_ENDIAN
5021 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5022 || (! BYTES_BIG_ENDIAN && offset == 0))
5023 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5024 return new;
5027 /* If this is a reference to a label at a known position in a jump
5028 table, we also know its value. */
5029 if (base && GET_CODE (base) == LABEL_REF)
5031 rtx label = XEXP (base, 0);
5032 rtx table_insn = NEXT_INSN (label);
5034 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5035 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5037 rtx table = PATTERN (table_insn);
5039 if (offset >= 0
5040 && (offset / GET_MODE_SIZE (GET_MODE (table))
5041 < XVECLEN (table, 0)))
5042 return XVECEXP (table, 0,
5043 offset / GET_MODE_SIZE (GET_MODE (table)));
5045 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5046 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5048 rtx table = PATTERN (table_insn);
5050 if (offset >= 0
5051 && (offset / GET_MODE_SIZE (GET_MODE (table))
5052 < XVECLEN (table, 1)))
5054 offset /= GET_MODE_SIZE (GET_MODE (table));
5055 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
5056 XEXP (table, 0));
5058 if (GET_MODE (table) != Pmode)
5059 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
5061 /* Indicate this is a constant. This isn't a
5062 valid form of CONST, but it will only be used
5063 to fold the next insns and then discarded, so
5064 it should be safe. */
5065 return gen_rtx (CONST, GET_MODE (new), new);
5070 return x;
5073 case ASM_OPERANDS:
5074 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5075 validate_change (insn, &XVECEXP (x, 3, i),
5076 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5077 break;
5080 const_arg0 = 0;
5081 const_arg1 = 0;
5082 const_arg2 = 0;
5083 mode_arg0 = VOIDmode;
5085 /* Try folding our operands.
5086 Then see which ones have constant values known. */
5088 fmt = GET_RTX_FORMAT (code);
5089 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5090 if (fmt[i] == 'e')
5092 rtx arg = XEXP (x, i);
5093 rtx folded_arg = arg, const_arg = 0;
5094 enum machine_mode mode_arg = GET_MODE (arg);
5095 rtx cheap_arg, expensive_arg;
5096 rtx replacements[2];
5097 int j;
5099 /* Most arguments are cheap, so handle them specially. */
5100 switch (GET_CODE (arg))
5102 case REG:
5103 /* This is the same as calling equiv_constant; it is duplicated
5104 here for speed. */
5105 if (REGNO_QTY_VALID_P (REGNO (arg))
5106 && qty_const[reg_qty[REGNO (arg)]] != 0
5107 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5108 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5109 const_arg
5110 = gen_lowpart_if_possible (GET_MODE (arg),
5111 qty_const[reg_qty[REGNO (arg)]]);
5112 break;
5114 case CONST:
5115 case CONST_INT:
5116 case SYMBOL_REF:
5117 case LABEL_REF:
5118 case CONST_DOUBLE:
5119 const_arg = arg;
5120 break;
5122 #ifdef HAVE_cc0
5123 case CC0:
5124 folded_arg = prev_insn_cc0;
5125 mode_arg = prev_insn_cc0_mode;
5126 const_arg = equiv_constant (folded_arg);
5127 break;
5128 #endif
5130 default:
5131 folded_arg = fold_rtx (arg, insn);
5132 const_arg = equiv_constant (folded_arg);
5135 /* For the first three operands, see if the operand
5136 is constant or equivalent to a constant. */
5137 switch (i)
5139 case 0:
5140 folded_arg0 = folded_arg;
5141 const_arg0 = const_arg;
5142 mode_arg0 = mode_arg;
5143 break;
5144 case 1:
5145 folded_arg1 = folded_arg;
5146 const_arg1 = const_arg;
5147 break;
5148 case 2:
5149 const_arg2 = const_arg;
5150 break;
5153 /* Pick the least expensive of the folded argument and an
5154 equivalent constant argument. */
5155 if (const_arg == 0 || const_arg == folded_arg
5156 || COST (const_arg) > COST (folded_arg))
5157 cheap_arg = folded_arg, expensive_arg = const_arg;
5158 else
5159 cheap_arg = const_arg, expensive_arg = folded_arg;
5161 /* Try to replace the operand with the cheapest of the two
5162 possibilities. If it doesn't work and this is either of the first
5163 two operands of a commutative operation, try swapping them.
5164 If THAT fails, try the more expensive, provided it is cheaper
5165 than what is already there. */
5167 if (cheap_arg == XEXP (x, i))
5168 continue;
5170 if (insn == 0 && ! copied)
5172 x = copy_rtx (x);
5173 copied = 1;
5176 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5177 for (j = 0;
5178 j < 2 && replacements[j]
5179 && COST (replacements[j]) < COST (XEXP (x, i));
5180 j++)
5182 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5183 break;
5185 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5187 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5188 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5190 if (apply_change_group ())
5192 /* Swap them back to be invalid so that this loop can
5193 continue and flag them to be swapped back later. */
5194 rtx tem;
5196 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5197 XEXP (x, 1) = tem;
5198 must_swap = 1;
5199 break;
5205 else if (fmt[i] == 'E')
5206 /* Don't try to fold inside of a vector of expressions.
5207 Doing nothing is harmless. */
5210 /* If a commutative operation, place a constant integer as the second
5211 operand unless the first operand is also a constant integer. Otherwise,
5212 place any constant second unless the first operand is also a constant. */
5214 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5216 if (must_swap || (const_arg0
5217 && (const_arg1 == 0
5218 || (GET_CODE (const_arg0) == CONST_INT
5219 && GET_CODE (const_arg1) != CONST_INT))))
5221 register rtx tem = XEXP (x, 0);
5223 if (insn == 0 && ! copied)
5225 x = copy_rtx (x);
5226 copied = 1;
5229 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5230 validate_change (insn, &XEXP (x, 1), tem, 1);
5231 if (apply_change_group ())
5233 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5234 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5239 /* If X is an arithmetic operation, see if we can simplify it. */
5241 switch (GET_RTX_CLASS (code))
5243 case '1':
5245 int is_const = 0;
5247 /* We can't simplify extension ops unless we know the
5248 original mode. */
5249 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5250 && mode_arg0 == VOIDmode)
5251 break;
5253 /* If we had a CONST, strip it off and put it back later if we
5254 fold. */
5255 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5256 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5258 new = simplify_unary_operation (code, mode,
5259 const_arg0 ? const_arg0 : folded_arg0,
5260 mode_arg0);
5261 if (new != 0 && is_const)
5262 new = gen_rtx (CONST, mode, new);
5264 break;
5266 case '<':
5267 /* See what items are actually being compared and set FOLDED_ARG[01]
5268 to those values and CODE to the actual comparison code. If any are
5269 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5270 do anything if both operands are already known to be constant. */
5272 if (const_arg0 == 0 || const_arg1 == 0)
5274 struct table_elt *p0, *p1;
5275 rtx true = const_true_rtx, false = const0_rtx;
5276 enum machine_mode mode_arg1;
5278 #ifdef FLOAT_STORE_FLAG_VALUE
5279 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5281 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5282 mode);
5283 false = CONST0_RTX (mode);
5285 #endif
5287 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5288 &mode_arg0, &mode_arg1);
5289 const_arg0 = equiv_constant (folded_arg0);
5290 const_arg1 = equiv_constant (folded_arg1);
5292 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5293 what kinds of things are being compared, so we can't do
5294 anything with this comparison. */
5296 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5297 break;
5299 /* If we do not now have two constants being compared, see
5300 if we can nevertheless deduce some things about the
5301 comparison. */
5302 if (const_arg0 == 0 || const_arg1 == 0)
5304 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5305 non-explicit constant? These aren't zero, but we
5306 don't know their sign. */
5307 if (const_arg1 == const0_rtx
5308 && (NONZERO_BASE_PLUS_P (folded_arg0)
5309 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5310 come out as 0. */
5311 || GET_CODE (folded_arg0) == SYMBOL_REF
5312 #endif
5313 || GET_CODE (folded_arg0) == LABEL_REF
5314 || GET_CODE (folded_arg0) == CONST))
5316 if (code == EQ)
5317 return false;
5318 else if (code == NE)
5319 return true;
5322 /* See if the two operands are the same. We don't do this
5323 for IEEE floating-point since we can't assume x == x
5324 since x might be a NaN. */
5326 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5327 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5328 && (folded_arg0 == folded_arg1
5329 || (GET_CODE (folded_arg0) == REG
5330 && GET_CODE (folded_arg1) == REG
5331 && (reg_qty[REGNO (folded_arg0)]
5332 == reg_qty[REGNO (folded_arg1)]))
5333 || ((p0 = lookup (folded_arg0,
5334 (safe_hash (folded_arg0, mode_arg0)
5335 % NBUCKETS), mode_arg0))
5336 && (p1 = lookup (folded_arg1,
5337 (safe_hash (folded_arg1, mode_arg0)
5338 % NBUCKETS), mode_arg0))
5339 && p0->first_same_value == p1->first_same_value)))
5340 return ((code == EQ || code == LE || code == GE
5341 || code == LEU || code == GEU)
5342 ? true : false);
5344 /* If FOLDED_ARG0 is a register, see if the comparison we are
5345 doing now is either the same as we did before or the reverse
5346 (we only check the reverse if not floating-point). */
5347 else if (GET_CODE (folded_arg0) == REG)
5349 int qty = reg_qty[REGNO (folded_arg0)];
5351 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5352 && (comparison_dominates_p (qty_comparison_code[qty], code)
5353 || (comparison_dominates_p (qty_comparison_code[qty],
5354 reverse_condition (code))
5355 && ! FLOAT_MODE_P (mode_arg0)))
5356 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5357 || (const_arg1
5358 && rtx_equal_p (qty_comparison_const[qty],
5359 const_arg1))
5360 || (GET_CODE (folded_arg1) == REG
5361 && (reg_qty[REGNO (folded_arg1)]
5362 == qty_comparison_qty[qty]))))
5363 return (comparison_dominates_p (qty_comparison_code[qty],
5364 code)
5365 ? true : false);
5370 /* If we are comparing against zero, see if the first operand is
5371 equivalent to an IOR with a constant. If so, we may be able to
5372 determine the result of this comparison. */
5374 if (const_arg1 == const0_rtx)
5376 rtx y = lookup_as_function (folded_arg0, IOR);
5377 rtx inner_const;
5379 if (y != 0
5380 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5381 && GET_CODE (inner_const) == CONST_INT
5382 && INTVAL (inner_const) != 0)
5384 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5385 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5386 && (INTVAL (inner_const)
5387 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5388 rtx true = const_true_rtx, false = const0_rtx;
5390 #ifdef FLOAT_STORE_FLAG_VALUE
5391 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5393 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5394 mode);
5395 false = CONST0_RTX (mode);
5397 #endif
5399 switch (code)
5401 case EQ:
5402 return false;
5403 case NE:
5404 return true;
5405 case LT: case LE:
5406 if (has_sign)
5407 return true;
5408 break;
5409 case GT: case GE:
5410 if (has_sign)
5411 return false;
5412 break;
5417 new = simplify_relational_operation (code, mode_arg0,
5418 const_arg0 ? const_arg0 : folded_arg0,
5419 const_arg1 ? const_arg1 : folded_arg1);
5420 #ifdef FLOAT_STORE_FLAG_VALUE
5421 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5422 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5423 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5424 #endif
5425 break;
5427 case '2':
5428 case 'c':
5429 switch (code)
5431 case PLUS:
5432 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5433 with that LABEL_REF as its second operand. If so, the result is
5434 the first operand of that MINUS. This handles switches with an
5435 ADDR_DIFF_VEC table. */
5436 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5438 rtx y
5439 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5440 : lookup_as_function (folded_arg0, MINUS);
5442 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5443 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5444 return XEXP (y, 0);
5446 /* Now try for a CONST of a MINUS like the above. */
5447 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5448 : lookup_as_function (folded_arg0, CONST))) != 0
5449 && GET_CODE (XEXP (y, 0)) == MINUS
5450 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5451 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5452 return XEXP (XEXP (y, 0), 0);
5455 /* Likewise if the operands are in the other order. */
5456 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5458 rtx y
5459 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5460 : lookup_as_function (folded_arg1, MINUS);
5462 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5463 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5464 return XEXP (y, 0);
5466 /* Now try for a CONST of a MINUS like the above. */
5467 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5468 : lookup_as_function (folded_arg1, CONST))) != 0
5469 && GET_CODE (XEXP (y, 0)) == MINUS
5470 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5471 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5472 return XEXP (XEXP (y, 0), 0);
5475 /* If second operand is a register equivalent to a negative
5476 CONST_INT, see if we can find a register equivalent to the
5477 positive constant. Make a MINUS if so. Don't do this for
5478 a non-negative constant since we might then alternate between
5479 choosing positive and negative constants. Having the positive
5480 constant previously-used is the more common case. Be sure
5481 the resulting constant is non-negative; if const_arg1 were
5482 the smallest negative number this would overflow: depending
5483 on the mode, this would either just be the same value (and
5484 hence not save anything) or be incorrect. */
5485 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5486 && INTVAL (const_arg1) < 0
5487 && - INTVAL (const_arg1) >= 0
5488 && GET_CODE (folded_arg1) == REG)
5490 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5491 struct table_elt *p
5492 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5493 mode);
5495 if (p)
5496 for (p = p->first_same_value; p; p = p->next_same_value)
5497 if (GET_CODE (p->exp) == REG)
5498 return cse_gen_binary (MINUS, mode, folded_arg0,
5499 canon_reg (p->exp, NULL_RTX));
5501 goto from_plus;
5503 case MINUS:
5504 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5505 If so, produce (PLUS Z C2-C). */
5506 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5508 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5509 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5510 return fold_rtx (plus_constant (copy_rtx (y),
5511 -INTVAL (const_arg1)),
5512 NULL_RTX);
5515 /* ... fall through ... */
5517 from_plus:
5518 case SMIN: case SMAX: case UMIN: case UMAX:
5519 case IOR: case AND: case XOR:
5520 case MULT: case DIV: case UDIV:
5521 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5522 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5523 is known to be of similar form, we may be able to replace the
5524 operation with a combined operation. This may eliminate the
5525 intermediate operation if every use is simplified in this way.
5526 Note that the similar optimization done by combine.c only works
5527 if the intermediate operation's result has only one reference. */
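/* For example, if register R is known equivalent to (plus x 3),
then (plus R 6) folds to (plus x 9); similarly
(lshiftrt (lshiftrt x 2) 3) combines to (lshiftrt x 5), since
shift counts add (hence PLUS below), and (udiv (udiv x 2) 3)
combines to (udiv x 6), since divisors multiply. */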
5529 if (GET_CODE (folded_arg0) == REG
5530 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5532 int is_shift
5533 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5534 rtx y = lookup_as_function (folded_arg0, code);
5535 rtx inner_const;
5536 enum rtx_code associate_code;
5537 rtx new_const;
5539 if (y == 0
5540 || 0 == (inner_const
5541 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5542 || GET_CODE (inner_const) != CONST_INT
5543 /* If we have compiled a statement like
5544 "if (x == (x & mask1))", and now are looking at
5545 "x & mask2", we will have a case where the first operand
5546 of Y is the same as our first operand. Unless we detect
5547 this case, an infinite loop will result. */
5548 || XEXP (y, 0) == folded_arg0)
5549 break;
5551 /* Don't associate these operations if they are a PLUS with the
5552 same constant and it is a power of two. These might be doable
5553 with a pre- or post-increment. Similarly for two subtracts of
5554 identical powers of two with post decrement. */
5556 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5557 && (0
5558 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5559 || exact_log2 (INTVAL (const_arg1)) >= 0
5560 #endif
5561 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5562 || exact_log2 (- INTVAL (const_arg1)) >= 0
5563 #endif
5565 break;
5567 /* Compute the code used to compose the constants. For example,
5568 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5570 associate_code
5571 = (code == MULT || code == DIV || code == UDIV ? MULT
5572 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5574 new_const = simplify_binary_operation (associate_code, mode,
5575 const_arg1, inner_const);
5577 if (new_const == 0)
5578 break;
5580 /* If we are associating shift operations, don't let this
5581 produce a shift of the size of the object or larger.
5582 This could occur when we follow a sign-extend by a right
5583 shift on a machine that does a sign-extend as a pair
5584 of shifts. */
5586 if (is_shift && GET_CODE (new_const) == CONST_INT
5587 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5589 /* As an exception, we can turn an ASHIFTRT of this
5590 form into a shift of the number of bits - 1. */
5591 if (code == ASHIFTRT)
5592 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5593 else
5594 break;
5597 y = copy_rtx (XEXP (y, 0));
5599 /* If Y contains our first operand (the most common way this
5600 can happen is if Y is a MEM), we would go into an infinite
5601 loop if we tried to fold it. So don't in that case. */
5603 if (! reg_mentioned_p (folded_arg0, y))
5604 y = fold_rtx (y, insn);
5606 return cse_gen_binary (code, mode, y, new_const);
5610 new = simplify_binary_operation (code, mode,
5611 const_arg0 ? const_arg0 : folded_arg0,
5612 const_arg1 ? const_arg1 : folded_arg1);
5613 break;
5615 case 'o':
5616 /* (lo_sum (high X) X) is simply X. */
5617 if (code == LO_SUM && const_arg0 != 0
5618 && GET_CODE (const_arg0) == HIGH
5619 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5620 return const_arg1;
5621 break;
5623 case '3':
5624 case 'b':
5625 new = simplify_ternary_operation (code, mode, mode_arg0,
5626 const_arg0 ? const_arg0 : folded_arg0,
5627 const_arg1 ? const_arg1 : folded_arg1,
5628 const_arg2 ? const_arg2 : XEXP (x, 2));
5629 break;
5632 return new ? new : x;
5635 /* Return a constant value currently equivalent to X.
5636 Return 0 if we don't know one. */
5638 static rtx
5639 equiv_constant (x)
5640 rtx x;
5642 if (GET_CODE (x) == REG
5643 && REGNO_QTY_VALID_P (REGNO (x))
5644 && qty_const[reg_qty[REGNO (x)]])
5645 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5647 if (x != 0 && CONSTANT_P (x))
5648 return x;
5650 /* If X is a MEM, try to fold it outside the context of any insn to see if
5651 it might be equivalent to a constant. That handles the case where it
5652 is a constant-pool reference. Then try to look it up in the hash table
5653 in case it is something whose value we have seen before. */
  if (GET_CODE (x) == MEM)
    {
      struct table_elt *elt;
5659 x = fold_rtx (x, NULL_RTX);
5660 if (CONSTANT_P (x))
5661 return x;
5663 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5664 if (elt == 0)
5665 return 0;
      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
	if (elt->is_const && CONSTANT_P (elt->exp))
	  return elt->exp;
    }

  return 0;
}
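/* Usage sketch (not in the original source): a hypothetical caller of
   equiv_constant.  REG_FOR_I stands for a pseudo that an earlier
   `i = 2' left with quantity constant (const_int 2).  Kept under
   `#if 0' so it is never compiled.  */
#if 0
static void
equiv_constant_example (reg_for_i)
     rtx reg_for_i;
{
  rtx c = equiv_constant (reg_for_i);

  /* If the register's quantity recorded (const_int 2), C is now that
     CONST_INT and could be substituted for the register.  */
  if (c != 0 && GET_CODE (c) == CONST_INT)
    ;				/* ... use the constant ... */
}
#endif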
5675 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5676 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5677 least-significant part of X.
5678 MODE specifies how big a part of X to return.
5680 If the requested operation cannot be done, 0 is returned.
5682 This is similar to gen_lowpart in emit-rtl.c. */
rtx
gen_lowpart_if_possible (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == MEM)
    {
      /* This is the only other case we handle.  */
      register int offset = 0;
      rtx new;
5699 if (WORDS_BIG_ENDIAN)
5700 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5701 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5702 if (BYTES_BIG_ENDIAN)
5703 /* Adjust the address so that the address-after-the-data is
5704 unchanged. */
5705 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5706 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5707 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5708 if (! memory_address_p (mode, XEXP (new, 0)))
5709 return 0;
5710 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5711 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5712 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
      return new;
    }
  else
    return 0;
}
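/* Worked example (not in the original source) of the offset arithmetic
   above: on a big-endian machine with 4-byte words, taking the SImode
   low part of a DImode MEM at address A gives

	WORDS_BIG_ENDIAN:  offset = MAX (8, 4) - MAX (4, 4) = 4
	BYTES_BIG_ENDIAN:  offset -= MIN (4, 4) - MIN (4, 8) = 0

   so the result is (mem:SI (plus A 4)): the least-significant word,
   with the address just past the data unchanged.  */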
/* Given INSN, a jump insn, TAKEN indicates whether we are following the
   "taken" branch; it will be zero if not.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of
	if (i == 2)
   we can add the fact that `i' and `2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */
static void
record_jump_equiv (insn, taken)
     rtx insn;
     int taken;
{
  int cond_known_true;
5736 rtx op0, op1;
5737 enum machine_mode mode, mode0, mode1;
5738 int reversed_nonequality = 0;
5739 enum rtx_code code;
5741 /* Ensure this is the right kind of insn. */
5742 if (! condjump_p (insn) || simplejump_p (insn))
5743 return;
5745 /* See if this jump condition is known true or false. */
5746 if (taken)
5747 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5748 else
5749 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5751 /* Get the type of comparison being done and the operands being compared.
5752 If we had to reverse a non-equality condition, record that fact so we
5753 know that it isn't valid for floating-point. */
5754 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5755 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5756 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5758 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5759 if (! cond_known_true)
5761 reversed_nonequality = (code != EQ && code != NE);
5762 code = reverse_condition (code);
5765 /* The mode is the mode of the non-constant. */
5766 mode = mode0;
5767 if (mode1 != VOIDmode)
5768 mode = mode1;
  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}
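/* Illustration (not in the original source): given the conditional jump

	(set (pc) (if_then_else (eq (reg 50) (const_int 2))
				(label_ref L) (pc)))

   following the taken branch (TAKEN nonzero) leaves CODE == EQ, so
   record_jump_cond below makes (reg 50) and (const_int 2) equivalent.
   On the fall-through path the condition is reversed to NE, and only
   the comparison itself is remembered, not an equivalence.  */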
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */
static void
record_jump_cond (code, mode, op0, op1, reversed_nonequality)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
     int reversed_nonequality;
{
  unsigned op0_hash, op1_hash;
5786 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5787 struct table_elt *op0_elt, *op1_elt;
  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */
5794 /* Note that GET_MODE (op0) may not equal MODE. */
5795 if (code == EQ && GET_CODE (op0) == SUBREG
5796 && (GET_MODE_SIZE (GET_MODE (op0))
5797 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5799 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5800 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5802 record_jump_cond (code, mode, SUBREG_REG (op0),
5803 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5804 reversed_nonequality);
5807 if (code == EQ && GET_CODE (op1) == SUBREG
5808 && (GET_MODE_SIZE (GET_MODE (op1))
5809 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5811 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5812 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5814 record_jump_cond (code, mode, SUBREG_REG (op1),
5815 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5816 reversed_nonequality);
5819 /* Similarly, if this is an NE comparison, and either is a SUBREG
5820 making a smaller mode, we know the whole thing is also NE. */
5822 /* Note that GET_MODE (op0) may not equal MODE;
5823 if we test MODE instead, we can get an infinite recursion
5824 alternating between two modes each wider than MODE. */
5826 if (code == NE && GET_CODE (op0) == SUBREG
5827 && subreg_lowpart_p (op0)
5828 && (GET_MODE_SIZE (GET_MODE (op0))
5829 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5831 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5832 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5834 record_jump_cond (code, mode, SUBREG_REG (op0),
5835 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5836 reversed_nonequality);
5839 if (code == NE && GET_CODE (op1) == SUBREG
5840 && subreg_lowpart_p (op1)
5841 && (GET_MODE_SIZE (GET_MODE (op1))
5842 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5844 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5845 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5847 record_jump_cond (code, mode, SUBREG_REG (op1),
5848 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5849 reversed_nonequality);
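  /* Example (not in the original source): if the comparison was
     (eq (subreg:DI (reg:SI 50) 0) (reg:DI 60)) with a paradoxical
     SUBREG, the first recursion above also records that (reg:SI 50)
     equals the low SImode part of (reg:DI 60), since equality of the
     wider values implies equality of the narrower ones.  */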
5852 /* Hash both operands. */
5854 do_not_record = 0;
5855 hash_arg_in_memory = 0;
5856 hash_arg_in_struct = 0;
5857 op0_hash = HASH (op0, mode);
5858 op0_in_memory = hash_arg_in_memory;
5859 op0_in_struct = hash_arg_in_struct;
5861 if (do_not_record)
5862 return;
5864 do_not_record = 0;
5865 hash_arg_in_memory = 0;
5866 hash_arg_in_struct = 0;
5867 op1_hash = HASH (op1, mode);
5868 op1_in_memory = hash_arg_in_memory;
5869 op1_in_struct = hash_arg_in_struct;
5871 if (do_not_record)
5872 return;
5874 /* Look up both operands. */
5875 op0_elt = lookup (op0, op0_hash, mode);
5876 op1_elt = lookup (op1, op1_hash, mode);
5878 /* If both operands are already equivalent or if they are not in the
5879 table but are identical, do nothing. */
5880 if ((op0_elt != 0 && op1_elt != 0
5881 && op0_elt->first_same_value == op1_elt->first_same_value)
5882 || op0 == op1 || rtx_equal_p (op0, op1))
5883 return;
5885 /* If we aren't setting two things equal all we can do is save this
5886 comparison. Similarly if this is floating-point. In the latter
5887 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5888 If we record the equality, we might inadvertently delete code
5889 whose intent was to change -0 to +0. */
5891 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register nor a constant, we can't
	 do anything.  */
5897 if (GET_CODE (op1) != REG)
5898 op1 = equiv_constant (op1);
5900 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5901 || GET_CODE (op0) != REG || op1 == 0)
5902 return;
5904 /* Put OP0 in the hash table if it isn't already. This gives it a
5905 new quantity number. */
5906 if (op0_elt == 0)
5908 if (insert_regs (op0, NULL_PTR, 0))
5910 rehash_using_reg (op0);
5911 op0_hash = HASH (op0, mode);
5913 /* If OP0 is contained in OP1, this changes its hash code
5914 as well. Faster to rehash than to check, except
5915 for the simple case of a constant. */
5916 if (! CONSTANT_P (op1))
	      op1_hash = HASH (op1, mode);
5920 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5921 op0_elt->in_memory = op0_in_memory;
5922 op0_elt->in_struct = op0_in_struct;
5925 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5926 if (GET_CODE (op1) == REG)
5928 /* Look it up again--in case op0 and op1 are the same. */
5929 op1_elt = lookup (op1, op1_hash, mode);
5931 /* Put OP1 in the hash table so it gets a new quantity number. */
5932 if (op1_elt == 0)
5934 if (insert_regs (op1, NULL_PTR, 0))
5936 rehash_using_reg (op1);
5937 op1_hash = HASH (op1, mode);
5940 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5941 op1_elt->in_memory = op1_in_memory;
5942 op1_elt->in_struct = op1_in_struct;
5945 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5946 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5948 else
5950 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5951 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5954 return;
5957 /* If either side is still missing an equivalence, make it now,
5958 then merge the equivalences. */
5960 if (op0_elt == 0)
5962 if (insert_regs (op0, NULL_PTR, 0))
5964 rehash_using_reg (op0);
5965 op0_hash = HASH (op0, mode);
5968 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5969 op0_elt->in_memory = op0_in_memory;
5970 op0_elt->in_struct = op0_in_struct;
5973 if (op1_elt == 0)
5975 if (insert_regs (op1, NULL_PTR, 0))
5977 rehash_using_reg (op1);
5978 op1_hash = HASH (op1, mode);
5981 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5982 op1_elt->in_memory = op1_in_memory;
5983 op1_elt->in_struct = op1_in_struct;
5986 merge_equiv_classes (op0_elt, op1_elt);
  last_jump_equiv_class = op0_elt;
}
/* CSE processing for one instruction.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.

   If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
   the insn.  */
5999 /* Data on one SET contained in the instruction. */
struct set
{
  /* The SET rtx itself.  */
6004 rtx rtl;
6005 /* The SET_SRC of the rtx (the original value, if it is changing). */
6006 rtx src;
6007 /* The hash-table element for the SET_SRC of the SET. */
6008 struct table_elt *src_elt;
6009 /* Hash value for the SET_SRC. */
6010 unsigned src_hash;
6011 /* Hash value for the SET_DEST. */
6012 unsigned dest_hash;
6013 /* The SET_DEST, with SUBREG, etc., stripped. */
6014 rtx inner_dest;
6015 /* Place where the pointer to the INNER_DEST was found. */
6016 rtx *inner_dest_loc;
6017 /* Nonzero if the SET_SRC is in memory. */
6018 char src_in_memory;
6019 /* Nonzero if the SET_SRC is in a structure. */
6020 char src_in_struct;
6021 /* Nonzero if the SET_SRC contains something
6022 whose value cannot be predicted and understood. */
6023 char src_volatile;
6024 /* Original machine mode, in case it becomes a CONST_INT. */
6025 enum machine_mode mode;
6026 /* A constant equivalent for SET_SRC, if any. */
6027 rtx src_const;
6028 /* Hash value of constant equivalent for SET_SRC. */
6029 unsigned src_const_hash;
6030 /* Table entry for constant equivalent for SET_SRC, if any. */
  struct table_elt *src_const_elt;
};
static void
cse_insn (insn, in_libcall_block)
     rtx insn;
     int in_libcall_block;
{
  register rtx x = PATTERN (insn);
6040 register int i;
6041 rtx tem;
6042 register int n_sets = 0;
6044 /* Records what this insn does to set CC0. */
6045 rtx this_insn_cc0 = 0;
6046 enum machine_mode this_insn_cc0_mode;
6048 rtx src_eqv = 0;
6049 struct table_elt *src_eqv_elt = 0;
6050 int src_eqv_volatile;
6051 int src_eqv_in_memory;
6052 int src_eqv_in_struct;
6053 unsigned src_eqv_hash;
6055 struct set *sets;
6057 this_insn = insn;
6059 /* Find all the SETs and CLOBBERs in this instruction.
6060 Record all the SETs in the array `set' and count them.
6061 Also determine whether there is a CLOBBER that invalidates
6062 all memory references, or all references at varying addresses. */
6064 if (GET_CODE (insn) == CALL_INSN)
6066 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6067 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6068 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6071 if (GET_CODE (x) == SET)
6073 sets = (struct set *) alloca (sizeof (struct set));
6074 sets[0].rtl = x;
6076 /* Ignore SETs that are unconditional jumps.
6077 They never need cse processing, so this does not hurt.
6078 The reason is not efficiency but rather
6079 so that we can test at the end for instructions
6080 that have been simplified to unconditional jumps
6081 and not be misled by unchanged instructions
6082 that were unconditional jumps to begin with. */
6083 if (SET_DEST (x) == pc_rtx
6084 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6087 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6088 The hard function value register is used only once, to copy to
6089 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6090 Ensure we invalidate the destination register. On the 80386 no
6091 other code would invalidate it since it is a fixed_reg.
6092 We need not check the return of apply_change_group; see canon_reg. */
6094 else if (GET_CODE (SET_SRC (x)) == CALL)
6096 canon_reg (SET_SRC (x), insn);
6097 apply_change_group ();
6098 fold_rtx (SET_SRC (x), insn);
6099 invalidate (SET_DEST (x), VOIDmode);
6101 else
6102 n_sets = 1;
6104 else if (GET_CODE (x) == PARALLEL)
6106 register int lim = XVECLEN (x, 0);
6108 sets = (struct set *) alloca (lim * sizeof (struct set));
6110 /* Find all regs explicitly clobbered in this insn,
6111 and ensure they are not replaced with any other regs
6112 elsewhere in this insn.
6113 When a reg that is clobbered is also used for input,
6114 we should presume that that is for a reason,
6115 and we should not substitute some other register
6116 which is not supposed to be clobbered.
6117 Therefore, this loop cannot be merged into the one below
6118 because a CALL may precede a CLOBBER and refer to the
6119 value clobbered. We must not let a canonicalization do
6120 anything in that case. */
6121 for (i = 0; i < lim; i++)
6123 register rtx y = XVECEXP (x, 0, i);
6124 if (GET_CODE (y) == CLOBBER)
6126 rtx clobbered = XEXP (y, 0);
6128 if (GET_CODE (clobbered) == REG
6129 || GET_CODE (clobbered) == SUBREG)
6130 invalidate (clobbered, VOIDmode);
6131 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6132 || GET_CODE (clobbered) == ZERO_EXTRACT)
6133 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6137 for (i = 0; i < lim; i++)
6139 register rtx y = XVECEXP (x, 0, i);
6140 if (GET_CODE (y) == SET)
6142 /* As above, we ignore unconditional jumps and call-insns and
6143 ignore the result of apply_change_group. */
6144 if (GET_CODE (SET_SRC (y)) == CALL)
6146 canon_reg (SET_SRC (y), insn);
6147 apply_change_group ();
6148 fold_rtx (SET_SRC (y), insn);
6149 invalidate (SET_DEST (y), VOIDmode);
6151 else if (SET_DEST (y) == pc_rtx
6152 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6154 else
6155 sets[n_sets++].rtl = y;
6157 else if (GET_CODE (y) == CLOBBER)
6159 /* If we clobber memory, canon the address.
6160 This does nothing when a register is clobbered
6161 because we have already invalidated the reg. */
6162 if (GET_CODE (XEXP (y, 0)) == MEM)
6163 canon_reg (XEXP (y, 0), NULL_RTX);
6165 else if (GET_CODE (y) == USE
6166 && ! (GET_CODE (XEXP (y, 0)) == REG
6167 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6168 canon_reg (y, NULL_RTX);
6169 else if (GET_CODE (y) == CALL)
6171 /* The result of apply_change_group can be ignored; see
6172 canon_reg. */
6173 canon_reg (y, insn);
6174 apply_change_group ();
6175 fold_rtx (y, insn);
6179 else if (GET_CODE (x) == CLOBBER)
6181 if (GET_CODE (XEXP (x, 0)) == MEM)
6182 canon_reg (XEXP (x, 0), NULL_RTX);
6185 /* Canonicalize a USE of a pseudo register or memory location. */
6186 else if (GET_CODE (x) == USE
6187 && ! (GET_CODE (XEXP (x, 0)) == REG
6188 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6189 canon_reg (XEXP (x, 0), NULL_RTX);
6190 else if (GET_CODE (x) == CALL)
6192 /* The result of apply_change_group can be ignored; see canon_reg. */
6193 canon_reg (x, insn);
6194 apply_change_group ();
6195 fold_rtx (x, insn);
6198 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6199 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6200 is handled specially for this case, and if it isn't set, then there will
6201 be no equivalence for the destination. */
6202 if (n_sets == 1 && REG_NOTES (insn) != 0
6203 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6204 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6205 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6206 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6208 /* Canonicalize sources and addresses of destinations.
6209 We do this in a separate pass to avoid problems when a MATCH_DUP is
6210 present in the insn pattern. In that case, we want to ensure that
6211 we don't break the duplicate nature of the pattern. So we will replace
6212 both operands at the same time. Otherwise, we would fail to find an
6213 equivalent substitution in the loop calling validate_change below.
6215 We used to suppress canonicalization of DEST if it appears in SRC,
6216 but we don't do this any more. */
6218 for (i = 0; i < n_sets; i++)
6220 rtx dest = SET_DEST (sets[i].rtl);
6221 rtx src = SET_SRC (sets[i].rtl);
6222 rtx new = canon_reg (src, insn);
6223 int insn_code;
6225 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6226 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6227 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6228 || (insn_code = recog_memoized (insn)) < 0
6229 || insn_n_dups[insn_code] > 0)
6230 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6231 else
6232 SET_SRC (sets[i].rtl) = new;
6234 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6236 validate_change (insn, &XEXP (dest, 1),
6237 canon_reg (XEXP (dest, 1), insn), 1);
6238 validate_change (insn, &XEXP (dest, 2),
6239 canon_reg (XEXP (dest, 2), insn), 1);
6242 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6243 || GET_CODE (dest) == ZERO_EXTRACT
6244 || GET_CODE (dest) == SIGN_EXTRACT)
6245 dest = XEXP (dest, 0);
6247 if (GET_CODE (dest) == MEM)
6248 canon_reg (dest, insn);
6251 /* Now that we have done all the replacements, we can apply the change
6252 group and see if they all work. Note that this will cause some
6253 canonicalizations that would have worked individually not to be applied
6254 because some other canonicalization didn't work, but this should not
6255 occur often.
6257 The result of apply_change_group can be ignored; see canon_reg. */
6259 apply_change_group ();
6261 /* Set sets[i].src_elt to the class each source belongs to.
6262 Detect assignments from or to volatile things
6263 and set set[i] to zero so they will be ignored
6264 in the rest of this function.
6266 Nothing in this loop changes the hash table or the register chains. */
6268 for (i = 0; i < n_sets; i++)
6270 register rtx src, dest;
6271 register rtx src_folded;
6272 register struct table_elt *elt = 0, *p;
6273 enum machine_mode mode;
6274 rtx src_eqv_here;
6275 rtx src_const = 0;
6276 rtx src_related = 0;
6277 struct table_elt *src_const_elt = 0;
6278 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6279 int src_related_cost = 10000, src_elt_cost = 10000;
      /* Set nonzero if we need to call force_const_mem on the
	 contents of src_folded before using it.  */
6282 int src_folded_force_flag = 0;
6284 dest = SET_DEST (sets[i].rtl);
6285 src = SET_SRC (sets[i].rtl);
6287 /* If SRC is a constant that has no machine mode,
6288 hash it with the destination's machine mode.
6289 This way we can keep different modes separate. */
6291 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6292 sets[i].mode = mode;
6294 if (src_eqv)
6296 enum machine_mode eqvmode = mode;
6297 if (GET_CODE (dest) == STRICT_LOW_PART)
6298 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6299 do_not_record = 0;
6300 hash_arg_in_memory = 0;
6301 hash_arg_in_struct = 0;
6302 src_eqv = fold_rtx (src_eqv, insn);
6303 src_eqv_hash = HASH (src_eqv, eqvmode);
6305 /* Find the equivalence class for the equivalent expression. */
6307 if (!do_not_record)
6308 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6310 src_eqv_volatile = do_not_record;
6311 src_eqv_in_memory = hash_arg_in_memory;
6312 src_eqv_in_struct = hash_arg_in_struct;
6315 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6316 value of the INNER register, not the destination. So it is not
6317 a valid substitution for the source. But save it for later. */
6318 if (GET_CODE (dest) == STRICT_LOW_PART)
6319 src_eqv_here = 0;
6320 else
6321 src_eqv_here = src_eqv;
      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
	 simplified result, which may not necessarily be valid.  */
6325 src_folded = fold_rtx (src, insn);
6327 #if 0
6328 /* ??? This caused bad code to be generated for the m68k port with -O2.
6329 Suppose src is (CONST_INT -1), and that after truncation src_folded
6330 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6331 At the end we will add src and src_const to the same equivalence
6332 class. We now have 3 and -1 on the same equivalence class. This
6333 causes later instructions to be mis-optimized. */
6334 /* If storing a constant in a bitfield, pre-truncate the constant
6335 so we will be able to record it later. */
6336 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6337 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6339 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6341 if (GET_CODE (src) == CONST_INT
6342 && GET_CODE (width) == CONST_INT
6343 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6344 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6345 src_folded
6346 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6347 << INTVAL (width)) - 1));
6349 #endif
6351 /* Compute SRC's hash code, and also notice if it
6352 should not be recorded at all. In that case,
6353 prevent any further processing of this assignment. */
6354 do_not_record = 0;
6355 hash_arg_in_memory = 0;
6356 hash_arg_in_struct = 0;
6358 sets[i].src = src;
6359 sets[i].src_hash = HASH (src, mode);
6360 sets[i].src_volatile = do_not_record;
6361 sets[i].src_in_memory = hash_arg_in_memory;
6362 sets[i].src_in_struct = hash_arg_in_struct;
      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
	 a pseudo that is set more than once, do not record SRC.  Using
	 SRC as a replacement for anything else will be incorrect in that
	 situation.  Note that this usually occurs only for stack slots,
	 in which case all the RTL would be referring to SRC, so we don't
	 lose any optimization opportunities by not having SRC in the
	 hash table.  */
6372 if (GET_CODE (src) == MEM
6373 && find_reg_note (insn, REG_EQUIV, src) != 0
6374 && GET_CODE (dest) == REG
6375 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6376 && REG_N_SETS (REGNO (dest)) != 1)
6377 sets[i].src_volatile = 1;
6379 #if 0
6380 /* It is no longer clear why we used to do this, but it doesn't
6381 appear to still be needed. So let's try without it since this
6382 code hurts cse'ing widened ops. */
6383 /* If source is a perverse subreg (such as QI treated as an SI),
6384 treat it as volatile. It may do the work of an SI in one context
6385 where the extra bits are not being used, but cannot replace an SI
6386 in general. */
6387 if (GET_CODE (src) == SUBREG
6388 && (GET_MODE_SIZE (GET_MODE (src))
6389 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6390 sets[i].src_volatile = 1;
6391 #endif
6393 /* Locate all possible equivalent forms for SRC. Try to replace
6394 SRC in the insn with each cheaper equivalent.
6396 We have the following types of equivalents: SRC itself, a folded
6397 version, a value given in a REG_EQUAL note, or a value related
6398 to a constant.
6400 Each of these equivalents may be part of an additional class
6401 of equivalents (if more than one is in the table, they must be in
6402 the same class; we check for this).
6404 If the source is volatile, we don't do any table lookups.
6406 We note any constant equivalent for possible later use in a
6407 REG_NOTE. */
6409 if (!sets[i].src_volatile)
6410 elt = lookup (src, sets[i].src_hash, mode);
6412 sets[i].src_elt = elt;
6414 if (elt && src_eqv_here && src_eqv_elt)
6416 if (elt->first_same_value != src_eqv_elt->first_same_value)
6418 /* The REG_EQUAL is indicating that two formerly distinct
6419 classes are now equivalent. So merge them. */
6420 merge_equiv_classes (elt, src_eqv_elt);
6421 src_eqv_hash = HASH (src_eqv, elt->mode);
6422 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6425 src_eqv_here = 0;
6428 else if (src_eqv_elt)
6429 elt = src_eqv_elt;
6431 /* Try to find a constant somewhere and record it in `src_const'.
6432 Record its table element, if any, in `src_const_elt'. Look in
6433 any known equivalences first. (If the constant is not in the
6434 table, also set `sets[i].src_const_hash'). */
6435 if (elt)
6436 for (p = elt->first_same_value; p; p = p->next_same_value)
6437 if (p->is_const)
6439 src_const = p->exp;
6440 src_const_elt = elt;
6441 break;
6444 if (src_const == 0
6445 && (CONSTANT_P (src_folded)
6446 /* Consider (minus (label_ref L1) (label_ref L2)) as
6447 "constant" here so we will record it. This allows us
6448 to fold switch statements when an ADDR_DIFF_VEC is used. */
6449 || (GET_CODE (src_folded) == MINUS
6450 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6451 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6452 src_const = src_folded, src_const_elt = elt;
6453 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6454 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6456 /* If we don't know if the constant is in the table, get its
6457 hash code and look it up. */
6458 if (src_const && src_const_elt == 0)
6460 sets[i].src_const_hash = HASH (src_const, mode);
6461 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6464 sets[i].src_const = src_const;
6465 sets[i].src_const_elt = src_const_elt;
6467 /* If the constant and our source are both in the table, mark them as
6468 equivalent. Otherwise, if a constant is in the table but the source
6469 isn't, set ELT to it. */
6470 if (src_const_elt && elt
6471 && src_const_elt->first_same_value != elt->first_same_value)
6472 merge_equiv_classes (elt, src_const_elt);
6473 else if (src_const_elt && elt == 0)
6474 elt = src_const_elt;
6476 /* See if there is a register linearly related to a constant
6477 equivalent of SRC. */
6478 if (src_const
6479 && (GET_CODE (src_const) == CONST
6480 || (src_const_elt && src_const_elt->related_value != 0)))
6482 src_related = use_related_value (src_const, src_const_elt);
6483 if (src_related)
6485 struct table_elt *src_related_elt
6486 = lookup (src_related, HASH (src_related, mode), mode);
6487 if (src_related_elt && elt)
6489 if (elt->first_same_value
6490 != src_related_elt->first_same_value)
6491 /* This can occur when we previously saw a CONST
6492 involving a SYMBOL_REF and then see the SYMBOL_REF
6493 twice. Merge the involved classes. */
6494 merge_equiv_classes (elt, src_related_elt);
6496 src_related = 0;
6497 src_related_elt = 0;
6499 else if (src_related_elt && elt == 0)
6500 elt = src_related_elt;
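      /* Illustration (not in the original source): if some register is
	 already known to hold (const (plus (symbol_ref "table") (const_int 4)))
	 and SRC_CONST is (const (plus (symbol_ref "table") (const_int 8))),
	 use_related_value can yield that register plus (const_int 4),
	 avoiding a fresh address calculation.  */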
6504 /* See if we have a CONST_INT that is already in a register in a
6505 wider mode. */
6507 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6508 && GET_MODE_CLASS (mode) == MODE_INT
6509 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6511 enum machine_mode wider_mode;
6513 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6514 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6515 && src_related == 0;
6516 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6518 struct table_elt *const_elt
6519 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6521 if (const_elt == 0)
6522 continue;
6524 for (const_elt = const_elt->first_same_value;
6525 const_elt; const_elt = const_elt->next_same_value)
6526 if (GET_CODE (const_elt->exp) == REG)
6528 src_related = gen_lowpart_if_possible (mode,
6529 const_elt->exp);
6530 break;
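      /* Example (not in the original source): if this insn sets a QImode
	 register to (const_int 5) while some SImode register already holds
	 5, the loop above finds that REG in the wider mode's class and
	 SRC_RELATED becomes its low QImode part, typically
	 (subreg:QI (reg:SI N) 0), which may be cheaper than loading the
	 immediate again.  */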
6535 /* Another possibility is that we have an AND with a constant in
6536 a mode narrower than a word. If so, it might have been generated
6537 as part of an "if" which would narrow the AND. If we already
6538 have done the AND in a wider mode, we can use a SUBREG of that
6539 value. */
6541 if (flag_expensive_optimizations && ! src_related
6542 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6543 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6545 enum machine_mode tmode;
6546 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6548 for (tmode = GET_MODE_WIDER_MODE (mode);
6549 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6550 tmode = GET_MODE_WIDER_MODE (tmode))
6552 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6553 struct table_elt *larger_elt;
6555 if (inner)
6557 PUT_MODE (new_and, tmode);
6558 XEXP (new_and, 0) = inner;
6559 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6560 if (larger_elt == 0)
6561 continue;
6563 for (larger_elt = larger_elt->first_same_value;
6564 larger_elt; larger_elt = larger_elt->next_same_value)
6565 if (GET_CODE (larger_elt->exp) == REG)
6567 src_related
6568 = gen_lowpart_if_possible (mode, larger_elt->exp);
6569 break;
6572 if (src_related)
6573 break;
6578 #ifdef LOAD_EXTEND_OP
    /* See if a MEM has already been loaded with a widening operation;
       if it has, we can use a subreg of that.  Many CISC machines
       also have such operations, but this is only likely to be
       beneficial on these machines.  */
6584 if (flag_expensive_optimizations && src_related == 0
6585 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6586 && GET_MODE_CLASS (mode) == MODE_INT
6587 && GET_CODE (src) == MEM && ! do_not_record
6588 && LOAD_EXTEND_OP (mode) != NIL)
6590 enum machine_mode tmode;
6592 /* Set what we are trying to extend and the operation it might
6593 have been extended with. */
6594 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6595 XEXP (memory_extend_rtx, 0) = src;
6597 for (tmode = GET_MODE_WIDER_MODE (mode);
6598 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6599 tmode = GET_MODE_WIDER_MODE (tmode))
6601 struct table_elt *larger_elt;
6603 PUT_MODE (memory_extend_rtx, tmode);
6604 larger_elt = lookup (memory_extend_rtx,
6605 HASH (memory_extend_rtx, tmode), tmode);
6606 if (larger_elt == 0)
6607 continue;
6609 for (larger_elt = larger_elt->first_same_value;
6610 larger_elt; larger_elt = larger_elt->next_same_value)
6611 if (GET_CODE (larger_elt->exp) == REG)
6613 src_related = gen_lowpart_if_possible (mode,
6614 larger_elt->exp);
6615 break;
6618 if (src_related)
6619 break;
6622 #endif /* LOAD_EXTEND_OP */
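    /* Sketch (not in the original source), assuming a target whose loads
       zero-extend, i.e. LOAD_EXTEND_OP (QImode) == ZERO_EXTEND: if
       (zero_extend:SI (mem:QI A)) was computed earlier into (reg:SI N),
       the lookup above finds it and SRC_RELATED becomes the low QImode
       part of (reg:SI N), so this read of (mem:QI A) can reuse the
       register instead of touching memory again.  */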
6624 if (src == src_folded)
6625 src_folded = 0;
      /* At this point, ELT, if non-zero, points to a class of expressions
	 equivalent to the source of this SET; SRC, SRC_EQV, SRC_FOLDED,
	 and SRC_RELATED, if non-zero, each contain additional equivalent
	 expressions.  Prune these latter expressions by deleting expressions
	 already in the equivalence class.
6633 Check for an equivalent identical to the destination. If found,
6634 this is the preferred equivalent since it will likely lead to
6635 elimination of the insn. Indicate this by placing it in
6636 `src_related'. */
6638 if (elt) elt = elt->first_same_value;
6639 for (p = elt; p; p = p->next_same_value)
6641 enum rtx_code code = GET_CODE (p->exp);
6643 /* If the expression is not valid, ignore it. Then we do not
6644 have to check for validity below. In most cases, we can use
6645 `rtx_equal_p', since canonicalization has already been done. */
6646 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6647 continue;
6649 /* Also skip paradoxical subregs, unless that's what we're
6650 looking for. */
6651 if (code == SUBREG
6652 && (GET_MODE_SIZE (GET_MODE (p->exp))
6653 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6654 && ! (src != 0
6655 && GET_CODE (src) == SUBREG
6656 && GET_MODE (src) == GET_MODE (p->exp)
6657 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6658 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
6659 continue;
6661 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6662 src = 0;
6663 else if (src_folded && GET_CODE (src_folded) == code
6664 && rtx_equal_p (src_folded, p->exp))
6665 src_folded = 0;
6666 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6667 && rtx_equal_p (src_eqv_here, p->exp))
6668 src_eqv_here = 0;
6669 else if (src_related && GET_CODE (src_related) == code
6670 && rtx_equal_p (src_related, p->exp))
6671 src_related = 0;
	  /* If this is the same as the destination of the insn, we want
	     to prefer it.  Copy it to src_related.  The code below will
	     then give it a negative cost.  */
6676 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6677 src_related = dest;
      /* Find the cheapest valid equivalent, trying all the available
	 possibilities.  When costs are equal, prefer items not in the
	 hash table to ones that are.  Note that we can never worsen an
	 insn as the current contents will also succeed.
	 If we find an equivalent identical to the destination, use it as best,
	 since this insn will probably be eliminated in that case.  */
6687 if (src)
6689 if (rtx_equal_p (src, dest))
6690 src_cost = -1;
6691 else
6692 src_cost = COST (src);
6695 if (src_eqv_here)
6697 if (rtx_equal_p (src_eqv_here, dest))
6698 src_eqv_cost = -1;
6699 else
6700 src_eqv_cost = COST (src_eqv_here);
6703 if (src_folded)
6705 if (rtx_equal_p (src_folded, dest))
6706 src_folded_cost = -1;
6707 else
6708 src_folded_cost = COST (src_folded);
6711 if (src_related)
6713 if (rtx_equal_p (src_related, dest))
6714 src_related_cost = -1;
6715 else
6716 src_related_cost = COST (src_related);
6719 /* If this was an indirect jump insn, a known label will really be
6720 cheaper even though it looks more expensive. */
6721 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6722 src_folded = src_const, src_folded_cost = -1;
6724 /* Terminate loop when replacement made. This must terminate since
6725 the current contents will be tested and will always be valid. */
6726 while (1)
6728 rtx trial;
6730 /* Skip invalid entries. */
6731 while (elt && GET_CODE (elt->exp) != REG
6732 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6733 elt = elt->next_same_value;
6735 /* A paradoxical subreg would be bad here: it'll be the right
6736 size, but later may be adjusted so that the upper bits aren't
6737 what we want. So reject it. */
6738 if (elt != 0
6739 && GET_CODE (elt->exp) == SUBREG
6740 && (GET_MODE_SIZE (GET_MODE (elt->exp))
6741 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
6742 /* It is okay, though, if the rtx we're trying to match
6743 will ignore any of the bits we can't predict. */
6744 && ! (src != 0
6745 && GET_CODE (src) == SUBREG
6746 && GET_MODE (src) == GET_MODE (elt->exp)
6747 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6748 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
6750 elt = elt->next_same_value;
6751 continue;
6754 if (elt) src_elt_cost = elt->cost;
6756 /* Find cheapest and skip it for the next time. For items
6757 of equal cost, use this order:
6758 src_folded, src, src_eqv, src_related and hash table entry. */
6759 if (src_folded_cost <= src_cost
6760 && src_folded_cost <= src_eqv_cost
6761 && src_folded_cost <= src_related_cost
6762 && src_folded_cost <= src_elt_cost)
6764 trial = src_folded, src_folded_cost = 10000;
6765 if (src_folded_force_flag)
6766 trial = force_const_mem (mode, trial);
6768 else if (src_cost <= src_eqv_cost
6769 && src_cost <= src_related_cost
6770 && src_cost <= src_elt_cost)
6771 trial = src, src_cost = 10000;
6772 else if (src_eqv_cost <= src_related_cost
6773 && src_eqv_cost <= src_elt_cost)
6774 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6775 else if (src_related_cost <= src_elt_cost)
6776 trial = copy_rtx (src_related), src_related_cost = 10000;
6777 else
6779 trial = copy_rtx (elt->exp);
6780 elt = elt->next_same_value;
6781 src_elt_cost = 10000;
6784 /* We don't normally have an insn matching (set (pc) (pc)), so
6785 check for this separately here. We will delete such an
6786 insn below.
6788 Tablejump insns contain a USE of the table, so simply replacing
6789 the operand with the constant won't match. This is simply an
6790 unconditional branch, however, and is therefore valid. Just
6791 insert the substitution here and we will delete and re-emit
6792 the insn later. */
6794 if (n_sets == 1 && dest == pc_rtx
6795 && (trial == pc_rtx
6796 || (GET_CODE (trial) == LABEL_REF
6797 && ! condjump_p (insn))))
6799 /* If TRIAL is a label in front of a jump table, we are
6800 really falling through the switch (this is how casesi
6801 insns work), so we must branch around the table. */
6802 if (GET_CODE (trial) == CODE_LABEL
6803 && NEXT_INSN (trial) != 0
6804 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6805 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6806 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6808 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6810 SET_SRC (sets[i].rtl) = trial;
6811 cse_jumps_altered = 1;
6812 break;
6815 /* Look for a substitution that makes a valid insn. */
6816 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6818 /* The result of apply_change_group can be ignored; see
6819 canon_reg. */
	      validate_change (insn, &SET_SRC (sets[i].rtl),
			       canon_reg (SET_SRC (sets[i].rtl), insn),
			       1);
	      apply_change_group ();
6825 break;
6828 /* If we previously found constant pool entries for
6829 constants and this is a constant, try making a
6830 pool entry. Put it in src_folded unless we already have done
6831 this since that is where it likely came from. */
6833 else if (constant_pool_entries_cost
6834 && CONSTANT_P (trial)
6835 && ! (GET_CODE (trial) == CONST
6836 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
6837 && (src_folded == 0
6838 || (GET_CODE (src_folded) != MEM
6839 && ! src_folded_force_flag))
6840 && GET_MODE_CLASS (mode) != MODE_CC
6841 && mode != VOIDmode)
6843 src_folded_force_flag = 1;
6844 src_folded = trial;
6845 src_folded_cost = constant_pool_entries_cost;
6849 src = SET_SRC (sets[i].rtl);
6851 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6852 However, there is an important exception: If both are registers
6853 that are not the head of their equivalence class, replace SET_SRC
6854 with the head of the class. If we do not do this, we will have
6855 both registers live over a portion of the basic block. This way,
6856 their lifetimes will likely abut instead of overlapping. */
6857 if (GET_CODE (dest) == REG
6858 && REGNO_QTY_VALID_P (REGNO (dest))
6859 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6860 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6861 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6862 /* Don't do this if the original insn had a hard reg as
6863 SET_SRC. */
6864 && (GET_CODE (sets[i].src) != REG
6865 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6866 /* We can't call canon_reg here because it won't do anything if
6867 SRC is a hard register. */
6869 int first = qty_first_reg[reg_qty[REGNO (src)]];
6871 src = SET_SRC (sets[i].rtl)
6872 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6873 : gen_rtx (REG, GET_MODE (src), first);
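	  /* Illustration (not in the original source): suppose (reg 40)
	     and (reg 41) share a quantity whose first (cheapest) register
	     is (reg 40).  If canonicalization has turned this insn into
	     (set (reg 41) (reg 41)), the code above rewrites it as
	     (set (reg 41) (reg 40)), so the two registers' lifetimes can
	     abut instead of overlapping.  */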
6875 /* If we had a constant that is cheaper than what we are now
6876 setting SRC to, use that constant. We ignored it when we
6877 thought we could make this into a no-op. */
6878 if (src_const && COST (src_const) < COST (src)
6879 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6880 src = src_const;
6883 /* If we made a change, recompute SRC values. */
6884 if (src != sets[i].src)
6886 do_not_record = 0;
6887 hash_arg_in_memory = 0;
6888 hash_arg_in_struct = 0;
6889 sets[i].src = src;
6890 sets[i].src_hash = HASH (src, mode);
6891 sets[i].src_volatile = do_not_record;
6892 sets[i].src_in_memory = hash_arg_in_memory;
6893 sets[i].src_in_struct = hash_arg_in_struct;
6894 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6897 /* If this is a single SET, we are setting a register, and we have an
6898 equivalent constant, we want to add a REG_NOTE. We don't want
6899 to write a REG_EQUAL note for a constant pseudo since verifying that
6900 that pseudo hasn't been eliminated is a pain. Such a note also
6901 won't help anything. */
6902 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6903 && GET_CODE (src_const) != REG)
6905 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6907 /* Record the actual constant value in a REG_EQUAL note, making
6908 a new one if one does not already exist. */
6909 if (tem)
6910 XEXP (tem, 0) = src_const;
6911 else
6912 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6913 src_const, REG_NOTES (insn));
	  /* If storing a constant value in a register that
	     previously held the constant value 0,
	     record this fact with a REG_WAS_0 note on this insn.

	     Note that the *register* is required to have previously held 0,
	     not just any register in the quantity, and we must point to the
	     insn that set that register to zero.

	     Rather than track each register individually, we just see if
	     the last set for this quantity was for this register.  */
6926 if (REGNO_QTY_VALID_P (REGNO (dest))
6927 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6929 /* See if we previously had a REG_WAS_0 note. */
6930 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6931 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6933 if ((tem = single_set (const_insn)) != 0
6934 && rtx_equal_p (SET_DEST (tem), dest))
6936 if (note)
6937 XEXP (note, 0) = const_insn;
6938 else
6939 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6940 const_insn, REG_NOTES (insn));
6945 /* Now deal with the destination. */
6946 do_not_record = 0;
      sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6949 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6950 to the MEM or REG within it. */
6951 while (GET_CODE (dest) == SIGN_EXTRACT
6952 || GET_CODE (dest) == ZERO_EXTRACT
6953 || GET_CODE (dest) == SUBREG
6954 || GET_CODE (dest) == STRICT_LOW_PART)
6956 sets[i].inner_dest_loc = &XEXP (dest, 0);
6957 dest = XEXP (dest, 0);
6960 sets[i].inner_dest = dest;
6962 if (GET_CODE (dest) == MEM)
6964 #ifdef PUSH_ROUNDING
6965 /* Stack pushes invalidate the stack pointer. */
6966 rtx addr = XEXP (dest, 0);
6967 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
6968 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
6969 && XEXP (addr, 0) == stack_pointer_rtx)
6970 invalidate (stack_pointer_rtx, Pmode);
6971 #endif
6972 dest = fold_rtx (dest, insn);
6975 /* Compute the hash code of the destination now,
6976 before the effects of this instruction are recorded,
6977 since the register values used in the address computation
6978 are those before this instruction. */
6979 sets[i].dest_hash = HASH (dest, mode);
6981 /* Don't enter a bit-field in the hash table
6982 because the value in it after the store
6983 may not equal what was stored, due to truncation. */
6985 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6986 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6988 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6990 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6991 && GET_CODE (width) == CONST_INT
6992 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6993 && ! (INTVAL (src_const)
6994 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6995 /* Exception: if the value is constant,
6996 and it won't be truncated, record it. */
6998 else
7000 /* This is chosen so that the destination will be invalidated
7001 but no new value will be recorded.
7002 We must invalidate because sometimes constant
7003 values can be recorded for bitfields. */
7004 sets[i].src_elt = 0;
7005 sets[i].src_volatile = 1;
7006 src_eqv = 0;
7007 src_eqv_elt = 0;
7011 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7012 the insn. */
7013 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7015 PUT_CODE (insn, NOTE);
7016 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7017 NOTE_SOURCE_FILE (insn) = 0;
7018 cse_jumps_altered = 1;
7019 /* One less use of the label this insn used to jump to. */
7020 --LABEL_NUSES (JUMP_LABEL (insn));
7021 /* No more processing for this set. */
7022 sets[i].rtl = 0;
7025 /* If this SET is now setting PC to a label, we know it used to
7026 be a conditional or computed branch. So we see if we can follow
7027 it. If it was a computed branch, delete it and re-emit. */
7028 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7030 rtx p;
	  /* If this is not in the format for a simple branch and
	     this is the only SET in it, re-emit it.  */
7034 if (! simplejump_p (insn) && n_sets == 1)
7036 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7037 JUMP_LABEL (new) = XEXP (src, 0);
7038 LABEL_NUSES (XEXP (src, 0))++;
7039 delete_insn (insn);
7040 insn = new;
7042 else
7043 /* Otherwise, force rerecognition, since it probably had
7044 a different pattern before.
7045 This shouldn't really be necessary, since whatever
7046 changed the source value above should have done this.
7047 Until the right place is found, might as well do this here. */
7048 INSN_CODE (insn) = -1;
7050 /* Now that we've converted this jump to an unconditional jump,
7051 there is dead code after it. Delete the dead code until we
7052 reach a BARRIER, the end of the function, or a label. Do
7053 not delete NOTEs except for NOTE_INSN_DELETED since later
7054 phases assume these notes are retained. */
7056 p = insn;
7058 while (NEXT_INSN (p) != 0
7059 && GET_CODE (NEXT_INSN (p)) != BARRIER
7060 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7062 if (GET_CODE (NEXT_INSN (p)) != NOTE
7063 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7064 delete_insn (NEXT_INSN (p));
7065 else
7066 p = NEXT_INSN (p);
7069 /* If we don't have a BARRIER immediately after INSN, put one there.
7070 Much code assumes that there are no NOTEs between a JUMP_INSN and
7071 BARRIER. */
7073 if (NEXT_INSN (insn) == 0
7074 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7075 emit_barrier_before (NEXT_INSN (insn));
7077 /* We might have two BARRIERs separated by notes. Delete the second
7078 one if so. */
7080 if (p != insn && NEXT_INSN (p) != 0
7081 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7082 delete_insn (NEXT_INSN (p));
7084 cse_jumps_altered = 1;
7085 sets[i].rtl = 0;
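      /* Example (not in the original source): a conditional branch whose
	 source folded to (label_ref L) is re-emitted above as the simple
	 jump (set (pc) (label_ref L)); everything after it up to the next
	 BARRIER or CODE_LABEL is then dead and is deleted, except NOTEs.  */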
7088 /* If destination is volatile, invalidate it and then do no further
7089 processing for this assignment. */
7091 else if (do_not_record)
7093 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7094 || GET_CODE (dest) == MEM)
7095 invalidate (dest, VOIDmode);
7096 else if (GET_CODE (dest) == STRICT_LOW_PART
7097 || GET_CODE (dest) == ZERO_EXTRACT)
7098 invalidate (XEXP (dest, 0), GET_MODE (dest));
7099 sets[i].rtl = 0;
7102 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7103 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7105 #ifdef HAVE_cc0
7106 /* If setting CC0, record what it was set to, or a constant, if it
7107 is equivalent to a constant. If it is being set to a floating-point
7108 value, make a COMPARE with the appropriate constant of 0. If we
7109 don't do this, later code can interpret this as a test against
7110 const0_rtx, which can cause problems if we try to put it into an
7111 insn as a floating-point operand. */
7112 if (dest == cc0_rtx)
7114 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7115 this_insn_cc0_mode = mode;
7116 if (FLOAT_MODE_P (mode))
7117 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
7118 CONST0_RTX (mode));
7120 #endif
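  /* Example (not in the original source): for (set (cc0) (reg:SF 50)),
     recording bare (reg:SF 50) could later be misread as a test against
     integer const0_rtx; instead THIS_INSN_CC0 becomes
     (compare (reg:SF 50) (const_double:SF ... 0.0)), which is safe to
     use as a floating-point operand.  */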
7123 /* Now enter all non-volatile source expressions in the hash table
7124 if they are not already present.
7125 Record their equivalence classes in src_elt.
7126 This way we can insert the corresponding destinations into
7127 the same classes even if the actual sources are no longer in them
7128 (having been invalidated). */
7130 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7131 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7133 register struct table_elt *elt;
7134 register struct table_elt *classp = sets[0].src_elt;
7135 rtx dest = SET_DEST (sets[0].rtl);
7136 enum machine_mode eqvmode = GET_MODE (dest);
7138 if (GET_CODE (dest) == STRICT_LOW_PART)
7140 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7141 classp = 0;
7143 if (insert_regs (src_eqv, classp, 0))
7145 rehash_using_reg (src_eqv);
7146 src_eqv_hash = HASH (src_eqv, eqvmode);
7148 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7149 elt->in_memory = src_eqv_in_memory;
7150 elt->in_struct = src_eqv_in_struct;
7151 src_eqv_elt = elt;
7153 /* Check to see if src_eqv_elt is the same as a set source which
7154 does not yet have an elt, and if so set the elt of the set source
7155 to src_eqv_elt. */
7156 for (i = 0; i < n_sets; i++)
7157 if (sets[i].rtl && sets[i].src_elt == 0
7158 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7159 sets[i].src_elt = src_eqv_elt;
7162 for (i = 0; i < n_sets; i++)
7163 if (sets[i].rtl && ! sets[i].src_volatile
7164 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7166 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7168 /* REG_EQUAL in setting a STRICT_LOW_PART
7169 gives an equivalent for the entire destination register,
7170 not just for the subreg being stored in now.
7171 This is a more interesting equivalence, so we arrange later
7172 to treat the entire reg as the destination. */
7173 sets[i].src_elt = src_eqv_elt;
7174 sets[i].src_hash = src_eqv_hash;
7176 else
7178 /* Insert source and constant equivalent into hash table, if not
7179 already present. */
7180 register struct table_elt *classp = src_eqv_elt;
7181 register rtx src = sets[i].src;
7182 register rtx dest = SET_DEST (sets[i].rtl);
7183 enum machine_mode mode
7184 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7186 if (sets[i].src_elt == 0)
7188 register struct table_elt *elt;
7190 /* Note that these insert_regs calls cannot remove
7191 any of the src_elt's, because they would have failed to
7192 match if not still valid. */
7193 if (insert_regs (src, classp, 0))
7195 rehash_using_reg (src);
7196 sets[i].src_hash = HASH (src, mode);
7198 elt = insert (src, classp, sets[i].src_hash, mode);
7199 elt->in_memory = sets[i].src_in_memory;
7200 elt->in_struct = sets[i].src_in_struct;
7201 sets[i].src_elt = classp = elt;
7204 if (sets[i].src_const && sets[i].src_const_elt == 0
7205 && src != sets[i].src_const
7206 && ! rtx_equal_p (sets[i].src_const, src))
7207 sets[i].src_elt = insert (sets[i].src_const, classp,
7208 sets[i].src_const_hash, mode);
7211 else if (sets[i].src_elt == 0)
7212 /* If we did not insert the source into the hash table (e.g., it was
7213 volatile), note the equivalence class for the REG_EQUAL value, if any,
7214 so that the destination goes into that class. */
7215 sets[i].src_elt = src_eqv_elt;
7217 invalidate_from_clobbers (x);
7219 /* Some registers are invalidated by subroutine calls. Memory is
7220 invalidated by non-constant calls. */
7222 if (GET_CODE (insn) == CALL_INSN)
7224 if (! CONST_CALL_P (insn))
7225 invalidate_memory ();
7226 invalidate_for_call ();
7229 /* Now invalidate everything set by this instruction.
7230 If a SUBREG or other funny destination is being set,
7231 sets[i].rtl is still nonzero, so here we invalidate the reg
7232 a part of which is being set. */
7234 for (i = 0; i < n_sets; i++)
7235 if (sets[i].rtl)
7237 /* We can't use the inner dest, because the mode associated with
7238 a ZERO_EXTRACT is significant. */
7239 register rtx dest = SET_DEST (sets[i].rtl);
7241 /* Needed for registers to remove the register from its
7242 previous quantity's chain.
7243 Needed for memory if this is a nonvarying address, unless
7244 we have just done an invalidate_memory that covers even those. */
7245 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7246 || GET_CODE (dest) == MEM)
7247 invalidate (dest, VOIDmode);
7248 else if (GET_CODE (dest) == STRICT_LOW_PART
7249 || GET_CODE (dest) == ZERO_EXTRACT)
7250 invalidate (XEXP (dest, 0), GET_MODE (dest));
7253 /* Make sure registers mentioned in destinations
7254 are safe for use in an expression to be inserted.
7255 This removes from the hash table
7256 any invalid entry that refers to one of these registers.
7258 We don't care about the return value from mention_regs because
7259 we are going to hash the SET_DEST values unconditionally. */
7261 for (i = 0; i < n_sets; i++)
7262 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7263 mention_regs (SET_DEST (sets[i].rtl));
7265 /* We may have just removed some of the src_elt's from the hash table.
7266 So replace each one with the current head of the same class. */
7268 for (i = 0; i < n_sets; i++)
7269 if (sets[i].rtl)
7271 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7272 /* If elt was removed, find current head of same class,
7273 or 0 if nothing remains of that class. */
7275 register struct table_elt *elt = sets[i].src_elt;
7277 while (elt && elt->prev_same_value)
7278 elt = elt->prev_same_value;
7280 while (elt && elt->first_same_value == 0)
7281 elt = elt->next_same_value;
7282 sets[i].src_elt = elt ? elt->first_same_value : 0;
7286 /* Now insert the destinations into their equivalence classes. */
7288 for (i = 0; i < n_sets; i++)
7289 if (sets[i].rtl)
7291 register rtx dest = SET_DEST (sets[i].rtl);
7292 register struct table_elt *elt;
7294 /* Don't record value if we are not supposed to risk allocating
7295 floating-point values in registers that might be wider than
7296 memory. */
7297 if ((flag_float_store
7298 && GET_CODE (dest) == MEM
7299 && FLOAT_MODE_P (GET_MODE (dest)))
7300 /* Don't record values of destinations set inside a libcall block
7301 since we might delete the libcall. Things should have been set
7302 up so we won't want to reuse such a value, but we play it safe
7303 here. */
7304 || in_libcall_block
	  /* If we didn't put a REG_EQUAL value or a source into the hash
	     table, there is no point in recording DEST.  */
7307 || sets[i].src_elt == 0
7308 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7309 or SIGN_EXTEND, don't record DEST since it can cause
7310 some tracking to be wrong.
7312 ??? Think about this more later. */
7313 || (GET_CODE (dest) == SUBREG
7314 && (GET_MODE_SIZE (GET_MODE (dest))
7315 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7316 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7317 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7318 continue;
7320 /* STRICT_LOW_PART isn't part of the value BEING set,
7321 and neither is the SUBREG inside it.
7322 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7323 if (GET_CODE (dest) == STRICT_LOW_PART)
7324 dest = SUBREG_REG (XEXP (dest, 0));
7326 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7327 /* Registers must also be inserted into chains for quantities. */
7328 if (insert_regs (dest, sets[i].src_elt, 1))
7330 /* If `insert_regs' changes something, the hash code must be
7331 recalculated. */
7332 rehash_using_reg (dest);
7333 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7336 elt = insert (dest, sets[i].src_elt,
7337 sets[i].dest_hash, GET_MODE (dest));
7338 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7339 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7340 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7341 0))));
7343 if (elt->in_memory)
7345 /* This implicitly assumes a whole struct
7346 need not have MEM_IN_STRUCT_P.
7347 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7348 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7349 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7352 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7353 narrower than M2, and both M1 and M2 are the same number of words,
7354 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7355 make that equivalence as well.
7357 However, BAR may have equivalences for which gen_lowpart_if_possible
7358 will produce a simpler value than it produces when applied to
7359 BAR itself (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7360 BAR's equivalences. If we don't get a simplified form, make
7361 the SUBREG. It will not be used in an equivalence, but will
7362 cause two similar assignments to be detected.
7364 Note the loop below will find SUBREG_REG (DEST) since we have
7365 already entered SRC and DEST of the SET in the table. */
7367 if (GET_CODE (dest) == SUBREG
7368 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7369 / UNITS_PER_WORD)
7370 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7371 && (GET_MODE_SIZE (GET_MODE (dest))
7372 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7373 && sets[i].src_elt != 0)
7375 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7376 struct table_elt *elt, *classp = 0;
7378 for (elt = sets[i].src_elt->first_same_value; elt;
7379 elt = elt->next_same_value)
7381 rtx new_src = 0;
7382 unsigned src_hash;
7383 struct table_elt *src_elt;
7385 /* Ignore invalid entries. */
7386 if (GET_CODE (elt->exp) != REG
7387 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7388 continue;
7390 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7391 if (new_src == 0)
7392 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
7394 src_hash = HASH (new_src, new_mode);
7395 src_elt = lookup (new_src, src_hash, new_mode);
7397 /* Put the new source in the hash table if it isn't
7398 there already. */
7399 if (src_elt == 0)
7401 if (insert_regs (new_src, classp, 0))
7403 rehash_using_reg (new_src);
7404 src_hash = HASH (new_src, new_mode);
7406 src_elt = insert (new_src, classp, src_hash, new_mode);
7407 src_elt->in_memory = elt->in_memory;
7408 src_elt->in_struct = elt->in_struct;
7410 else if (classp && classp != src_elt->first_same_value)
7411 /* Show that two things that we've seen before are
7412 actually the same. */
7413 merge_equiv_classes (src_elt, classp);
7415 classp = src_elt->first_same_value;
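/* Illustration (editorial sketch, hypothetical RTL): on a target with
   4-byte words, given

       (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))

   SImode and HImode both occupy a single word, so the loop above also
   records (reg:HI 100) as equivalent to the low part of each member
   of (reg:SI 101)'s class, e.g. (subreg:HI (reg:SI 101) 0).  */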
7420 /* Special handling for (set REG0 REG1)
7421 where REG0 is the "cheapest", cheaper than REG1.
7422 After cse, REG1 will probably not be used in the sequel,
7423 so (if easily done) change this insn to (set REG1 REG0) and
7424 replace REG1 with REG0 in the previous insn that computed their value.
7425 Then REG1 will become a dead store and won't cloud the situation
7426 for later optimizations.
7428 Do not make this change if REG1 is a hard register, because it will
7429 then be used in the sequel and we may be changing a two-operand insn
7430 into a three-operand insn.
7432 Also do not do this if we are operating on a copy of INSN. */
7434 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7435 && NEXT_INSN (PREV_INSN (insn)) == insn
7436 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7437 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7438 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7439 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7440 == REGNO (SET_DEST (sets[0].rtl))))
7442 rtx prev = PREV_INSN (insn);
7443 while (prev && GET_CODE (prev) == NOTE)
7444 prev = PREV_INSN (prev);
7446 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7447 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7449 rtx dest = SET_DEST (sets[0].rtl);
7450 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7452 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7453 validate_change (insn, & SET_DEST (sets[0].rtl),
7454 SET_SRC (sets[0].rtl), 1);
7455 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7456 apply_change_group ();
7458 /* If REG1 was equivalent to a constant, REG0 is not. */
7459 if (note)
7460 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7462 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7463 any REG_WAS_0 note on INSN to PREV. */
7464 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7465 if (note)
7466 remove_note (prev, note);
7468 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7469 if (note)
7471 remove_note (insn, note);
7472 XEXP (note, 1) = REG_NOTES (prev);
7473 REG_NOTES (prev) = note;
7476 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7477 then we must delete it, because the value in REG0 has changed. */
7478 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7479 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7480 remove_note (insn, note);
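/* Example (editorial, hypothetical insns): if (reg 100) is cheaper
   than (reg 101), the code above rewrites

       (set (reg 101) (plus (reg 102) (reg 103)))
       (set (reg 100) (reg 101))

   as

       (set (reg 100) (plus (reg 102) (reg 103)))
       (set (reg 101) (reg 100))

   so the second insn becomes a dead store if (reg 101) is unused in
   the sequel.  */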
7484 /* If this is a conditional jump insn, record any known equivalences due to
7485 the condition being tested. */
7487 last_jump_equiv_class = 0;
7488 if (GET_CODE (insn) == JUMP_INSN
7489 && n_sets == 1 && GET_CODE (x) == SET
7490 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7491 record_jump_equiv (insn, 0);
7493 #ifdef HAVE_cc0
7494 /* If the previous insn set CC0 and this insn no longer references CC0,
7495 delete the previous insn. Here we use the fact that nothing expects CC0
7496 to be valid over an insn, which is true until the final pass. */
7497 if (prev_insn && GET_CODE (prev_insn) == INSN
7498 && (tem = single_set (prev_insn)) != 0
7499 && SET_DEST (tem) == cc0_rtx
7500 && ! reg_mentioned_p (cc0_rtx, x))
7502 PUT_CODE (prev_insn, NOTE);
7503 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7504 NOTE_SOURCE_FILE (prev_insn) = 0;
7507 prev_insn_cc0 = this_insn_cc0;
7508 prev_insn_cc0_mode = this_insn_cc0_mode;
7509 #endif
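/* Example (editorial, hypothetical insns): given

       (set (cc0) (compare (reg:SI 100) (const_int 0)))
       (set (reg:SI 101) (reg:SI 102))

   the second insn does not mention cc0, so the comparison can never
   be used and is converted into a deleted note by the code above.  */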
7511 prev_insn = insn;
7514 /* Remove from the hash table all expressions that reference memory. */
7515 static void
7516 invalidate_memory ()
7518 register int i;
7519 register struct table_elt *p, *next;
7521 for (i = 0; i < NBUCKETS; i++)
7522 for (p = table[i]; p; p = next)
7524 next = p->next_same_hash;
7525 if (p->in_memory)
7526 remove_from_table (p, i);
7530 /* XXX ??? The name of this function bears little resemblance to
7531 what this function actually does. FIXME. */
7532 static int
7533 note_mem_written (addr)
7534 register rtx addr;
7536 /* Pushing or popping the stack invalidates just the stack pointer. */
7537 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7538 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7539 && GET_CODE (XEXP (addr, 0)) == REG
7540 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7542 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7543 reg_tick[STACK_POINTER_REGNUM]++;
7545 /* This should be *very* rare. */
7546 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7547 invalidate (stack_pointer_rtx, VOIDmode);
7548 return 1;
7550 return 0;
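/* Editorial sketch (standalone, mirrors the test above): recognize an
   address that pushes or pops the stack.  */

static int
stack_push_or_pop_p (addr)
     rtx addr;
{
  return ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
           || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
          && GET_CODE (XEXP (addr, 0)) == REG
          && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM);
}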
7553 /* Perform invalidation on the basis of everything about an insn
7554 except for invalidating the actual places that are SET in it.
7555 This includes the places CLOBBERed, and anything that might
7556 alias with something that is SET or CLOBBERed.
7558 X is the pattern of the insn. */
7560 static void
7561 invalidate_from_clobbers (x)
7562 rtx x;
7564 if (GET_CODE (x) == CLOBBER)
7566 rtx ref = XEXP (x, 0);
7567 if (ref)
7569 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7570 || GET_CODE (ref) == MEM)
7571 invalidate (ref, VOIDmode);
7572 else if (GET_CODE (ref) == STRICT_LOW_PART
7573 || GET_CODE (ref) == ZERO_EXTRACT)
7574 invalidate (XEXP (ref, 0), GET_MODE (ref));
7577 else if (GET_CODE (x) == PARALLEL)
7579 register int i;
7580 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7582 register rtx y = XVECEXP (x, 0, i);
7583 if (GET_CODE (y) == CLOBBER)
7585 rtx ref = XEXP (y, 0);
7586 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7587 || GET_CODE (ref) == MEM)
7588 invalidate (ref, VOIDmode);
7589 else if (GET_CODE (ref) == STRICT_LOW_PART
7590 || GET_CODE (ref) == ZERO_EXTRACT)
7591 invalidate (XEXP (ref, 0), GET_MODE (ref));
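/* Editorial sketch (hypothetical helper, not in this file): the
   per-reference logic duplicated in both arms of
   invalidate_from_clobbers, factored out.  */

static void
invalidate_clobbered_ref (ref)
     rtx ref;
{
  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
      || GET_CODE (ref) == MEM)
    invalidate (ref, VOIDmode);
  else if (GET_CODE (ref) == STRICT_LOW_PART
           || GET_CODE (ref) == ZERO_EXTRACT)
    invalidate (XEXP (ref, 0), GET_MODE (ref));
}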
7597 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7598 and replace any registers in them with either an equivalent constant
7599 or the canonical form of the register. If we are inside an address,
7600 only do this if the address remains valid.
7602 OBJECT is 0 except when within a MEM in which case it is the MEM.
7604 Return the replacement for X. */
7606 static rtx
7607 cse_process_notes (x, object)
7608 rtx x;
7609 rtx object;
7611 enum rtx_code code = GET_CODE (x);
7612 char *fmt = GET_RTX_FORMAT (code);
7613 int i;
7615 switch (code)
7617 case CONST_INT:
7618 case CONST:
7619 case SYMBOL_REF:
7620 case LABEL_REF:
7621 case CONST_DOUBLE:
7622 case PC:
7623 case CC0:
7624 case LO_SUM:
7625 return x;
7627 case MEM:
7628 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7629 return x;
7631 case EXPR_LIST:
7632 case INSN_LIST:
7633 if (REG_NOTE_KIND (x) == REG_EQUAL)
7634 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7635 if (XEXP (x, 1))
7636 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7637 return x;
7639 case SIGN_EXTEND:
7640 case ZERO_EXTEND:
7641 case SUBREG:
7643 rtx new = cse_process_notes (XEXP (x, 0), object);
7644 /* We don't substitute VOIDmode constants into these rtx,
7645 since they would impede folding. */
7646 if (GET_MODE (new) != VOIDmode)
7647 validate_change (object, &XEXP (x, 0), new, 0);
7648 return x;
7651 case REG:
7652 i = reg_qty[REGNO (x)];
7654 /* Return a constant or a constant register. */
7655 if (REGNO_QTY_VALID_P (REGNO (x))
7656 && qty_const[i] != 0
7657 && (CONSTANT_P (qty_const[i])
7658 || GET_CODE (qty_const[i]) == REG))
7660 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7661 if (new)
7662 return new;
7665 /* Otherwise, canonicalize this register. */
7666 return canon_reg (x, NULL_RTX);
7669 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7670 if (fmt[i] == 'e')
7671 validate_change (object, &XEXP (x, i),
7672 cse_process_notes (XEXP (x, i), object), 0);
7674 return x;
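/* Editorial sketch (standalone): the generic traversal idiom used at
   the end of cse_process_notes.  Each rtx code has a format string in
   which 'e' marks a subexpression and 'E' a vector of them.  */

static void
walk_rtx (x)
     register rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  register char *fmt = GET_RTX_FORMAT (code);
  register int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      walk_rtx (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
        walk_rtx (XVECEXP (x, i, j));
}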
7677 /* Find common subexpressions between the end test of a loop and the beginning
7678 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7680 Often we have a loop where an expression in the exit test is used
7681 in the body of the loop. For example "while (*p) *q++ = *p++;".
7682 Because of the way we duplicate the loop exit test in front of the loop,
7683 however, we don't detect that common subexpression. This will be caught
7684 when global cse is implemented, but this is a quite common case.
7686 This function handles the most common cases of these common expressions.
7687 It is called after we have processed the basic block ending with the
7688 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7689 jumps to a label used only once. */
7691 static void
7692 cse_around_loop (loop_start)
7693 rtx loop_start;
7695 rtx insn;
7696 int i;
7697 struct table_elt *p;
7699 /* If the jump at the end of the loop doesn't go to the start, we don't
7700 do anything. */
7701 for (insn = PREV_INSN (loop_start);
7702 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7703 insn = PREV_INSN (insn))
7706 if (insn == 0
7707 || GET_CODE (insn) != NOTE
7708 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7709 return;
7711 /* If the last insn of the loop (the end test) was an NE comparison,
7712 we will interpret it as an EQ comparison, since we fell through
7713 the loop. Any equivalences resulting from that comparison are
7714 therefore not valid and must be invalidated. */
7715 if (last_jump_equiv_class)
7716 for (p = last_jump_equiv_class->first_same_value; p;
7717 p = p->next_same_value)
7718 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7719 || (GET_CODE (p->exp) == SUBREG
7720 && GET_CODE (SUBREG_REG (p->exp)) == REG))
7721 invalidate (p->exp, VOIDmode);
7722 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7723 || GET_CODE (p->exp) == ZERO_EXTRACT)
7724 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
7726 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7727 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7729 The only thing we do with SET_DEST is invalidate entries, so we
7730 can safely process each SET in order. It is slightly less efficient
7731 to do so, but we only want to handle the most common cases. */
7733 for (insn = NEXT_INSN (loop_start);
7734 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7735 && ! (GET_CODE (insn) == NOTE
7736 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7737 insn = NEXT_INSN (insn))
7739 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7740 && (GET_CODE (PATTERN (insn)) == SET
7741 || GET_CODE (PATTERN (insn)) == CLOBBER))
7742 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7743 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7744 && GET_CODE (PATTERN (insn)) == PARALLEL)
7745 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7746 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7747 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7748 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7749 loop_start);
7753 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7754 since they are done elsewhere. This function is called via note_stores. */
7756 static void
7757 invalidate_skipped_set (dest, set)
7758 rtx set;
7759 rtx dest;
7761 enum rtx_code code = GET_CODE (dest);
7763 if (code == MEM
7764 && ! note_mem_written (dest) /* If this is not a stack push ... */
7765 /* There are times when an address can appear varying and be a PLUS
7766 during this scan when it would be a fixed address were we to know
7767 the proper equivalences. So invalidate all memory if there is
7768 a BLKmode or nonscalar memory reference or a reference to a
7769 variable address. */
7770 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
7771 || cse_rtx_varies_p (XEXP (dest, 0))))
7773 invalidate_memory ();
7774 return;
7777 if (GET_CODE (set) == CLOBBER
7778 #ifdef HAVE_cc0
7779 || dest == cc0_rtx
7780 #endif
7781 || dest == pc_rtx)
7782 return;
7784 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
7785 invalidate (XEXP (dest, 0), GET_MODE (dest));
7786 else if (code == REG || code == SUBREG || code == MEM)
7787 invalidate (dest, VOIDmode);
7790 /* Invalidate all insns from START up to the end of the function or the
7791 next label. This is called when we wish to CSE around a block that is
7792 conditionally executed. */
7794 static void
7795 invalidate_skipped_block (start)
7796 rtx start;
7798 rtx insn;
7800 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7801 insn = NEXT_INSN (insn))
7803 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7804 continue;
7806 if (GET_CODE (insn) == CALL_INSN)
7808 if (! CONST_CALL_P (insn))
7809 invalidate_memory ();
7810 invalidate_for_call ();
7813 note_stores (PATTERN (insn), invalidate_skipped_set);
7817 /* Used for communication between the following two routines; contains a
7818 value to be checked for modification. */
7820 static rtx cse_check_loop_start_value;
7822 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7823 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7825 static void
7826 cse_check_loop_start (x, set)
7827 rtx x;
7828 rtx set;
7830 if (cse_check_loop_start_value == 0
7831 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7832 return;
7834 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7835 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7836 cse_check_loop_start_value = 0;
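/* Usage note (editorial): cse_set_around_loop below asks whether any
   insn between P and LOOP_START stores into VALUE roughly like this:

       cse_check_loop_start_value = value;
       for (q = p; q != loop_start; q = NEXT_INSN (q))
         if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
           note_stores (PATTERN (q), cse_check_loop_start);

   and then treats a zero in cse_check_loop_start_value as meaning
   that VALUE was clobbered.  */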
7839 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7840 a loop that starts with the label at LOOP_START.
7842 If X is a SET, we see if its SET_SRC is currently in our hash table.
7843 If so, we see if it has a value equal to some register used only in the
7844 loop exit code (as marked by jump.c).
7846 If those two conditions are true, we search backwards from the start of
7847 the loop to see if that same value was loaded into a register that still
7848 retains its value at the start of the loop.
7850 If so, we insert an insn after the load to copy the destination of that
7851 load into the equivalent register and (try to) replace our SET_SRC with that
7852 register.
7854 In any event, we invalidate whatever this SET or CLOBBER modifies. */
7856 static void
7857 cse_set_around_loop (x, insn, loop_start)
7858 rtx x;
7859 rtx insn;
7860 rtx loop_start;
7862 struct table_elt *src_elt;
7864 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7865 are setting PC or CC0 or whose SET_SRC is already a register. */
7866 if (GET_CODE (x) == SET
7867 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7868 && GET_CODE (SET_SRC (x)) != REG)
7870 src_elt = lookup (SET_SRC (x),
7871 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7872 GET_MODE (SET_DEST (x)));
7874 if (src_elt)
7875 for (src_elt = src_elt->first_same_value; src_elt;
7876 src_elt = src_elt->next_same_value)
7877 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7878 && COST (src_elt->exp) < COST (SET_SRC (x)))
7880 rtx p, set;
7882 /* Look for an insn in front of LOOP_START that sets
7883 something in the desired mode to SET_SRC (x) before we hit
7884 a label or CALL_INSN. */
7886 for (p = prev_nonnote_insn (loop_start);
7887 p && GET_CODE (p) != CALL_INSN
7888 && GET_CODE (p) != CODE_LABEL;
7889 p = prev_nonnote_insn (p))
7890 if ((set = single_set (p)) != 0
7891 && GET_CODE (SET_DEST (set)) == REG
7892 && GET_MODE (SET_DEST (set)) == src_elt->mode
7893 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7895 /* We now have to ensure that nothing between P
7896 and LOOP_START modified anything referenced in
7897 SET_SRC (x). We know that nothing within the loop
7898 can modify it, or we would have invalidated it in
7899 the hash table. */
7900 rtx q;
7902 cse_check_loop_start_value = SET_SRC (x);
7903 for (q = p; q != loop_start; q = NEXT_INSN (q))
7904 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7905 note_stores (PATTERN (q), cse_check_loop_start);
7907 /* If nothing was changed and we can replace our
7908 SET_SRC, add an insn after P to copy its destination
7909 to what we will be replacing SET_SRC with. */
7910 if (cse_check_loop_start_value
7911 && validate_change (insn, &SET_SRC (x),
7912 src_elt->exp, 0))
7913 emit_insn_after (gen_move_insn (src_elt->exp,
7914 SET_DEST (set)),
7915 p);
7916 break;
7921 /* Now invalidate anything modified by X. */
7922 note_mem_written (SET_DEST (x));
7924 /* See comment on similar code in cse_insn for explanation of these tests. */
7925 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7926 || GET_CODE (SET_DEST (x)) == MEM)
7927 invalidate (SET_DEST (x), VOIDmode);
7928 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7929 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
7930 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7933 /* Find the end of INSN's basic block and return its range,
7934 the total number of SETs in all the insns of the block, the last insn of the
7935 block, and the branch path.
7937 The branch path indicates which branches should be followed. If a non-zero
7938 path size is specified, the block should be rescanned and a different set
7939 of branches will be taken. The branch path is only used if
7940 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7942 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7943 used to describe the block. It is filled in with the information about
7944 the current block. The incoming structure's branch path, if any, is used
7945 to construct the output branch path. */
7947 void
7948 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7949 rtx insn;
7950 struct cse_basic_block_data *data;
7951 int follow_jumps;
7952 int after_loop;
7953 int skip_blocks;
7955 rtx p = insn, q;
7956 int nsets = 0;
7957 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7958 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7959 int path_size = data->path_size;
7960 int path_entry = 0;
7961 int i;
7963 /* Update the previous branch path, if any. If the last branch was
7964 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7965 shorten the path by one and look at the previous branch. We know that
7966 at least one branch must have been taken if PATH_SIZE is non-zero. */
7967 while (path_size > 0)
7969 if (data->path[path_size - 1].status != NOT_TAKEN)
7971 data->path[path_size - 1].status = NOT_TAKEN;
7972 break;
7974 else
7975 path_size--;
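/* Example (editorial): a previous path of TAKEN, TAKEN, NOT_TAKEN
   becomes TAKEN, NOT_TAKEN: the trailing NOT_TAKEN entry is dropped
   and the last taken branch is flipped, so successive calls
   enumerate the branch paths much like counting down in binary.  */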
7978 /* Scan to end of this basic block. */
7979 while (p && GET_CODE (p) != CODE_LABEL)
7981 /* Don't cse out the end of a loop. This makes a difference
7982 only for the unusual loops that always execute at least once;
7983 all other loops have labels there so we will stop in any case.
7984 Cse'ing out the end of the loop is dangerous because it
7985 might cause an invariant expression inside the loop
7986 to be reused after the end of the loop. This would make it
7987 hard to move the expression out of the loop in loop.c,
7988 especially if it is one of several equivalent expressions
7989 and loop.c would like to eliminate it.
7991 If we are running after loop.c has finished, we can ignore
7992 the NOTE_INSN_LOOP_END. */
7994 if (! after_loop && GET_CODE (p) == NOTE
7995 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
7996 break;
7998 /* Don't cse over a call to setjmp; on some machines (e.g., VAX)
7999 the regs restored by the longjmp come from
8000 a later time than the setjmp. */
8001 if (GET_CODE (p) == NOTE
8002 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8003 break;
8005 /* A PARALLEL can have lots of SETs in it,
8006 especially if it is really an ASM_OPERANDS. */
8007 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8008 && GET_CODE (PATTERN (p)) == PARALLEL)
8009 nsets += XVECLEN (PATTERN (p), 0);
8010 else if (GET_CODE (p) != NOTE)
8011 nsets += 1;
8013 /* Ignore insns made by CSE; they cannot affect the boundaries of
8014 the basic block. */
8016 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8017 high_cuid = INSN_CUID (p);
8018 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8019 low_cuid = INSN_CUID (p);
8021 /* See if this insn is in our branch path. If it is and we are to
8022 take it, do so. */
8023 if (path_entry < path_size && data->path[path_entry].branch == p)
8025 if (data->path[path_entry].status != NOT_TAKEN)
8026 p = JUMP_LABEL (p);
8028 /* Point to next entry in path, if any. */
8029 path_entry++;
8032 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8033 was specified, we haven't reached our maximum path length, there are
8034 insns following the target of the jump, this is the only use of the
8035 jump label, and the target label is preceded by a BARRIER.
8037 Alternatively, we can follow the jump if it branches around a
8038 block of code and there are no other branches into the block.
8039 In this case invalidate_skipped_block will be called to invalidate any
8040 registers set in the block when following the jump. */
8042 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8043 && GET_CODE (p) == JUMP_INSN
8044 && GET_CODE (PATTERN (p)) == SET
8045 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8046 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8047 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8049 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8050 if ((GET_CODE (q) != NOTE
8051 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8052 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8053 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8054 break;
8056 /* If we ran into a BARRIER, this code is an extension of the
8057 basic block when the branch is taken. */
8058 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8060 /* Don't allow ourselves to keep walking around an
8061 always-executed loop. */
8062 if (next_real_insn (q) == next)
8064 p = NEXT_INSN (p);
8065 continue;
8068 /* Similarly, don't put a branch in our path more than once. */
8069 for (i = 0; i < path_entry; i++)
8070 if (data->path[i].branch == p)
8071 break;
8073 if (i != path_entry)
8074 break;
8076 data->path[path_entry].branch = p;
8077 data->path[path_entry++].status = TAKEN;
8079 /* This branch now ends our path. It was possible that we
8080 didn't see this branch the last time around (when the
8081 insn in front of the target was a JUMP_INSN that was
8082 turned into a no-op). */
8083 path_size = path_entry;
8085 p = JUMP_LABEL (p);
8086 /* Mark block so we won't scan it again later. */
8087 PUT_MODE (NEXT_INSN (p), QImode);
8089 /* Detect a branch around a block of code. */
8090 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8092 register rtx tmp;
8094 if (next_real_insn (q) == next)
8096 p = NEXT_INSN (p);
8097 continue;
8100 for (i = 0; i < path_entry; i++)
8101 if (data->path[i].branch == p)
8102 break;
8104 if (i != path_entry)
8105 break;
8107 /* This is no_labels_between_p (p, q) with an added check for
8108 reaching the end of a function (in case Q precedes P). */
8109 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8110 if (GET_CODE (tmp) == CODE_LABEL)
8111 break;
8113 if (tmp == q)
8115 data->path[path_entry].branch = p;
8116 data->path[path_entry++].status = AROUND;
8118 path_size = path_entry;
8120 p = JUMP_LABEL (p);
8121 /* Mark block so we won't scan it again later. */
8122 PUT_MODE (NEXT_INSN (p), QImode);
8126 p = NEXT_INSN (p);
8129 data->low_cuid = low_cuid;
8130 data->high_cuid = high_cuid;
8131 data->nsets = nsets;
8132 data->last = p;
8134 /* If no jump in the path was taken, set our path length to zero
8135 so a rescan won't be done. */
8136 for (i = path_size - 1; i >= 0; i--)
8137 if (data->path[i].status != NOT_TAKEN)
8138 break;
8140 if (i == -1)
8141 data->path_size = 0;
8142 else
8143 data->path_size = path_size;
8145 /* End the current branch path. */
8146 data->path[path_size].branch = 0;
8149 /* Perform cse on the instructions of a function.
8150 F is the first instruction.
8151 NREGS is one plus the highest pseudo-reg number used in the function.
8153 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8154 (only if -frerun-cse-after-loop).
8156 Returns 1 if jump_optimize should be redone due to simplifications
8157 in conditional jump instructions. */
8159 int
8160 cse_main (f, nregs, after_loop, file)
8161 rtx f;
8162 int nregs;
8163 int after_loop;
8164 FILE *file;
8166 struct cse_basic_block_data val;
8167 register rtx insn = f;
8168 register int i;
8170 cse_jumps_altered = 0;
8171 recorded_label_ref = 0;
8172 constant_pool_entries_cost = 0;
8173 val.path_size = 0;
8175 init_recog ();
8176 init_alias_analysis ();
8178 max_reg = nregs;
8180 all_minus_one = (int *) alloca (nregs * sizeof (int));
8181 consec_ints = (int *) alloca (nregs * sizeof (int));
8183 for (i = 0; i < nregs; i++)
8185 all_minus_one[i] = -1;
8186 consec_ints[i] = i;
8189 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8190 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8191 reg_qty = (int *) alloca (nregs * sizeof (int));
8192 reg_in_table = (int *) alloca (nregs * sizeof (int));
8193 reg_tick = (int *) alloca (nregs * sizeof (int));
8195 #ifdef LOAD_EXTEND_OP
8197 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8198 and change the code and mode as appropriate. */
8199 memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, 0);
8200 #endif
8202 /* Discard all the free elements of the previous function
8203 since they are allocated in the temporary obstack. */
8204 bzero ((char *) table, sizeof table);
8205 free_element_chain = 0;
8206 n_elements_made = 0;
8208 /* Find the largest uid. */
8210 max_uid = get_max_uid ();
8211 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8212 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8214 /* Compute the mapping from uids to cuids.
8215 CUIDs are numbers assigned to insns, like uids,
8216 except that cuids increase monotonically through the code.
8217 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8218 between two insns is not affected by -g. */
8220 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8222 if (GET_CODE (insn) != NOTE
8223 || NOTE_LINE_NUMBER (insn) < 0)
8224 INSN_CUID (insn) = ++i;
8225 else
8226 /* Give a line number note the same cuid as preceding insn. */
8227 INSN_CUID (insn) = i;
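/* Example (editorial): for the stream "insn A, line-number note,
   insn B", A gets cuid 1, the note shares cuid 1, and B gets cuid 2,
   so cuid distances are the same whether or not -g emitted the
   note.  */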
8230 /* Initialize which registers are clobbered by calls. */
8232 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8234 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8235 if ((call_used_regs[i]
8236 /* Used to check !fixed_regs[i] here, but that isn't safe;
8237 fixed regs are still call-clobbered, and sched can get
8238 confused if they can "live across calls".
8240 The frame pointer is always preserved across calls. The arg
8241 pointer is if it is fixed. The stack pointer usually is, unless
8242 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8243 will be present. If we are generating PIC code, the PIC offset
8244 table register is preserved across calls. */
8246 && i != STACK_POINTER_REGNUM
8247 && i != FRAME_POINTER_REGNUM
8248 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8249 && i != HARD_FRAME_POINTER_REGNUM
8250 #endif
8251 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8252 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8253 #endif
8254 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8255 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8256 #endif
8258 || global_regs[i])
8259 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8261 /* Loop over basic blocks.
8262 Compute the maximum number of qty's needed for each basic block
8263 (which is 2 for each SET). */
8264 insn = f;
8265 while (insn)
8267 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8268 flag_cse_skip_blocks);
8270 /* If this basic block was already processed or has no sets, skip it. */
8271 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8273 PUT_MODE (insn, VOIDmode);
8274 insn = (val.last ? NEXT_INSN (val.last) : 0);
8275 val.path_size = 0;
8276 continue;
8279 cse_basic_block_start = val.low_cuid;
8280 cse_basic_block_end = val.high_cuid;
8281 max_qty = val.nsets * 2;
8283 if (file)
8284 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8285 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8286 val.nsets);
8288 /* Make MAX_QTY bigger to give us room to optimize
8289 past the end of this basic block, if that should prove useful. */
8290 if (max_qty < 500)
8291 max_qty = 500;
8293 max_qty += max_reg;
8295 /* If this basic block is being extended by following certain jumps,
8296 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8297 Otherwise, we start after this basic block. */
8298 if (val.path_size > 0)
8299 cse_basic_block (insn, val.last, val.path, 0);
8300 else
8302 int old_cse_jumps_altered = cse_jumps_altered;
8303 rtx temp;
8305 /* When cse changes a conditional jump to an unconditional
8306 jump, we want to reprocess the block, since it will give
8307 us a new branch path to investigate. */
8308 cse_jumps_altered = 0;
8309 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8310 if (cse_jumps_altered == 0
8311 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8312 insn = temp;
8314 cse_jumps_altered |= old_cse_jumps_altered;
8317 #ifdef USE_C_ALLOCA
8318 alloca (0);
8319 #endif
8322 /* Tell refers_to_mem_p that qty_const info is not available. */
8323 qty_const = 0;
8325 if (max_elements_made < n_elements_made)
8326 max_elements_made = n_elements_made;
8328 return cse_jumps_altered || recorded_label_ref;
8331 /* Process a single basic block. FROM and TO are the limits of the basic
8332 block. NEXT_BRANCH points to the branch path when following jumps or
8333 a null path when not following jumps.
8335 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8336 loop. This is true when we are being called for the last time on a
8337 block and this CSE pass is before loop.c. */
8339 static rtx
8340 cse_basic_block (from, to, next_branch, around_loop)
8341 register rtx from, to;
8342 struct branch_path *next_branch;
8343 int around_loop;
8345 register rtx insn;
8346 int to_usage = 0;
8347 int in_libcall_block = 0;
8349 /* Each of these arrays is undefined before max_reg, so only allocate
8350 the space actually needed and adjust the start below. */
8352 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8353 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8354 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8355 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8356 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8357 qty_comparison_code
8358 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8359 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8360 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8362 qty_first_reg -= max_reg;
8363 qty_last_reg -= max_reg;
8364 qty_mode -= max_reg;
8365 qty_const -= max_reg;
8366 qty_const_insn -= max_reg;
8367 qty_comparison_code -= max_reg;
8368 qty_comparison_qty -= max_reg;
8369 qty_comparison_const -= max_reg;
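/* Editorial note: only indices max_reg .. max_qty-1 are ever used
   (real quantity numbers start at max_reg), so each vector gets
   max_qty - max_reg elements and its pointer is biased downward by
   max_reg; after the bias, e.g., qty_const[max_reg] names the first
   allocated element.  */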
8371 new_basic_block ();
8373 /* TO might be a label. If so, protect it from being deleted. */
8374 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8375 ++LABEL_NUSES (to);
8377 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8379 register enum rtx_code code;
8381 /* See if this is a branch that is part of the path. If so, and it is
8382 to be taken, do so. */
8383 if (next_branch->branch == insn)
8385 enum taken status = next_branch++->status;
8386 if (status != NOT_TAKEN)
8388 if (status == TAKEN)
8389 record_jump_equiv (insn, 1);
8390 else
8391 invalidate_skipped_block (NEXT_INSN (insn));
8393 /* Set the last insn as the jump insn; it doesn't affect cc0.
8394 Then follow this branch. */
8395 #ifdef HAVE_cc0
8396 prev_insn_cc0 = 0;
8397 #endif
8398 prev_insn = insn;
8399 insn = JUMP_LABEL (insn);
8400 continue;
8404 code = GET_CODE (insn);
8405 if (GET_MODE (insn) == QImode)
8406 PUT_MODE (insn, VOIDmode);
8408 if (GET_RTX_CLASS (code) == 'i')
8410 /* Process notes first so we have all notes in canonical forms when
8411 looking for duplicate operations. */
8413 if (REG_NOTES (insn))
8414 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8416 /* Track when we are inside a LIBCALL block. Inside such a block,
8417 we do not want to record destinations. The last insn of a
8418 LIBCALL block is not considered to be part of the block, since
8419 its destination is the result of the block and hence should be
8420 recorded. */
8422 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8423 in_libcall_block = 1;
8424 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8425 in_libcall_block = 0;
8427 cse_insn (insn, in_libcall_block);
8430 /* If INSN is now an unconditional jump, skip to the end of our
8431 basic block by pretending that we just did the last insn in the
8432 basic block. If we are jumping to the end of our block, show
8433 that we can have one usage of TO. */
8435 if (simplejump_p (insn))
8437 if (to == 0)
8438 return 0;
8440 if (JUMP_LABEL (insn) == to)
8441 to_usage = 1;
8443 /* Maybe TO was deleted because the jump is unconditional.
8444 If so, there is nothing left in this basic block. */
8445 /* ??? Perhaps it would be smarter to set TO
8446 to whatever follows this insn,
8447 and pretend the basic block had always ended here. */
8448 if (INSN_DELETED_P (to))
8449 break;
8451 insn = PREV_INSN (to);
8454 /* See if it is ok to keep on going past the label
8455 which used to end our basic block. Remember that we incremented
8456 the count of that label, so we decrement it here. If we made
8457 a jump unconditional, TO_USAGE will be one; in that case, we don't
8458 want to count the use in that jump. */
8460 if (to != 0 && NEXT_INSN (insn) == to
8461 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8463 struct cse_basic_block_data val;
8464 rtx prev;
8466 insn = NEXT_INSN (to);
8468 if (LABEL_NUSES (to) == 0)
8469 insn = delete_insn (to);
8471 /* If TO was the last insn in the function, we are done. */
8472 if (insn == 0)
8473 return 0;
8475 /* If TO was preceded by a BARRIER we are done with this block
8476 because it has no continuation. */
8477 prev = prev_nonnote_insn (to);
8478 if (prev && GET_CODE (prev) == BARRIER)
8479 return insn;
8481 /* Find the end of the following block. Note that we won't be
8482 following branches in this case. */
8483 to_usage = 0;
8484 val.path_size = 0;
8485 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8487 /* If the tables we allocated have enough space left
8488 to handle all the SETs in the next basic block,
8489 continue through it. Otherwise, return,
8490 and that block will be scanned individually. */
8491 if (val.nsets * 2 + next_qty > max_qty)
8492 break;
8494 cse_basic_block_start = val.low_cuid;
8495 cse_basic_block_end = val.high_cuid;
8496 to = val.last;
8498 /* Prevent TO from being deleted if it is a label. */
8499 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8500 ++LABEL_NUSES (to);
8502 /* Back up so we process the first insn in the extension. */
8503 insn = PREV_INSN (insn);
8507 if (next_qty > max_qty)
8508 abort ();
8510 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8511 the previous insn is the only insn that branches to the head of a loop,
8512 we can cse into the loop. Don't do this if we changed the jump
8513 structure of a loop unless we aren't going to be following jumps. */
8515 if ((cse_jumps_altered == 0
8516 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8517 && around_loop && to != 0
8518 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8519 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8520 && JUMP_LABEL (PREV_INSN (to)) != 0
8521 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8522 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8524 return to ? NEXT_INSN (to) : 0;
8527 /* Count the number of times registers are used (not set) in X.
8528 COUNTS is an array in which we accumulate the count, INCR is how much
8529 we count each register usage.
8531 Don't count a usage of DEST, which is the SET_DEST of a SET which
8532 contains X in its SET_SRC. This is because such a SET does not
8533 modify the liveness of DEST. */
8535 static void
8536 count_reg_usage (x, counts, dest, incr)
8537 rtx x;
8538 int *counts;
8539 rtx dest;
8540 int incr;
8542 enum rtx_code code;
8543 char *fmt;
8544 int i, j;
8546 if (x == 0)
8547 return;
8549 switch (code = GET_CODE (x))
8551 case REG:
8552 if (x != dest)
8553 counts[REGNO (x)] += incr;
8554 return;
8556 case PC:
8557 case CC0:
8558 case CONST:
8559 case CONST_INT:
8560 case CONST_DOUBLE:
8561 case SYMBOL_REF:
8562 case LABEL_REF:
8563 case CLOBBER:
8564 return;
8566 case SET:
8567 /* Unless we are setting a REG, count everything in SET_DEST. */
8568 if (GET_CODE (SET_DEST (x)) != REG)
8569 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8571 /* If SRC has side-effects, then we can't delete this insn, so the
8572 usage of SET_DEST inside SRC counts.
8574 ??? Strictly-speaking, we might be preserving this insn
8575 because some other SET has side-effects, but that's hard
8576 to do and can't happen now. */
8577 count_reg_usage (SET_SRC (x), counts,
8578 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8579 incr);
8580 return;
8582 case CALL_INSN:
8583 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8585 /* ... falls through ... */
8586 case INSN:
8587 case JUMP_INSN:
8588 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8590 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8591 use them. */
8593 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8594 return;
8596 case EXPR_LIST:
8597 case INSN_LIST:
8598 if (REG_NOTE_KIND (x) == REG_EQUAL
8599 || GET_CODE (XEXP (x,0)) == USE)
8600 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8601 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8602 return;
8605 fmt = GET_RTX_FORMAT (code);
8606 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8608 if (fmt[i] == 'e')
8609 count_reg_usage (XEXP (x, i), counts, dest, incr);
8610 else if (fmt[i] == 'E')
8611 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8612 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8616 /* Scan all the insns and delete any that are dead; i.e., they store a register
8617 that is never used or they copy a register to itself.
8619 This is used to remove insns made obviously dead by cse. It improves the
8620 heuristics in loop since it won't try to move dead invariants out of loops
8621 or make givs for dead quantities. The remaining passes of the compilation
8622 are also sped up. */
8624 void
8625 delete_dead_from_cse (insns, nreg)
8626 rtx insns;
8627 int nreg;
8629 int *counts = (int *) alloca (nreg * sizeof (int));
8630 rtx insn, prev;
8631 rtx tem;
8632 int i;
8633 int in_libcall = 0;
8635 /* First count the number of times each register is used. */
8636 bzero ((char *) counts, sizeof (int) * nreg);
8637 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8638 count_reg_usage (insn, counts, NULL_RTX, 1);
8640 /* Go from the last insn to the first and delete insns that only set unused
8641 registers or copy a register to itself. As we delete an insn, remove
8642 usage counts for registers it uses. */
8643 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8645 int live_insn = 0;
8647 prev = prev_real_insn (insn);
8649 /* Don't delete any insns that are part of a libcall block.
8650 Flow or loop might get confused if we did that. Remember
8651 that we are scanning backwards. */
8652 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8653 in_libcall = 1;
8655 if (in_libcall)
8656 live_insn = 1;
8657 else if (GET_CODE (PATTERN (insn)) == SET)
8659 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8660 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8661 ;
8663 #ifdef HAVE_cc0
8664 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8665 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8666 && ((tem = next_nonnote_insn (insn)) == 0
8667 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8668 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8669 ;
8670 #endif
8671 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8672 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8673 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8674 || side_effects_p (SET_SRC (PATTERN (insn))))
8675 live_insn = 1;
8677 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8678 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8680 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8682 if (GET_CODE (elt) == SET)
8684 if (GET_CODE (SET_DEST (elt)) == REG
8685 && SET_DEST (elt) == SET_SRC (elt))
8686 ;
8688 #ifdef HAVE_cc0
8689 else if (GET_CODE (SET_DEST (elt)) == CC0
8690 && ! side_effects_p (SET_SRC (elt))
8691 && ((tem = next_nonnote_insn (insn)) == 0
8692 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8693 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8694 ;
8695 #endif
8696 else if (GET_CODE (SET_DEST (elt)) != REG
8697 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8698 || counts[REGNO (SET_DEST (elt))] != 0
8699 || side_effects_p (SET_SRC (elt)))
8700 live_insn = 1;
8702 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8703 live_insn = 1;
8705 else
8706 live_insn = 1;
8708 /* If this is a dead insn, delete it and show registers in it aren't
8709 being used. */
8711 if (! live_insn)
8713 count_reg_usage (insn, counts, NULL_RTX, -1);
8714 delete_insn (insn);
8717 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8718 in_libcall = 0;
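/* Example (editorial): if counts[150] is zero when the backward scan
   reaches

       (set (reg 150) (plus (reg 151) (reg 152)))

   the insn is deleted and the counts for (reg 151) and (reg 152) are
   decremented, which may in turn expose their setters as dead later
   in the same scan.  */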