/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "expr.h"

#include <setjmp.h>

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with the quantity's mode are in the hash table for both
   registers and are in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the mode of one register's
   quantity is not the same as the mode of those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

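/* A hedged illustration of the quantity-number mechanism (not part of
   the original sources; the register numbers and values are made up).
   Suppose max_reg is 100, so initially reg_qty[i] == i for every i.
   Scanning

	(set (reg 57) (const_int 5))

   allocates the first real quantity, next_qty == 100:

	reg_qty[57]    = 100
	qty_const[100] = (const_int 5)

   A later copy (set (reg 60) (reg 57)) shares that quantity instead of
   allocating a new one:

	reg_qty[60]        = 100
	qty_first_reg[100] = 57,  qty_last_reg[100] = 60
	reg_next_eqv[57]   = 60,  reg_prev_eqv[60]  = 57

   so either register can stand in for the other until one of them is
   stored into again.  */
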
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints:
   one containing max_reg -1's; the other max_reg + 500 (an approximation
   for max_qty) elements where element i contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
  : canon_hash (X, M) % NBUCKETS)

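/* An illustrative trace (not in the original sources): continuing the
   made-up example above, once reg_qty[57] == 100, HASH applied to
   (reg 57) computes (((unsigned) REG << 7) + 100) % NBUCKETS.  Because
   the hash uses the quantity number rather than the register number,
   (reg 60), which shares quantity 100, hashes to the same bucket and
   can be recognized as equivalent.  */
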
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N]) \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (X) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : notreg_cost (X))

/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))

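/* Continuing the made-up illustration above: after (set (reg 57)
   (const_int 5)) has been scanned, reg_qty[57] == 100 != 57, so
   REGNO_QTY_VALID_P (57) holds.  A register that has not yet been
   assigned a quantity still has reg_qty[N] == N and fails the test.  */
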
static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == arg_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
           || XEXP (X, 0) == hard_frame_pointer_rtx \
           || XEXP (X, 0) == arg_pointer_rtx \
           || XEXP (X, 0) == virtual_stack_vars_rtx \
           || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
           || XEXP (X, 0) == hard_frame_pointer_rtx \
           || XEXP (X, 0) == arg_pointer_rtx \
           || XEXP (X, 0) == virtual_stack_vars_rtx \
           || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
           || XEXP (X, 0) == virtual_stack_dynamic_rtx \
           || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost PROTO((rtx));
static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
                                       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
                                       struct table_elt *));
static void invalidate PROTO((rtx, enum machine_mode));
static int cse_rtx_varies_p PROTO((rtx));
static void remove_invalid_refs PROTO((int));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((void));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
                                                     HOST_WIDE_INT *,
                                                     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
                                                 enum machine_mode *,
                                                 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
                                 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
                                      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
                                    rtx, rtx, int));
static void cse_insn PROTO((rtx, int));
static int note_mem_written PROTO((rtx));
static void invalidate_from_clobbers PROTO((rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));

extern int rtx_equal_function_value_matters;

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
           && GET_CODE (SUBREG_REG (x)) == REG
           && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && (GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
           && subreg_lowpart_p (x)
           && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
                                     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
          ? (CHEAP_REG (SUBREG_REG (x)) ? 0
             : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
                : 2))
          : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)

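/* Worked out (an illustrative note, not in the original sources):
   COSTS_N_INSNS (1) == 2, matching the cost of 2 that the default case
   of rtx_cost below assigns to a simple operation; the MULT and DIV
   defaults below, COSTS_N_INSNS (5) and COSTS_N_INSNS (7), come to
   18 and 26 respectively.  */
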
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
         because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
        total = 2;
      else
        total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
         we have no way of validating the resulting insn.  So assign
         anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
         the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
        return COSTS_N_INSNS (2
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero ((char *) reg_tick, max_reg * sizeof (int));

  bcopy ((char *) all_minus_one, (char *) reg_in_table,
         max_reg * sizeof (int));
  bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
        {
          next = this->next_same_hash;
          free_element (this);
        }
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
         that not only can they not be allocated by the compiler, but
         they cannot be used in substitutions or canonicalizations
         either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
          || (new >= FIRST_PSEUDO_REGISTER
              && (firstr < FIRST_PSEUDO_REGISTER
                  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
                       || (uid_cuid[REGNO_FIRST_UID (new)]
                           < cse_basic_block_start))
                      && (uid_cuid[REGNO_LAST_UID (new)]
                          > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
         Otherwise, insert before any non-fixed hard regs that are at the
         end.  Registers of class NO_REGS cannot be used as an
         equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
             && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
             && new >= FIRST_PSEUDO_REGISTER)
        lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
        reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
        qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

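/* Illustration (made-up register numbers, not part of the original
   sources): if (reg 60) is copied from the frame pointer,
   make_regs_eqv (60, FRAME_POINTER_REGNUM) takes the else branch above,
   because FIXED_REGNO_P holds for the frame pointer at the head of the
   chain.  Pseudo 60 is linked in behind it, the fixed hard register
   stays canonical, and later substitutions replace (reg 60) with the
   frame pointer rather than the other way around.  */
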
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = reg_qty[reg];
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
        = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
                   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
        {
          if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
            remove_invalid_refs (i);

          reg_in_table[i] = reg_tick[i];
        }

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
        if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
          {
            rehash_using_reg (XEXP (x, 0));
            changed = 1;
          }

      if (GET_CODE (XEXP (x, 1)) == REG
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
        if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
          {
            rehash_using_reg (XEXP (x, 1));
            changed = 1;
          }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
         wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
          && qty_mode[reg_qty[regno]] != GET_MODE (x))
        return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
        {
          if (classp)
            for (classp = classp->first_same_value;
                 classp != 0;
                 classp = classp->next_same_value)
              if (GET_CODE (classp->exp) == REG
                  && GET_MODE (classp->exp) == GET_MODE (x))
                {
                  make_regs_eqv (regno, REGNO (classp->exp));
                  return 1;
                }

          make_new_qty (regno);
          qty_mode[reg_qty[regno]] = GET_MODE (x);
          return 1;
        }

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
           && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
        register struct table_elt *newfirst = next;
        while (next)
          {
            next->first_same_value = newfirst;
            next = next->next_same_value;
          }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
        /* This entry is not in the proper hash bucket.  This can happen
           when two classes were merged by `merge_equiv_classes'.  Search
           for the hash bucket that it heads.  This happens only very
           rarely, so the cost is acceptable.  */
        for (hash = 0; hash < NBUCKETS; hash++)
          if (table[hash] == elt)
            table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
        p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
        p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
                            || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
         invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
        if (GET_CODE (p->exp) == REG
            && REGNO (p->exp) == regno)
          return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
        if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
          return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
                                         GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
          /* Make sure this is a valid entry in the table.  */
          && exp_equiv_p (p->exp, p->exp, 1, 0))
        return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
        SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
                   /* GNU C++ takes advantage of this for `this'
                      (and other const values).  */
                   || (RTX_UNCHANGING_P (x)
                       && GET_CODE (x) == REG
                       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
                   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
        /* Insert at the head of the class */
        {
          register struct table_elt *p;
          elt->next_same_value = classp;
          classp->prev_same_value = elt;
          elt->first_same_value = elt;

          for (p = classp; p; p = p->next_same_value)
            p->first_same_value = elt;
        }
      else
        {
          /* Insert not at head of the class.  */
          /* Put it after the last element cheaper than X.  */
          register struct table_elt *p, *next;
          for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
               p = next);
          /* Put it after P and before NEXT.  */
          elt->next_same_value = next;
          if (next)
            next->prev_same_value = elt;
          elt->prev_same_value = p;
          p->next_same_value = elt;
          elt->first_same_value = classp;
        }
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
        = gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]]
           && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
        {
          if (p->is_const && GET_CODE (p->exp) != REG)
            {
              qty_const[reg_qty[REGNO (x)]]
                = gen_lowpart_if_possible (GET_MODE (x), p->exp);
              qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
              break;
            }
        }
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
           && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
        {
          /* Get the integer-free subexpression in the hash table.  */
          subhash = safe_hash (subexp, mode) % NBUCKETS;
          subelt = lookup (subexp, subhash, mode);
          if (subelt == 0)
            subelt = insert (subexp, NULL_PTR, subhash, mode);
          /* Initialize SUBELT's circular chain if it has none.  */
          if (subelt->related_value == 0)
            subelt->related_value = subelt;
          /* Find the element in the circular chain that precedes SUBELT.  */
          subelt_prev = subelt;
          while (subelt_prev->related_value != subelt)
            subelt_prev = subelt_prev->related_value;
          /* Put new ELT into SUBELT's circular chain just before SUBELT.
             This way the element that follows SUBELT is the oldest one.  */
          elt->related_value = subelt_prev->related_value;
          subelt_prev->related_value = elt;
        }
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
         Don't do this for invalid entries as we cannot find their
         hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
        {
          hash_arg_in_memory = 0;
          hash_arg_in_struct = 0;
          hash = HASH (exp, mode);

          if (GET_CODE (exp) == REG)
            delete_reg_equiv (REGNO (exp));

          remove_from_table (elt, hash);

          if (insert_regs (exp, class1, 0))
            {
              rehash_using_reg (exp);
              hash = HASH (exp, mode);
            }
          new = insert (exp, class1, hash, mode);
          new->in_memory = hash_arg_in_memory;
          new->in_struct = hash_arg_in_struct;
        }
    }
}

/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X.  This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
         that its value might have changed.  If it is a pseudo, remove its
         entry from the hash table.

         For a hard register, we do the first two actions above for any
         additional hard registers corresponding to X.  Then, if any of these
         registers are in the table, we must remove any REG entries that
         overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
        {
          /* Because a register can be referenced in more than one mode,
             we might have to remove more than one table entry.  */

          struct table_elt *elt;

          while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
            remove_from_table (elt, hash);
        }
      else
        {
          HOST_WIDE_INT in_table
            = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
          int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
          int tregno, tendregno;
          register struct table_elt *p, *next;

          CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

          for (i = regno + 1; i < endregno; i++)
            {
              in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
              CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
              delete_reg_equiv (i);
              reg_tick[i]++;
            }

          if (in_table)
            for (hash = 0; hash < NBUCKETS; hash++)
              for (p = table[hash]; p; p = next)
                {
                  next = p->next_same_hash;

                  if (GET_CODE (p->exp) != REG
                      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
                    continue;

                  tregno = REGNO (p->exp);
                  tendregno
                    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
                  if (tendregno > regno && tregno < endregno)
                    remove_from_table (p, hash);
                }
        }

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
        abort ();
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
        {
          next = p->next_same_hash;
          /* Invalidate ASM_OPERANDS which reference memory (this is easier
             than checking all the aliases).  */
          if (p->in_memory
              && (GET_CODE (p->exp) != MEM
                  || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
            remove_from_table (p, i);
        }
    }
}

/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (regno)
     int regno;
{
  register int i;
  register struct table_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (GET_CODE (p->exp) != REG
            && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
          remove_from_table (p, i);
      }
}

/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (x)
     rtx x;
{
  int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (GET_CODE (x) != REG
      || reg_in_table[REGNO (x)] < 0
      || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  We can skip
     objects that are registers, since they are handled specially.  */

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
            && exp_equiv_p (p->exp, p->exp, 1, 0)
            && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
          {
            if (p->next_same_hash)
              p->next_same_hash->prev_same_hash = p->prev_same_hash;

            if (p->prev_same_hash)
              p->prev_same_hash->next_same_hash = p->next_same_hash;
            else
              table[i] = p->next_same_hash;

            p->next_same_hash = table[hash];
            p->prev_same_hash = 0;
            if (table[hash])
              table[hash]->prev_same_hash = p;
            table[hash] = p;
          }
      }
}

/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call ()
{
  int regno, endregno;
  int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
        delete_reg_equiv (regno);
        if (reg_tick[regno] >= 0)
          reg_tick[regno]++;

        in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < NBUCKETS; hash++)
      for (p = table[hash]; p; p = next)
        {
          next = p->next_same_hash;

          if (p->in_memory)
            {
              remove_from_table (p, hash);
              continue;
            }

          if (GET_CODE (p->exp) != REG
              || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
            continue;

          regno = REGNO (p->exp);
          endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));

          for (i = regno; i < endregno; i++)
            if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
              {
                remove_from_table (p, hash);
                break;
              }
        }
}

1771 /* Given an expression X of type CONST,
1772 and ELT which is its table entry (or 0 if it
1773 is not in the hash table),
1774 return an alternate expression for X as a register plus integer.
1775 If none can be found, return 0. */
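/* Illustrative example (register and symbol names are made up): if X is
   (const (plus (symbol_ref "tbl") (const_int 12))) and the table already
   contains (const (plus (symbol_ref "tbl") (const_int 4))) whose class
   includes a register R, we can return (plus R (const_int 8)), which is
   usually cheaper than materializing the whole constant.  */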
1777 static rtx
1778 use_related_value (x, elt)
1779 rtx x;
1780 struct table_elt *elt;
1782 register struct table_elt *relt = 0;
1783 register struct table_elt *p, *q;
1784 HOST_WIDE_INT offset;
1786 /* First, is there anything related known?
1787 If we have a table element, we can tell from that.
1788 Otherwise, we must look it up. */
1790 if (elt != 0 && elt->related_value != 0)
1791 relt = elt;
1792 else if (elt == 0 && GET_CODE (x) == CONST)
1794 rtx subexp = get_related_value (x);
1795 if (subexp != 0)
1796 relt = lookup (subexp,
1797 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1798 GET_MODE (subexp));
1801 if (relt == 0)
1802 return 0;
1804 /* Search all related table entries for one that has an
1805 equivalent register. */
1807 p = relt;
1808 while (1)
1810 /* This loop is strange in that it is executed in two different cases.
1811 The first is when X is already in the table. Then it is searching
1812 the RELATED_VALUE list of X's class (RELT). The second case is when
1813 X is not in the table. Then RELT points to a class for the related
1814 value.
1816 Ensure that, whatever case we are in, we ignore classes that have
1817 the same value as X. */
1819 if (rtx_equal_p (x, p->exp))
1820 q = 0;
1821 else
1822 for (q = p->first_same_value; q; q = q->next_same_value)
1823 if (GET_CODE (q->exp) == REG)
1824 break;
1826 if (q)
1827 break;
1829 p = p->related_value;
1831 /* We went all the way around, so there is nothing to be found.
1832 Alternatively, perhaps RELT was in the table for some other reason
1833 and it has no related values recorded. */
1834 if (p == relt || p == 0)
1835 break;
1838 if (q == 0)
1839 return 0;
1841 offset = (get_integer_term (x) - get_integer_term (p->exp));
1842 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1843 return plus_constant (q->exp, offset);
1846 /* Hash an rtx. We are careful to make sure the value is never negative.
1847 Equivalent registers hash identically.
1848 MODE is used in hashing for CONST_INTs only;
1849 otherwise the mode of X is used.
1851 Store 1 in do_not_record if any subexpression is volatile.
1853 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1854 which does not have the RTX_UNCHANGING_P bit set.
1855 In this case, also store 1 in hash_arg_in_struct
1856 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1858 Note that cse_insn knows that the hash code of a MEM expression
1859 is just (int) MEM plus the hash code of the address. */
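/* For example, hashing (plus:SI (reg 65) (const_int 4)) combines
   (unsigned) PLUS + (unsigned) SImode with the hashes of the operands;
   the REG contributes ((unsigned) REG << 7) + reg_qty[65], which is why
   two registers in the same quantity class hash identically.  */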
1861 static unsigned
1862 canon_hash (x, mode)
1863 rtx x;
1864 enum machine_mode mode;
1866 register int i, j;
1867 register unsigned hash = 0;
1868 register enum rtx_code code;
1869 register char *fmt;
1871 /* repeat is used to turn tail-recursion into iteration. */
1872 repeat:
1873 if (x == 0)
1874 return hash;
1876 code = GET_CODE (x);
1877 switch (code)
1879 case REG:
1881 register int regno = REGNO (x);
1883 /* On some machines, we can't record any non-fixed hard register,
1884 because extending its life will cause reload problems. We
1885 consider ap, fp, and sp to be fixed for this purpose.
1886 On all machines, we can't record any global registers. */
1888 if (regno < FIRST_PSEUDO_REGISTER
1889 && (global_regs[regno]
1890 || (SMALL_REGISTER_CLASSES
1891 && ! fixed_regs[regno]
1892 && regno != FRAME_POINTER_REGNUM
1893 && regno != HARD_FRAME_POINTER_REGNUM
1894 && regno != ARG_POINTER_REGNUM
1895 && regno != STACK_POINTER_REGNUM)))
1897 do_not_record = 1;
1898 return 0;
1900 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1901 return hash;
1904 case CONST_INT:
1906 unsigned HOST_WIDE_INT tem = INTVAL (x);
1907 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1908 return hash;
1911 case CONST_DOUBLE:
1912 /* This is like the general case, except that it only counts
1913 the integers representing the constant. */
1914 hash += (unsigned) code + (unsigned) GET_MODE (x);
1915 if (GET_MODE (x) != VOIDmode)
1916 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1918 unsigned tem = XINT (x, i);
1919 hash += tem;
1921 else
1922 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1923 + (unsigned) CONST_DOUBLE_HIGH (x));
1924 return hash;
1926 /* Assume there is only one rtx object for any given label. */
1927 case LABEL_REF:
1928 hash
1929 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
1930 return hash;
1932 case SYMBOL_REF:
1933 hash
1934 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
1935 return hash;
1937 case MEM:
1938 if (MEM_VOLATILE_P (x))
1940 do_not_record = 1;
1941 return 0;
1943 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
1945 hash_arg_in_memory = 1;
1946 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1948 /* Now that we have already found this special case,
1949 we might as well speed it up as much as possible. */
1950 hash += (unsigned) MEM;
1951 x = XEXP (x, 0);
1952 goto repeat;
1954 case PRE_DEC:
1955 case PRE_INC:
1956 case POST_DEC:
1957 case POST_INC:
1958 case PC:
1959 case CC0:
1960 case CALL:
1961 case UNSPEC_VOLATILE:
1962 do_not_record = 1;
1963 return 0;
1965 case ASM_OPERANDS:
1966 if (MEM_VOLATILE_P (x))
1968 do_not_record = 1;
1969 return 0;
1971 break;
1973 default:
1974 break;
1977 i = GET_RTX_LENGTH (code) - 1;
1978 hash += (unsigned) code + (unsigned) GET_MODE (x);
1979 fmt = GET_RTX_FORMAT (code);
1980 for (; i >= 0; i--)
1982 if (fmt[i] == 'e')
1984 rtx tem = XEXP (x, i);
1986 /* If we are about to do the last recursive call
1987 needed at this level, change it into iteration.
1988 This function is called enough to be worth it. */
1989 if (i == 0)
1991 x = tem;
1992 goto repeat;
1994 hash += canon_hash (tem, 0);
1996 else if (fmt[i] == 'E')
1997 for (j = 0; j < XVECLEN (x, i); j++)
1998 hash += canon_hash (XVECEXP (x, i, j), 0);
1999 else if (fmt[i] == 's')
2001 register unsigned char *p = (unsigned char *) XSTR (x, i);
2002 if (p)
2003 while (*p)
2004 hash += *p++;
2006 else if (fmt[i] == 'i')
2008 register unsigned tem = XINT (x, i);
2009 hash += tem;
2011 else if (fmt[i] == '0')
2012 /* unused */;
2013 else
2014 abort ();
2016 return hash;
2019 /* Like canon_hash but with no side effects. */
2021 static unsigned
2022 safe_hash (x, mode)
2023 rtx x;
2024 enum machine_mode mode;
2026 int save_do_not_record = do_not_record;
2027 int save_hash_arg_in_memory = hash_arg_in_memory;
2028 int save_hash_arg_in_struct = hash_arg_in_struct;
2029 unsigned hash = canon_hash (x, mode);
2030 hash_arg_in_memory = save_hash_arg_in_memory;
2031 hash_arg_in_struct = save_hash_arg_in_struct;
2032 do_not_record = save_do_not_record;
2033 return hash;
2036 /* Return 1 iff X and Y would canonicalize into the same thing,
2037 without actually constructing the canonicalization of either one.
2038 If VALIDATE is nonzero,
2039 we assume X is an expression being processed from the rtl
2040 and Y was found in the hash table. We check register refs
2041 in Y for being marked as valid.
2043 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2044 that is known to be in the register. Ordinarily, we don't allow them
2045 to match, because letting them match would cause unpredictable results
2046 in all the places that search a hash table chain for an equivalent
2047 for a given value. A possible equivalent that has different structure
2048 has its hash code computed from different data. Whether the hash code
2049 is the same as that of the given value is pure luck. */
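/* For example, with EQUAL_VALUES nonzero, (const_int 4) can match
   (reg 70) when reg 70's quantity currently has qty_const ==
   (const_int 4) in the matching mode; with EQUAL_VALUES zero the
   differing codes cause an immediate mismatch.  */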
2051 static int
2052 exp_equiv_p (x, y, validate, equal_values)
2053 rtx x, y;
2054 int validate;
2055 int equal_values;
2057 register int i, j;
2058 register enum rtx_code code;
2059 register char *fmt;
2061 /* Note: it is incorrect to assume an expression is equivalent to itself
2062 if VALIDATE is nonzero. */
2063 if (x == y && !validate)
2064 return 1;
2065 if (x == 0 || y == 0)
2066 return x == y;
2068 code = GET_CODE (x);
2069 if (code != GET_CODE (y))
2071 if (!equal_values)
2072 return 0;
2074 /* If X is a constant and Y is a register or vice versa, they may be
2075 equivalent. We only have to validate if Y is a register. */
2076 if (CONSTANT_P (x) && GET_CODE (y) == REG
2077 && REGNO_QTY_VALID_P (REGNO (y))
2078 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2079 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2080 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2081 return 1;
2083 if (CONSTANT_P (y) && code == REG
2084 && REGNO_QTY_VALID_P (REGNO (x))
2085 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2086 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2087 return 1;
2089 return 0;
2092 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2093 if (GET_MODE (x) != GET_MODE (y))
2094 return 0;
2096 switch (code)
2098 case PC:
2099 case CC0:
2100 return x == y;
2102 case CONST_INT:
2103 return INTVAL (x) == INTVAL (y);
2105 case LABEL_REF:
2106 return XEXP (x, 0) == XEXP (y, 0);
2108 case SYMBOL_REF:
2109 return XSTR (x, 0) == XSTR (y, 0);
2111 case REG:
2113 int regno = REGNO (y);
2114 int endregno
2115 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2116 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2117 int i;
2119 /* If the quantities are not the same, the expressions are not
2120 equivalent. If they are and we are not to validate, they
2121 are equivalent. Otherwise, ensure all regs are up-to-date. */
2123 if (reg_qty[REGNO (x)] != reg_qty[regno])
2124 return 0;
2126 if (! validate)
2127 return 1;
2129 for (i = regno; i < endregno; i++)
2130 if (reg_in_table[i] != reg_tick[i])
2131 return 0;
2133 return 1;
2136 /* For commutative operations, check both orders. */
2137 case PLUS:
2138 case MULT:
2139 case AND:
2140 case IOR:
2141 case XOR:
2142 case NE:
2143 case EQ:
2144 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2145 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2146 validate, equal_values))
2147 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2148 validate, equal_values)
2149 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2150 validate, equal_values)));
2152 default:
2153 break;
2156 /* Compare the elements. If any pair of corresponding elements
2157 fails to match, return 0 for the whole thing. */
2159 fmt = GET_RTX_FORMAT (code);
2160 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2162 switch (fmt[i])
2164 case 'e':
2165 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2166 return 0;
2167 break;
2169 case 'E':
2170 if (XVECLEN (x, i) != XVECLEN (y, i))
2171 return 0;
2172 for (j = 0; j < XVECLEN (x, i); j++)
2173 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2174 validate, equal_values))
2175 return 0;
2176 break;
2178 case 's':
2179 if (strcmp (XSTR (x, i), XSTR (y, i)))
2180 return 0;
2181 break;
2183 case 'i':
2184 if (XINT (x, i) != XINT (y, i))
2185 return 0;
2186 break;
2188 case 'w':
2189 if (XWINT (x, i) != XWINT (y, i))
2190 return 0;
2191 break;
2193 case '0':
2194 break;
2196 default:
2197 abort ();
2201 return 1;
2204 /* Return 1 iff any subexpression of X matches Y.
2205 Here we do not require that X or Y be valid for being in the hash
2206 table (that is, that the registers they refer to be up to date). */
2208 static int
2209 refers_to_p (x, y)
2210 rtx x, y;
2212 register int i;
2213 register enum rtx_code code;
2214 register char *fmt;
2216 repeat:
2217 if (x == y)
2218 return 1;
2219 if (x == 0 || y == 0)
2220 return 0;
2222 code = GET_CODE (x);
2223 /* If X as a whole has the same code as Y, they may match.
2224 If so, return 1. */
2225 if (code == GET_CODE (y))
2227 if (exp_equiv_p (x, y, 0, 1))
2228 return 1;
2231 /* X does not match, so try its subexpressions. */
2233 fmt = GET_RTX_FORMAT (code);
2234 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2235 if (fmt[i] == 'e')
2237 if (i == 0)
2239 x = XEXP (x, 0);
2240 goto repeat;
2242 else
2243 if (refers_to_p (XEXP (x, i), y))
2244 return 1;
2246 else if (fmt[i] == 'E')
2248 int j;
2249 for (j = 0; j < XVECLEN (x, i); j++)
2250 if (refers_to_p (XVECEXP (x, i, j), y))
2251 return 1;
2254 return 0;
2257 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2258 set PBASE, PSTART, and PEND which correspond to the base of the address,
2259 the starting offset, and the ending offset, respectively.
2261 ADDR is known to be a nonvarying address. */
2263 /* ??? Despite what the comments say, this function is in fact frequently
2264 passed varying addresses. This does not appear to cause any problems. */
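/* A worked example (assuming the register has no known constant
   equivalent): for ADDR (plus (reg fp) (const_int 8)) and SIZE 4, the
   loop below strips the PLUS, leaving *PBASE = (reg fp), *PSTART = 8,
   and *PEND = 12.  */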
2266 static void
2267 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2268 rtx addr;
2269 int size;
2270 rtx *pbase;
2271 HOST_WIDE_INT *pstart, *pend;
2273 rtx base;
2274 HOST_WIDE_INT start, end;
2276 base = addr;
2277 start = 0;
2278 end = 0;
2280 if (flag_pic && GET_CODE (base) == PLUS
2281 && XEXP (base, 0) == pic_offset_table_rtx)
2282 base = XEXP (base, 1);
2284 /* Registers with nonvarying addresses usually have constant equivalents;
2285 but the frame pointer register is also possible. */
2286 if (GET_CODE (base) == REG
2287 && qty_const != 0
2288 && REGNO_QTY_VALID_P (REGNO (base))
2289 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2290 && qty_const[reg_qty[REGNO (base)]] != 0)
2291 base = qty_const[reg_qty[REGNO (base)]];
2292 else if (GET_CODE (base) == PLUS
2293 && GET_CODE (XEXP (base, 1)) == CONST_INT
2294 && GET_CODE (XEXP (base, 0)) == REG
2295 && qty_const != 0
2296 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2297 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2298 == GET_MODE (XEXP (base, 0)))
2299 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2301 start = INTVAL (XEXP (base, 1));
2302 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2304 /* This can happen as the result of virtual register instantiation,
2305 if the initial offset is too large to be a valid address. */
2306 else if (GET_CODE (base) == PLUS
2307 && GET_CODE (XEXP (base, 0)) == REG
2308 && GET_CODE (XEXP (base, 1)) == REG
2309 && qty_const != 0
2310 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2311 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2312 == GET_MODE (XEXP (base, 0)))
2313 && qty_const[reg_qty[REGNO (XEXP (base, 0))]]
2314 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2315 && (qty_mode[reg_qty[REGNO (XEXP (base, 1))]]
2316 == GET_MODE (XEXP (base, 1)))
2317 && qty_const[reg_qty[REGNO (XEXP (base, 1))]])
2319 rtx tem = qty_const[reg_qty[REGNO (XEXP (base, 1))]];
2320 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2322 /* One of the two values must be a constant. */
2323 if (GET_CODE (base) != CONST_INT)
2325 if (GET_CODE (tem) != CONST_INT)
2326 abort ();
2327 start = INTVAL (tem);
2329 else
2331 start = INTVAL (base);
2332 base = tem;
2336 /* Handle everything that we can find inside an address that has been
2337 viewed as constant. */
2339 while (1)
2341 /* If no part of this switch does a "continue", the code outside
2342 will exit this loop. */
2344 switch (GET_CODE (base))
2346 case LO_SUM:
2347 /* By definition, operand1 of a LO_SUM is the associated constant
2348 address. Use the associated constant address as the base
2349 instead. */
2350 base = XEXP (base, 1);
2351 continue;
2353 case CONST:
2354 /* Strip off CONST. */
2355 base = XEXP (base, 0);
2356 continue;
2358 case PLUS:
2359 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2361 start += INTVAL (XEXP (base, 1));
2362 base = XEXP (base, 0);
2363 continue;
2365 break;
2367 case AND:
2368 /* Handle the case of an AND which is the negative of a power of
2369 two. This is used to represent unaligned memory operations. */
2370 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2371 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2373 set_nonvarying_address_components (XEXP (base, 0), size,
2374 pbase, pstart, pend);
2376 /* Assume the worst misalignment. START is affected, but not
2377 END, so compensate by adjusting SIZE. Don't lose any
2378 constant we already had. */
2380 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2381 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2382 end += *pend;
2383 base = *pbase;
2385 break;
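/* Example for the AND case above: (and ... (const_int -8)) models an
   8-byte-aligned access, so -INTVAL - 1 == 7; the worst case begins up
   to 7 bytes below the recursively computed START, hence START shrinks
   by 7 while SIZE grows by 7.  */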
2387 default:
2388 break;
2391 break;
2394 if (GET_CODE (base) == CONST_INT)
2396 start += INTVAL (base);
2397 base = const0_rtx;
2400 end = start + size;
2402 /* Set the return values. */
2403 *pbase = base;
2404 *pstart = start;
2405 *pend = end;
2408 /* Return 1 if X has a value that can vary even between two
2409 executions of the program. 0 means X can be compared reliably
2410 against certain constants or near-constants. */
2412 static int
2413 cse_rtx_varies_p (x)
2414 register rtx x;
2416 /* We need not check for X and the equivalence class being of the same
2417 mode because if X is equivalent to a constant in some mode, it
2418 doesn't vary in any mode. */
2420 if (GET_CODE (x) == REG
2421 && REGNO_QTY_VALID_P (REGNO (x))
2422 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2423 && qty_const[reg_qty[REGNO (x)]] != 0)
2424 return 0;
2426 if (GET_CODE (x) == PLUS
2427 && GET_CODE (XEXP (x, 1)) == CONST_INT
2428 && GET_CODE (XEXP (x, 0)) == REG
2429 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2430 && (GET_MODE (XEXP (x, 0))
2431 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2432 && qty_const[reg_qty[REGNO (XEXP (x, 0))]])
2433 return 0;
2435 /* This can happen as the result of virtual register instantiation, if
2436 the initial constant is too large to be a valid address. This gives
2437 us a three-instruction sequence: load the large offset into a register,
2438 load fp minus a constant into a register, then a MEM which is the
2439 sum of the two `constant' registers. */
2440 if (GET_CODE (x) == PLUS
2441 && GET_CODE (XEXP (x, 0)) == REG
2442 && GET_CODE (XEXP (x, 1)) == REG
2443 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2444 && (GET_MODE (XEXP (x, 0))
2445 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2446 && qty_const[reg_qty[REGNO (XEXP (x, 0))]]
2447 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2448 && (GET_MODE (XEXP (x, 1))
2449 == qty_mode[reg_qty[REGNO (XEXP (x, 1))]])
2450 && qty_const[reg_qty[REGNO (XEXP (x, 1))]])
2451 return 0;
2453 return rtx_varies_p (x);
2456 /* Canonicalize an expression:
2457 replace each register reference inside it
2458 with the "oldest" equivalent register.
2460 If INSN is non-zero and we are replacing a pseudo with a hard register
2461 or vice versa, validate_change is used to ensure that INSN remains valid
2462 after we make our substitution. The calls are made with IN_GROUP non-zero
2463 so apply_change_group must be called upon the outermost return from this
2464 function (unless INSN is zero). The result of apply_change_group can
2465 generally be discarded since the changes we are making are optional. */
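/* For example, if (reg 65) and (reg 70) share a quantity and reg 65 is
   the oldest (first) register in it, (plus (reg 70) (const_int 4)) is
   canonicalized to (plus (reg 65) (const_int 4)).  */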
2467 static rtx
2468 canon_reg (x, insn)
2469 rtx x;
2470 rtx insn;
2472 register int i;
2473 register enum rtx_code code;
2474 register char *fmt;
2476 if (x == 0)
2477 return x;
2479 code = GET_CODE (x);
2480 switch (code)
2482 case PC:
2483 case CC0:
2484 case CONST:
2485 case CONST_INT:
2486 case CONST_DOUBLE:
2487 case SYMBOL_REF:
2488 case LABEL_REF:
2489 case ADDR_VEC:
2490 case ADDR_DIFF_VEC:
2491 return x;
2493 case REG:
2495 register int first;
2497 /* Never replace a hard reg, because hard regs can appear
2498 in more than one machine mode, and we must preserve the mode
2499 of each occurrence. Also, some hard regs appear in
2500 MEMs that are shared and mustn't be altered. Don't try to
2501 replace any reg that maps to a reg of class NO_REGS. */
2502 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2503 || ! REGNO_QTY_VALID_P (REGNO (x)))
2504 return x;
2506 first = qty_first_reg[reg_qty[REGNO (x)]];
2507 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2508 : REGNO_REG_CLASS (first) == NO_REGS ? x
2509 : gen_rtx_REG (qty_mode[reg_qty[REGNO (x)]], first));
2512 default:
2513 break;
2516 fmt = GET_RTX_FORMAT (code);
2517 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2519 register int j;
2521 if (fmt[i] == 'e')
2523 rtx new = canon_reg (XEXP (x, i), insn);
2524 int insn_code;
2526 /* If replacing pseudo with hard reg or vice versa, ensure the
2527 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2528 if (insn != 0 && new != 0
2529 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2530 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2531 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2532 || (insn_code = recog_memoized (insn)) < 0
2533 || insn_n_dups[insn_code] > 0))
2534 validate_change (insn, &XEXP (x, i), new, 1);
2535 else
2536 XEXP (x, i) = new;
2538 else if (fmt[i] == 'E')
2539 for (j = 0; j < XVECLEN (x, i); j++)
2540 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2543 return x;
2546 /* LOC is a location within INSN that is an operand address (the contents of
2547 a MEM). Find the best equivalent address to use that is valid for this
2548 insn.
2550 On most CISC machines, complicated address modes are costly, and rtx_cost
2551 is a good approximation for that cost. However, most RISC machines have
2552 only a few (usually only one) memory reference formats. If an address is
2553 valid at all, it is often just as cheap as any other address. Hence, for
2554 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2555 costs of various addresses. For two addresses of equal cost, choose the one
2556 with the highest `rtx_cost' value as that has the potential of eliminating
2557 the most insns. For equal costs, we choose the first in the equivalence
2558 class. Note that we ignore the fact that pseudo registers are cheaper
2559 than hard registers here because we would also prefer the pseudo registers.
2560 */
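/* For instance, when ADDRESS_COST ranks (reg 70) and
   (plus (reg fp) (const_int 8)) equally, we keep the PLUS form as the
   best replacement: its higher rtx_cost means the insn that computed
   the sum into reg 70 may become dead and be eliminated.  */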
2562 static void
2563 find_best_addr (insn, loc)
2564 rtx insn;
2565 rtx *loc;
2567 struct table_elt *elt, *p;
2568 rtx addr = *loc;
2569 int found_better = 1;
2570 int save_do_not_record = do_not_record;
2571 int save_hash_arg_in_memory = hash_arg_in_memory;
2572 int save_hash_arg_in_struct = hash_arg_in_struct;
2573 int addr_volatile;
2574 int regno;
2575 unsigned hash;
2577 /* Do not try to replace constant addresses or addresses of local and
2578 argument slots. These MEM expressions are made only once and inserted
2579 in many instructions, as well as being used to control symbol table
2580 output. It is not safe to clobber them.
2582 There are some uncommon cases where the address is already in a register
2583 for some reason, but we cannot take advantage of that because we have
2584 no easy way to unshare the MEM. In addition, looking up all stack
2585 addresses is costly. */
2586 if ((GET_CODE (addr) == PLUS
2587 && GET_CODE (XEXP (addr, 0)) == REG
2588 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2589 && (regno = REGNO (XEXP (addr, 0)),
2590 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2591 || regno == ARG_POINTER_REGNUM))
2592 || (GET_CODE (addr) == REG
2593 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2594 || regno == HARD_FRAME_POINTER_REGNUM
2595 || regno == ARG_POINTER_REGNUM))
2596 || GET_CODE (addr) == ADDRESSOF
2597 || CONSTANT_ADDRESS_P (addr))
2598 return;
2600 /* If this address is not simply a register, try to fold it. This will
2601 sometimes simplify the expression. Many simplifications
2602 will not be valid, but some, usually applying the associative rule, will
2603 be valid and produce better code. */
2604 if (GET_CODE (addr) != REG)
2606 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2608 if (1
2609 #ifdef ADDRESS_COST
2610 && (ADDRESS_COST (folded) < ADDRESS_COST (addr)
2611 || (ADDRESS_COST (folded) == ADDRESS_COST (addr)
2612 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2613 #else
2614 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2615 #endif
2616 && validate_change (insn, loc, folded, 0))
2617 addr = folded;
2620 /* If this address is not in the hash table, we can't look for equivalences
2621 of the whole address. Also, ignore if volatile. */
2623 do_not_record = 0;
2624 hash = HASH (addr, Pmode);
2625 addr_volatile = do_not_record;
2626 do_not_record = save_do_not_record;
2627 hash_arg_in_memory = save_hash_arg_in_memory;
2628 hash_arg_in_struct = save_hash_arg_in_struct;
2630 if (addr_volatile)
2631 return;
2633 elt = lookup (addr, hash, Pmode);
2635 #ifndef ADDRESS_COST
2636 if (elt)
2638 int our_cost = elt->cost;
2640 /* Find the lowest cost below ours that works. */
2641 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2642 if (elt->cost < our_cost
2643 && (GET_CODE (elt->exp) == REG
2644 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2645 && validate_change (insn, loc,
2646 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2647 return;
2649 #else
2651 if (elt)
2653 /* We need to find the best (under the criteria documented above) entry
2654 in the class that is valid. We use the `flag' field to indicate
2655 choices that were invalid and iterate until we can't find a better
2656 one that hasn't already been tried. */
2658 for (p = elt->first_same_value; p; p = p->next_same_value)
2659 p->flag = 0;
2661 while (found_better)
2663 int best_addr_cost = ADDRESS_COST (*loc);
2664 int best_rtx_cost = (elt->cost + 1) >> 1;
2665 struct table_elt *best_elt = elt;
2667 found_better = 0;
2668 for (p = elt->first_same_value; p; p = p->next_same_value)
2669 if (! p->flag
2670 && (GET_CODE (p->exp) == REG
2671 || exp_equiv_p (p->exp, p->exp, 1, 0))
2672 && (ADDRESS_COST (p->exp) < best_addr_cost
2673 || (ADDRESS_COST (p->exp) == best_addr_cost
2674 && (p->cost + 1) >> 1 > best_rtx_cost)))
2676 found_better = 1;
2677 best_addr_cost = ADDRESS_COST (p->exp);
2678 best_rtx_cost = (p->cost + 1) >> 1;
2679 best_elt = p;
2682 if (found_better)
2684 if (validate_change (insn, loc,
2685 canon_reg (copy_rtx (best_elt->exp),
2686 NULL_RTX), 0))
2687 return;
2688 else
2689 best_elt->flag = 1;
2694 /* If the address is a binary operation with the first operand a register
2695 and the second a constant, do the same as above, but looking for
2696 equivalences of the register. Then try to simplify before checking for
2697 the best address to use. This catches a few cases: First is when we
2698 have REG+const and the register is another REG+const. We can often merge
2699 the constants and eliminate one insn and one register. It may also be
2700 that a machine has a cheap REG+REG+const. Finally, this improves the
2701 code on the Alpha for unaligned byte stores. */
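/* Illustrative case: if *LOC is (plus (reg 70) (const_int 8)) and the
   table shows reg 70 equivalent to (plus (reg fp) (const_int 16)),
   cse_gen_binary below can form (plus (reg fp) (const_int 24)), merging
   the constants and possibly freeing reg 70.  */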
2703 if (flag_expensive_optimizations
2704 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2705 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2706 && GET_CODE (XEXP (*loc, 0)) == REG
2707 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2709 rtx c = XEXP (*loc, 1);
2711 do_not_record = 0;
2712 hash = HASH (XEXP (*loc, 0), Pmode);
2713 do_not_record = save_do_not_record;
2714 hash_arg_in_memory = save_hash_arg_in_memory;
2715 hash_arg_in_struct = save_hash_arg_in_struct;
2717 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2718 if (elt == 0)
2719 return;
2721 /* We need to find the best (under the criteria documented above) entry
2722 in the class that is valid. We use the `flag' field to indicate
2723 choices that were invalid and iterate until we can't find a better
2724 one that hasn't already been tried. */
2726 for (p = elt->first_same_value; p; p = p->next_same_value)
2727 p->flag = 0;
2729 while (found_better)
2731 int best_addr_cost = ADDRESS_COST (*loc);
2732 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2733 struct table_elt *best_elt = elt;
2734 rtx best_rtx = *loc;
2735 int count;
2737 /* This is in the worst case an O(n^2) algorithm, so limit our search
2738 to the first 32 elements on the list. This avoids trouble
2739 compiling code with very long basic blocks that can easily
2740 call cse_gen_binary so many times that we run out of memory. */
2742 found_better = 0;
2743 for (p = elt->first_same_value, count = 0;
2744 p && count < 32;
2745 p = p->next_same_value, count++)
2746 if (! p->flag
2747 && (GET_CODE (p->exp) == REG
2748 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2750 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2752 if ((ADDRESS_COST (new) < best_addr_cost
2753 || (ADDRESS_COST (new) == best_addr_cost
2754 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2756 found_better = 1;
2757 best_addr_cost = ADDRESS_COST (new);
2758 best_rtx_cost = (COST (new) + 1) >> 1;
2759 best_elt = p;
2760 best_rtx = new;
2764 if (found_better)
2766 if (validate_change (insn, loc,
2767 canon_reg (copy_rtx (best_rtx),
2768 NULL_RTX), 0))
2769 return;
2770 else
2771 best_elt->flag = 1;
2775 #endif
2778 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2779 operation (EQ, NE, GT, etc.), follow it back through the hash table to
2780 find what values are actually being compared.
2782 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2783 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2784 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2785 compared to produce cc0.
2787 The return value is the comparison operator and is either CODE itself
2788 or the code corresponding to the inverse of the comparison. */
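/* Example: called with CODE == NE, *PARG1 == (reg 65) and *PARG2 ==
   (const_int 0), where the table shows reg 65 equivalent to
   (eq:SI (reg 70) (const_int 4)); assuming STORE_FLAG_VALUE == 1, we
   return EQ with *PARG1 == (reg 70) and *PARG2 == (const_int 4).  */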
2790 static enum rtx_code
2791 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2792 enum rtx_code code;
2793 rtx *parg1, *parg2;
2794 enum machine_mode *pmode1, *pmode2;
2796 rtx arg1, arg2;
2798 arg1 = *parg1, arg2 = *parg2;
2800 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2802 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2804 /* Set non-zero when we find something of interest. */
2805 rtx x = 0;
2806 int reverse_code = 0;
2807 struct table_elt *p = 0;
2809 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2810 On machines with CC0, this is the only case that can occur, since
2811 fold_rtx will return the COMPARE or item being compared with zero
2812 when given CC0. */
2814 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2815 x = arg1;
2817 /* If ARG1 is a comparison operator and CODE is testing for
2818 STORE_FLAG_VALUE, get the inner arguments. */
2820 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2822 if (code == NE
2823 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2824 && code == LT && STORE_FLAG_VALUE == -1)
2825 #ifdef FLOAT_STORE_FLAG_VALUE
2826 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2827 && FLOAT_STORE_FLAG_VALUE < 0)
2828 #endif
2830 x = arg1;
2831 else if (code == EQ
2832 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2833 && code == GE && STORE_FLAG_VALUE == -1)
2834 #ifdef FLOAT_STORE_FLAG_VALUE
2835 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2836 && FLOAT_STORE_FLAG_VALUE < 0)
2837 #endif
2839 x = arg1, reverse_code = 1;
2842 /* ??? We could also check for
2844 (ne (and (eq (...) (const_int 1))) (const_int 0))
2846 and related forms, but let's wait until we see them occurring. */
2848 if (x == 0)
2849 /* Look up ARG1 in the hash table and see if it has an equivalence
2850 that lets us see what is being compared. */
2851 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2852 GET_MODE (arg1));
2853 if (p) p = p->first_same_value;
2855 for (; p; p = p->next_same_value)
2857 enum machine_mode inner_mode = GET_MODE (p->exp);
2859 /* If the entry isn't valid, skip it. */
2860 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2861 continue;
2863 if (GET_CODE (p->exp) == COMPARE
2864 /* Another possibility is that this machine has a compare insn
2865 that includes the comparison code. In that case, ARG1 would
2866 be equivalent to a comparison operation that would set ARG1 to
2867 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2868 ORIG_CODE is the actual comparison being done; if it is an EQ,
2869 we must reverse ORIG_CODE. On machines with a negative value
2870 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2871 || ((code == NE
2872 || (code == LT
2873 && GET_MODE_CLASS (inner_mode) == MODE_INT
2874 && (GET_MODE_BITSIZE (inner_mode)
2875 <= HOST_BITS_PER_WIDE_INT)
2876 && (STORE_FLAG_VALUE
2877 & ((HOST_WIDE_INT) 1
2878 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2879 #ifdef FLOAT_STORE_FLAG_VALUE
2880 || (code == LT
2881 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2882 && FLOAT_STORE_FLAG_VALUE < 0)
2883 #endif
2885 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2887 x = p->exp;
2888 break;
2890 else if ((code == EQ
2891 || (code == GE
2892 && GET_MODE_CLASS (inner_mode) == MODE_INT
2893 && (GET_MODE_BITSIZE (inner_mode)
2894 <= HOST_BITS_PER_WIDE_INT)
2895 && (STORE_FLAG_VALUE
2896 & ((HOST_WIDE_INT) 1
2897 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2898 #ifdef FLOAT_STORE_FLAG_VALUE
2899 || (code == GE
2900 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2901 && FLOAT_STORE_FLAG_VALUE < 0)
2902 #endif
2904 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2906 reverse_code = 1;
2907 x = p->exp;
2908 break;
2911 /* If this is fp + constant, the equivalent is a better operand since
2912 it may let us predict the value of the comparison. */
2913 else if (NONZERO_BASE_PLUS_P (p->exp))
2915 arg1 = p->exp;
2916 continue;
2920 /* If we didn't find a useful equivalence for ARG1, we are done.
2921 Otherwise, set up for the next iteration. */
2922 if (x == 0)
2923 break;
2925 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2926 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2927 code = GET_CODE (x);
2929 if (reverse_code)
2930 code = reverse_condition (code);
2933 /* Return our results. Return the modes from before fold_rtx because
2934 fold_rtx might produce a CONST_INT, whose VOIDmode would lose the
original mode. */
2935 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2936 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2938 return code;
2941 /* Try to simplify a unary operation CODE whose output mode is to be
2942 MODE with input operand OP whose mode was originally OP_MODE.
2943 Return zero if no simplification can be made. */
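/* Two examples of what this can fold: (neg:SI (const_int 5)) becomes
   (const_int -5), and a ZERO_EXTEND to SImode of (const_int -1) with
   OP_MODE QImode becomes (const_int 255).  */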
2945 rtx
2946 simplify_unary_operation (code, mode, op, op_mode)
2947 enum rtx_code code;
2948 enum machine_mode mode;
2949 rtx op;
2950 enum machine_mode op_mode;
2952 register int width = GET_MODE_BITSIZE (mode);
2954 /* The order of these tests is critical so that, for example, we don't
2955 check the wrong mode (input vs. output) for a conversion operation,
2956 such as FIX. At some point, this should be simplified. */
2958 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2960 if (code == FLOAT && GET_MODE (op) == VOIDmode
2961 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2963 HOST_WIDE_INT hv, lv;
2964 REAL_VALUE_TYPE d;
2966 if (GET_CODE (op) == CONST_INT)
2967 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2968 else
2969 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2971 #ifdef REAL_ARITHMETIC
2972 REAL_VALUE_FROM_INT (d, lv, hv, mode);
2973 #else
2974 if (hv < 0)
2976 d = (double) (~ hv);
2977 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2978 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2979 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
2980 d = (- d - 1.0);
2982 else
2984 d = (double) hv;
2985 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2986 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2987 d += (double) (unsigned HOST_WIDE_INT) lv;
2989 #endif /* REAL_ARITHMETIC */
2990 d = real_value_truncate (mode, d);
2991 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2993 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
2994 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2996 HOST_WIDE_INT hv, lv;
2997 REAL_VALUE_TYPE d;
2999 if (GET_CODE (op) == CONST_INT)
3000 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3001 else
3002 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3004 if (op_mode == VOIDmode)
3006 /* We don't know how to interpret negative-looking numbers in
3007 this case, so don't try to fold those. */
3008 if (hv < 0)
3009 return 0;
3011 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
3012 ; /* The value already fills both words; there is nothing to mask.  */
3013 else
3014 hv = 0, lv &= GET_MODE_MASK (op_mode);
3016 #ifdef REAL_ARITHMETIC
3017 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
3018 #else
3020 d = (double) (unsigned HOST_WIDE_INT) hv;
3021 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3022 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3023 d += (double) (unsigned HOST_WIDE_INT) lv;
3024 #endif /* REAL_ARITHMETIC */
3025 d = real_value_truncate (mode, d);
3026 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3028 #endif
3030 if (GET_CODE (op) == CONST_INT
3031 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3033 register HOST_WIDE_INT arg0 = INTVAL (op);
3034 register HOST_WIDE_INT val;
3036 switch (code)
3038 case NOT:
3039 val = ~ arg0;
3040 break;
3042 case NEG:
3043 val = - arg0;
3044 break;
3046 case ABS:
3047 val = (arg0 >= 0 ? arg0 : - arg0);
3048 break;
3050 case FFS:
3051 /* Don't use ffs here. Instead, get low order bit and then its
3052 number. If arg0 is zero, this will return 0, as desired. */
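/* E.g. arg0 == 12 (binary 1100): arg0 & -arg0 == 4 and exact_log2
   gives 2, so val == 3; for arg0 == 0, exact_log2 returns -1 and
   val == 0, matching the FFS convention of counting from 1.  */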
3053 arg0 &= GET_MODE_MASK (mode);
3054 val = exact_log2 (arg0 & (- arg0)) + 1;
3055 break;
3057 case TRUNCATE:
3058 val = arg0;
3059 break;
3061 case ZERO_EXTEND:
3062 if (op_mode == VOIDmode)
3063 op_mode = mode;
3064 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3066 /* If we were really extending the mode,
3067 we would have to distinguish between zero-extension
3068 and sign-extension. */
3069 if (width != GET_MODE_BITSIZE (op_mode))
3070 abort ();
3071 val = arg0;
3073 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3074 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3075 else
3076 return 0;
3077 break;
3079 case SIGN_EXTEND:
3080 if (op_mode == VOIDmode)
3081 op_mode = mode;
3082 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3084 /* If we were really extending the mode,
3085 we would have to distinguish between zero-extension
3086 and sign-extension. */
3087 if (width != GET_MODE_BITSIZE (op_mode))
3088 abort ();
3089 val = arg0;
3091 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3093 val
3094 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3095 if (val
3096 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3097 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3099 else
3100 return 0;
3101 break;
3103 case SQRT:
3104 return 0;
3106 default:
3107 abort ();
3110 /* Clear the bits that don't belong in our mode,
3111 unless they and our sign bit are all one.
3112 So we get either a reasonable negative value or a reasonable
3113 unsigned value for this mode. */
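/* E.g. with width == 8, NEG of (const_int 255) gives val == -255;
   its high bits and sign bit are not all one, so we mask down to
   val == 1 (-255 is congruent to 1 modulo 256).  */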
3114 if (width < HOST_BITS_PER_WIDE_INT
3115 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3116 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3117 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3119 return GEN_INT (val);
3122 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3123 for a DImode operation on a CONST_INT. */
3124 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_WIDE_INT * 2
3125 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3127 HOST_WIDE_INT l1, h1, lv, hv;
3129 if (GET_CODE (op) == CONST_DOUBLE)
3130 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3131 else
3132 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3134 switch (code)
3136 case NOT:
3137 lv = ~ l1;
3138 hv = ~ h1;
3139 break;
3141 case NEG:
3142 neg_double (l1, h1, &lv, &hv);
3143 break;
3145 case ABS:
3146 if (h1 < 0)
3147 neg_double (l1, h1, &lv, &hv);
3148 else
3149 lv = l1, hv = h1;
3150 break;
3152 case FFS:
3153 hv = 0;
3154 if (l1 == 0)
3155 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3156 else
3157 lv = exact_log2 (l1 & (-l1)) + 1;
3158 break;
3160 case TRUNCATE:
3161 /* This is just a change-of-mode, so do nothing. */
3162 lv = l1, hv = h1;
3163 break;
3165 case ZERO_EXTEND:
3166 if (op_mode == VOIDmode
3167 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3168 return 0;
3170 hv = 0;
3171 lv = l1 & GET_MODE_MASK (op_mode);
3172 break;
3174 case SIGN_EXTEND:
3175 if (op_mode == VOIDmode
3176 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3177 return 0;
3178 else
3180 lv = l1 & GET_MODE_MASK (op_mode);
3181 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3182 && (lv & ((HOST_WIDE_INT) 1
3183 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3184 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3186 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3188 break;
3190 case SQRT:
3191 return 0;
3193 default:
3194 return 0;
3197 return immed_double_const (lv, hv, mode);
3200 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3201 else if (GET_CODE (op) == CONST_DOUBLE
3202 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3204 REAL_VALUE_TYPE d;
3205 jmp_buf handler;
3206 rtx x;
3208 if (setjmp (handler))
3209 /* There used to be a warning here, but that is inadvisable.
3210 People may want to cause traps, and the natural way
3211 to do it should not get a warning. */
3212 return 0;
3214 set_float_handler (handler);
3216 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3218 switch (code)
3220 case NEG:
3221 d = REAL_VALUE_NEGATE (d);
3222 break;
3224 case ABS:
3225 if (REAL_VALUE_NEGATIVE (d))
3226 d = REAL_VALUE_NEGATE (d);
3227 break;
3229 case FLOAT_TRUNCATE:
3230 d = real_value_truncate (mode, d);
3231 break;
3233 case FLOAT_EXTEND:
3234 /* All this does is change the mode. */
3235 break;
3237 case FIX:
3238 d = REAL_VALUE_RNDZINT (d);
3239 break;
3241 case UNSIGNED_FIX:
3242 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3243 break;
3245 case SQRT:
3246 return 0;
3248 default:
3249 abort ();
3252 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3253 set_float_handler (NULL_PTR);
3254 return x;
3257 else if (GET_CODE (op) == CONST_DOUBLE
3258 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3259 && GET_MODE_CLASS (mode) == MODE_INT
3260 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3262 REAL_VALUE_TYPE d;
3263 jmp_buf handler;
3264 HOST_WIDE_INT val;
3266 if (setjmp (handler))
3267 return 0;
3269 set_float_handler (handler);
3271 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3273 switch (code)
3275 case FIX:
3276 val = REAL_VALUE_FIX (d);
3277 break;
3279 case UNSIGNED_FIX:
3280 val = REAL_VALUE_UNSIGNED_FIX (d);
3281 break;
3283 default:
3284 abort ();
3287 set_float_handler (NULL_PTR);
3289 /* Clear the bits that don't belong in our mode,
3290 unless they and our sign bit are all one.
3291 So we get either a reasonable negative value or a reasonable
3292 unsigned value for this mode. */
3293 if (width < HOST_BITS_PER_WIDE_INT
3294 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3295 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3296 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3298 /* If this would be an entire word for the target, but is not for
3299 the host, then sign-extend on the host so that the number will look
3300 the same way on the host that it would on the target.
3302 For example, when building a 64 bit alpha hosted 32 bit sparc
3303 targeted compiler, then we want the 32 bit unsigned value -1 to be
3304 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3305 The latter confuses the sparc backend. */
3307 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3308 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3309 val |= ((HOST_WIDE_INT) (-1) << width);
3311 return GEN_INT (val);
3313 #endif
3314 /* This was formerly used only for non-IEEE float.
3315 eggert@twinsun.com says it is safe for IEEE also. */
3316 else
3318 /* There are some simplifications we can do even if the operands
3319 aren't constant. */
3320 switch (code)
3322 case NEG:
3323 case NOT:
3324 /* (not (not X)) == X, similarly for NEG. */
3325 if (GET_CODE (op) == code)
3326 return XEXP (op, 0);
3327 break;
3329 case SIGN_EXTEND:
3330 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3331 becomes just the MINUS if its mode is MODE. This allows
3332 folding switch statements on machines using casesi (such as
3333 the Vax). */
3334 if (GET_CODE (op) == TRUNCATE
3335 && GET_MODE (XEXP (op, 0)) == mode
3336 && GET_CODE (XEXP (op, 0)) == MINUS
3337 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3338 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3339 return XEXP (op, 0);
3341 #ifdef POINTERS_EXTEND_UNSIGNED
3342 if (! POINTERS_EXTEND_UNSIGNED
3343 && mode == Pmode && GET_MODE (op) == ptr_mode
3344 && CONSTANT_P (op))
3345 return convert_memory_address (Pmode, op);
3346 #endif
3347 break;
3349 #ifdef POINTERS_EXTEND_UNSIGNED
3350 case ZERO_EXTEND:
3351 if (POINTERS_EXTEND_UNSIGNED
3352 && mode == Pmode && GET_MODE (op) == ptr_mode
3353 && CONSTANT_P (op))
3354 return convert_memory_address (Pmode, op);
3355 break;
3356 #endif
3358 default:
3359 break;
3362 return 0;
3366 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3367 and OP1. Return 0 if no simplification is possible.
3369 Don't use this for relational operations such as EQ or LT.
3370 Use simplify_relational_operation instead. */
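/* A sketch of typical folds: (plus:SI (reg X) (const_int 0)) returns
   the REG for integer modes, and two CONST_INT operands are folded to
   their arithmetic result further below.  */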
3372 rtx
3373 simplify_binary_operation (code, mode, op0, op1)
3374 enum rtx_code code;
3375 enum machine_mode mode;
3376 rtx op0, op1;
3378 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3379 HOST_WIDE_INT val;
3380 int width = GET_MODE_BITSIZE (mode);
3381 rtx tem;
3383 /* Relational operations don't work here. We must know the mode
3384 of the operands in order to do the comparison correctly.
3385 Assuming a full word can give incorrect results.
3386 Consider comparing 128 with -128 in QImode. */
3388 if (GET_RTX_CLASS (code) == '<')
3389 abort ();
3391 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3392 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3393 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3394 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3396 REAL_VALUE_TYPE f0, f1, value;
3397 jmp_buf handler;
3399 if (setjmp (handler))
3400 return 0;
3402 set_float_handler (handler);
3404 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3405 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3406 f0 = real_value_truncate (mode, f0);
3407 f1 = real_value_truncate (mode, f1);
3409 #ifdef REAL_ARITHMETIC
3410 #ifndef REAL_INFINITY
3411 if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3412 return 0;
3413 #endif
3414 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3415 #else
3416 switch (code)
3418 case PLUS:
3419 value = f0 + f1;
3420 break;
3421 case MINUS:
3422 value = f0 - f1;
3423 break;
3424 case MULT:
3425 value = f0 * f1;
3426 break;
3427 case DIV:
3428 #ifndef REAL_INFINITY
3429 if (f1 == 0)
3430 return 0;
3431 #endif
3432 value = f0 / f1;
3433 break;
3434 case SMIN:
3435 value = MIN (f0, f1);
3436 break;
3437 case SMAX:
3438 value = MAX (f0, f1);
3439 break;
3440 default:
3441 abort ();
3443 #endif
3445 value = real_value_truncate (mode, value);
3446 set_float_handler (NULL_PTR);
3447 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3449 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3451 /* We can fold some multi-word operations. */
3452 if (GET_MODE_CLASS (mode) == MODE_INT
3453 && width == HOST_BITS_PER_WIDE_INT * 2
3454 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3455 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3457 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3459 if (GET_CODE (op0) == CONST_DOUBLE)
3460 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3461 else
3462 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3464 if (GET_CODE (op1) == CONST_DOUBLE)
3465 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3466 else
3467 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3469 switch (code)
3471 case MINUS:
3472 /* A - B == A + (-B). */
3473 neg_double (l2, h2, &lv, &hv);
3474 l2 = lv, h2 = hv;
3476 /* .. fall through ... */
3478 case PLUS:
3479 add_double (l1, h1, l2, h2, &lv, &hv);
3480 break;
3482 case MULT:
3483 mul_double (l1, h1, l2, h2, &lv, &hv);
3484 break;
3486 case DIV: case MOD: case UDIV: case UMOD:
3487 /* We'd need to include tree.h to do this and it doesn't seem worth
3488 it. */
3489 return 0;
3491 case AND:
3492 lv = l1 & l2, hv = h1 & h2;
3493 break;
3495 case IOR:
3496 lv = l1 | l2, hv = h1 | h2;
3497 break;
3499 case XOR:
3500 lv = l1 ^ l2, hv = h1 ^ h2;
3501 break;
3503 case SMIN:
3504 if (h1 < h2
3505 || (h1 == h2
3506 && ((unsigned HOST_WIDE_INT) l1
3507 < (unsigned HOST_WIDE_INT) l2)))
3508 lv = l1, hv = h1;
3509 else
3510 lv = l2, hv = h2;
3511 break;
3513 case SMAX:
3514 if (h1 > h2
3515 || (h1 == h2
3516 && ((unsigned HOST_WIDE_INT) l1
3517 > (unsigned HOST_WIDE_INT) l2)))
3518 lv = l1, hv = h1;
3519 else
3520 lv = l2, hv = h2;
3521 break;
3523 case UMIN:
3524 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3525 || (h1 == h2
3526 && ((unsigned HOST_WIDE_INT) l1
3527 < (unsigned HOST_WIDE_INT) l2)))
3528 lv = l1, hv = h1;
3529 else
3530 lv = l2, hv = h2;
3531 break;
3533 case UMAX:
3534 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3535 || (h1 == h2
3536 && ((unsigned HOST_WIDE_INT) l1
3537 > (unsigned HOST_WIDE_INT) l2)))
3538 lv = l1, hv = h1;
3539 else
3540 lv = l2, hv = h2;
3541 break;
3543 case LSHIFTRT: case ASHIFTRT:
3544 case ASHIFT:
3545 case ROTATE: case ROTATERT:
3546 #ifdef SHIFT_COUNT_TRUNCATED
3547 if (SHIFT_COUNT_TRUNCATED)
3548 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3549 #endif
3551 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3552 return 0;
3554 if (code == LSHIFTRT || code == ASHIFTRT)
3555 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3556 code == ASHIFTRT);
3557 else if (code == ASHIFT)
3558 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3559 else if (code == ROTATE)
3560 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3561 else /* code == ROTATERT */
3562 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3563 break;
3565 default:
3566 return 0;
3569 return immed_double_const (lv, hv, mode);
3572 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3573 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3575 /* Even if we can't compute a constant result,
3576 there are some cases worth simplifying. */
3578 switch (code)
3580 case PLUS:
3581 /* In IEEE floating point, x+0 is not the same as x. Similarly
3582 for the other optimizations below. */
3583 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3584 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3585 break;
3587 if (op1 == CONST0_RTX (mode))
3588 return op0;
3590 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3591 if (GET_CODE (op0) == NEG)
3592 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3593 else if (GET_CODE (op1) == NEG)
3594 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3596 /* Handle both-operands-constant cases. We can only add
3597 CONST_INTs to constants since the sum of relocatable symbols
3598 can't be handled by most assemblers. Don't add CONST_INT
3599 to CONST_INT since overflow won't be computed properly if wider
3600 than HOST_BITS_PER_WIDE_INT. */
3602 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3603 && GET_CODE (op1) == CONST_INT)
3604 return plus_constant (op0, INTVAL (op1));
3605 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3606 && GET_CODE (op0) == CONST_INT)
3607 return plus_constant (op1, INTVAL (op0));
3609 /* See if this is something like X * C - X or vice versa or
3610 if the multiplication is written as a shift. If so, we can
3611 distribute and make a new multiply, shift, or maybe just
3612 have X (if C is 2 in the example above). But don't make a
3613 real multiply if we didn't have one before. */
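/* Sketch: (plus (mult (reg X) (const_int 3)) (reg X)) gives coeff0 == 3
   and coeff1 == 1, folding to (mult (reg X) (const_int 4)); with an
   ASHIFT by 2 instead, the combined coefficient is 5, but the result
   is discarded because no real MULT was present (had_mult == 0).  */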
3615 if (! FLOAT_MODE_P (mode))
3617 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3618 rtx lhs = op0, rhs = op1;
3619 int had_mult = 0;
3621 if (GET_CODE (lhs) == NEG)
3622 coeff0 = -1, lhs = XEXP (lhs, 0);
3623 else if (GET_CODE (lhs) == MULT
3624 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3626 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3627 had_mult = 1;
3629 else if (GET_CODE (lhs) == ASHIFT
3630 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3631 && INTVAL (XEXP (lhs, 1)) >= 0
3632 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3634 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3635 lhs = XEXP (lhs, 0);
3638 if (GET_CODE (rhs) == NEG)
3639 coeff1 = -1, rhs = XEXP (rhs, 0);
3640 else if (GET_CODE (rhs) == MULT
3641 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3643 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3644 had_mult = 1;
3646 else if (GET_CODE (rhs) == ASHIFT
3647 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3648 && INTVAL (XEXP (rhs, 1)) >= 0
3649 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3651 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3652 rhs = XEXP (rhs, 0);
3655 if (rtx_equal_p (lhs, rhs))
3657 tem = cse_gen_binary (MULT, mode, lhs,
3658 GEN_INT (coeff0 + coeff1));
3659 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3663 /* If one of the operands is a PLUS or a MINUS, see if we can
3664 simplify this by the associative law.
3665 Don't use the associative law for floating point.
3666 The inaccuracy makes it nonassociative,
3667 and subtle programs can break if operations are associated. */
3669 if (INTEGRAL_MODE_P (mode)
3670 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3671 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3672 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3673 return tem;
3674 break;
3676 case COMPARE:
3677 #ifdef HAVE_cc0
3678 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3679 using cc0, in which case we want to leave it as a COMPARE
3680 so we can distinguish it from a register-register-copy.
3682 In IEEE floating point, x-0 is not the same as x. */
3684 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3685 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3686 && op1 == CONST0_RTX (mode))
3687 return op0;
3688 #else
3689 /* Do nothing here. */
3690 #endif
3691 break;
3693 case MINUS:
3694 /* None of these optimizations can be done for IEEE
3695 floating point. */
3696 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3697 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3698 break;
3700 /* We can't assume x-x is 0 even with non-IEEE floating point,
3701 but since it is zero except in very strange circumstances, we
3702 will treat it as zero with -ffast-math. */
3703 if (rtx_equal_p (op0, op1)
3704 && ! side_effects_p (op0)
3705 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3706 return CONST0_RTX (mode);
3708 /* Change subtraction from zero into negation. */
3709 if (op0 == CONST0_RTX (mode))
3710 return gen_rtx_NEG (mode, op1);
3712 /* (-1 - a) is ~a. */
3713 if (op0 == constm1_rtx)
3714 return gen_rtx_NOT (mode, op1);
3716 /* Subtracting 0 has no effect. */
3717 if (op1 == CONST0_RTX (mode))
3718 return op0;
3720 /* See if this is something like X * C - X or vice versa or
3721 if the multiplication is written as a shift. If so, we can
3722 distribute and make a new multiply, shift, or maybe just
3723 have X (if C is 2 in the example above). But don't make a
3724 real multiply if we didn't have one before. */
3726 if (! FLOAT_MODE_P (mode))
3728 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3729 rtx lhs = op0, rhs = op1;
3730 int had_mult = 0;
3732 if (GET_CODE (lhs) == NEG)
3733 coeff0 = -1, lhs = XEXP (lhs, 0);
3734 else if (GET_CODE (lhs) == MULT
3735 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3737 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3738 had_mult = 1;
3740 else if (GET_CODE (lhs) == ASHIFT
3741 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3742 && INTVAL (XEXP (lhs, 1)) >= 0
3743 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3745 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3746 lhs = XEXP (lhs, 0);
3749 if (GET_CODE (rhs) == NEG)
3750 coeff1 = - 1, rhs = XEXP (rhs, 0);
3751 else if (GET_CODE (rhs) == MULT
3752 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3754 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3755 had_mult = 1;
3757 else if (GET_CODE (rhs) == ASHIFT
3758 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3759 && INTVAL (XEXP (rhs, 1)) >= 0
3760 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3762 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3763 rhs = XEXP (rhs, 0);
3766 if (rtx_equal_p (lhs, rhs))
3768 tem = cse_gen_binary (MULT, mode, lhs,
3769 GEN_INT (coeff0 - coeff1));
3770 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3774 /* (a - (-b)) -> (a + b). */
3775 if (GET_CODE (op1) == NEG)
3776 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3778 /* If one of the operands is a PLUS or a MINUS, see if we can
3779 simplify this by the associative law.
3780 Don't use the associative law for floating point.
3781 The inaccuracy makes it nonassociative,
3782 and subtle programs can break if operations are associated. */
3784 if (INTEGRAL_MODE_P (mode)
3785 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3786 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3787 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3788 return tem;
3790 /* Don't let a relocatable value get a negative coeff. */
3791 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3792 return plus_constant (op0, - INTVAL (op1));
3794 /* (x - (x & y)) -> (x & ~y) */
3795 if (GET_CODE (op1) == AND)
3797 if (rtx_equal_p (op0, XEXP (op1, 0)))
3798 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
3799 if (rtx_equal_p (op0, XEXP (op1, 1)))
3800 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
3802 break;
3804 case MULT:
3805 if (op1 == constm1_rtx)
3807 tem = simplify_unary_operation (NEG, mode, op0, mode);
3809 return tem ? tem : gen_rtx_NEG (mode, op0);
3812 /* In IEEE floating point, x*0 is not always 0. */
3813 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3814 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3815 && op1 == CONST0_RTX (mode)
3816 && ! side_effects_p (op0))
3817 return op1;
3819 /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
3820 However, ANSI says we can drop signals,
3821 so we can do this anyway. */
3822 if (op1 == CONST1_RTX (mode))
3823 return op0;
3825 /* Convert multiply by constant power of two into shift unless
3826 we are still generating RTL. This test is a kludge. */
3827 if (GET_CODE (op1) == CONST_INT
3828 && (val = exact_log2 (INTVAL (op1))) >= 0
3829 /* If the mode is larger than the host word size, and the
3830 uppermost bit is set, then this isn't a power of two due
3831 to implicit sign extension. */
3832 && (width <= HOST_BITS_PER_WIDE_INT
3833 || val != HOST_BITS_PER_WIDE_INT - 1)
3834 && ! rtx_equal_function_value_matters)
3835 return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
3837 if (GET_CODE (op1) == CONST_DOUBLE
3838 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3840 REAL_VALUE_TYPE d;
3841 jmp_buf handler;
3842 int op1is2, op1ism1;
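/* Decoding the CONST_DOUBLE may trap on the host (e.g. for a
   signalling NaN); set_float_handler arranges for such a trap to
   longjmp back to the setjmp below, where we simply decline to fold. */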
3844 if (setjmp (handler))
3845 return 0;
3847 set_float_handler (handler);
3848 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3849 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3850 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3851 set_float_handler (NULL_PTR);
3853 /* x*2 is x+x and x*(-1) is -x */
3854 if (op1is2 && GET_MODE (op0) == mode)
3855 return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
3857 else if (op1ism1 && GET_MODE (op0) == mode)
3858 return gen_rtx_NEG (mode, op0);
3860 break;
3862 case IOR:
3863 if (op1 == const0_rtx)
3864 return op0;
3865 if (GET_CODE (op1) == CONST_INT
3866 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3867 return op1;
3868 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3869 return op0;
3870 /* A | (~A) -> -1 */
3871 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3872 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3873 && ! side_effects_p (op0)
3874 && GET_MODE_CLASS (mode) != MODE_CC)
3875 return constm1_rtx;
3876 break;
3878 case XOR:
3879 if (op1 == const0_rtx)
3880 return op0;
3881 if (GET_CODE (op1) == CONST_INT
3882 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3883 return gen_rtx_NOT (mode, op0);
3884 if (op0 == op1 && ! side_effects_p (op0)
3885 && GET_MODE_CLASS (mode) != MODE_CC)
3886 return const0_rtx;
3887 break;
3889 case AND:
3890 if (op1 == const0_rtx && ! side_effects_p (op0))
3891 return const0_rtx;
3892 if (GET_CODE (op1) == CONST_INT
3893 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3894 return op0;
3895 if (op0 == op1 && ! side_effects_p (op0)
3896 && GET_MODE_CLASS (mode) != MODE_CC)
3897 return op0;
3898 /* A & (~A) -> 0 */
3899 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3900 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3901 && ! side_effects_p (op0)
3902 && GET_MODE_CLASS (mode) != MODE_CC)
3903 return const0_rtx;
3904 break;
3906 case UDIV:
3907 /* Convert divide by power of two into shift (divide by 1 handled
3908 below). */
3909 if (GET_CODE (op1) == CONST_INT
3910 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3911 return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
3913 /* ... fall through ... */
3915 case DIV:
3916 if (op1 == CONST1_RTX (mode))
3917 return op0;
3919 /* In IEEE floating point, 0/x is not always 0. */
3920 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3921 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3922 && op0 == CONST0_RTX (mode)
3923 && ! side_effects_p (op1))
3924 return op0;
3926 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3927 /* Change division by a constant into multiplication. Only do
3928 this with -ffast-math until an expert says it is safe in
3929 general. */
3930 else if (GET_CODE (op1) == CONST_DOUBLE
3931 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3932 && op1 != CONST0_RTX (mode)
3933 && flag_fast_math)
3935 REAL_VALUE_TYPE d;
3936 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3938 if (! REAL_VALUES_EQUAL (d, dconst0))
3940 #if defined (REAL_ARITHMETIC)
3941 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3942 return gen_rtx_MULT (mode, op0,
3943 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3944 #else
3945 return gen_rtx_MULT (mode, op0,
3946 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3947 #endif
3950 #endif
3951 break;
3953 case UMOD:
3954 /* Handle modulus by power of two (mod with 1 handled below). */
3955 if (GET_CODE (op1) == CONST_INT
3956 && exact_log2 (INTVAL (op1)) > 0)
3957 return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
3959 /* ... fall through ... */
3961 case MOD:
3962 if ((op0 == const0_rtx || op1 == const1_rtx)
3963 && ! side_effects_p (op0) && ! side_effects_p (op1))
3964 return const0_rtx;
3965 break;
3967 case ROTATERT:
3968 case ROTATE:
3969 /* Rotating ~0 always results in ~0. */
3970 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3971 && INTVAL (op0) == GET_MODE_MASK (mode)
3972 && ! side_effects_p (op1))
3973 return op0;
3975 /* ... fall through ... */
3977 case ASHIFT:
3978 case ASHIFTRT:
3979 case LSHIFTRT:
3980 if (op1 == const0_rtx)
3981 return op0;
3982 if (op0 == const0_rtx && ! side_effects_p (op1))
3983 return op0;
3984 break;
3986 case SMIN:
3987 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3988 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3989 && ! side_effects_p (op0))
3990 return op1;
3991 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3992 return op0;
3993 break;
3995 case SMAX:
3996 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3997 && (INTVAL (op1)
3998 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
3999 && ! side_effects_p (op0))
4000 return op1;
4001 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4002 return op0;
4003 break;
4005 case UMIN:
4006 if (op1 == const0_rtx && ! side_effects_p (op0))
4007 return op1;
4008 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4009 return op0;
4010 break;
4012 case UMAX:
4013 if (op1 == constm1_rtx && ! side_effects_p (op0))
4014 return op1;
4015 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4016 return op0;
4017 break;
4019 default:
4020 abort ();
4023 return 0;
4026 /* Get the integer argument values in two forms:
4027 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4029 arg0 = INTVAL (op0);
4030 arg1 = INTVAL (op1);
4032 if (width < HOST_BITS_PER_WIDE_INT)
4034 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4035 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4037 arg0s = arg0;
4038 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4039 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4041 arg1s = arg1;
4042 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4043 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4045 else
4047 arg0s = arg0;
4048 arg1s = arg1;
4051 /* Compute the value of the arithmetic. */
4053 switch (code)
4055 case PLUS:
4056 val = arg0s + arg1s;
4057 break;
4059 case MINUS:
4060 val = arg0s - arg1s;
4061 break;
4063 case MULT:
4064 val = arg0s * arg1s;
4065 break;
4067 case DIV:
4068 if (arg1s == 0)
4069 return 0;
4070 val = arg0s / arg1s;
4071 break;
4073 case MOD:
4074 if (arg1s == 0)
4075 return 0;
4076 val = arg0s % arg1s;
4077 break;
4079 case UDIV:
4080 if (arg1 == 0)
4081 return 0;
4082 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4083 break;
4085 case UMOD:
4086 if (arg1 == 0)
4087 return 0;
4088 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4089 break;
4091 case AND:
4092 val = arg0 & arg1;
4093 break;
4095 case IOR:
4096 val = arg0 | arg1;
4097 break;
4099 case XOR:
4100 val = arg0 ^ arg1;
4101 break;
4103 case LSHIFTRT:
4104 /* If shift count is undefined, don't fold it; let the machine do
4105 what it wants. But truncate it if the machine will do that. */
4106 if (arg1 < 0)
4107 return 0;
4109 #ifdef SHIFT_COUNT_TRUNCATED
4110 if (SHIFT_COUNT_TRUNCATED)
4111 arg1 %= width;
4112 #endif
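/* For example, on a target that truncates shift counts, shifting a
   32-bit value by 33 behaves like shifting it by 33 % 32 == 1,
   which is exactly what the reduction of ARG1 above reproduces. */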
4114 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4115 break;
4117 case ASHIFT:
4118 if (arg1 < 0)
4119 return 0;
4121 #ifdef SHIFT_COUNT_TRUNCATED
4122 if (SHIFT_COUNT_TRUNCATED)
4123 arg1 %= width;
4124 #endif
4126 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4127 break;
4129 case ASHIFTRT:
4130 if (arg1 < 0)
4131 return 0;
4133 #ifdef SHIFT_COUNT_TRUNCATED
4134 if (SHIFT_COUNT_TRUNCATED)
4135 arg1 %= width;
4136 #endif
4138 val = arg0s >> arg1;
4140 /* The bootstrap compiler may not have sign-extended the right shift.
4141 Manually extend the sign to ensure the bootstrap cc matches gcc. */
4142 if (arg0s < 0 && arg1 > 0)
4143 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4145 break;
4147 case ROTATERT:
4148 if (arg1 < 0)
4149 return 0;
4151 arg1 %= width;
4152 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4153 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4154 break;
4156 case ROTATE:
4157 if (arg1 < 0)
4158 return 0;
4160 arg1 %= width;
4161 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4162 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4163 break;
4165 case COMPARE:
4166 /* Do nothing here. */
4167 return 0;
4169 case SMIN:
4170 val = arg0s <= arg1s ? arg0s : arg1s;
4171 break;
4173 case UMIN:
4174 val = ((unsigned HOST_WIDE_INT) arg0
4175 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4176 break;
4178 case SMAX:
4179 val = arg0s > arg1s ? arg0s : arg1s;
4180 break;
4182 case UMAX:
4183 val = ((unsigned HOST_WIDE_INT) arg0
4184 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4185 break;
4187 default:
4188 abort ();
4191 /* Clear the bits that don't belong in our mode, unless they and our sign
4192 bit are all one. So we get either a reasonable negative value or a
4193 reasonable unsigned value for this mode. */
4194 if (width < HOST_BITS_PER_WIDE_INT
4195 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4196 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4197 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4199 /* If this would be an entire word for the target, but is not for
4200 the host, then sign-extend on the host so that the number will look
4201 the same way on the host that it would on the target.
4203 For example, when building a 64 bit alpha hosted 32 bit sparc
4204 targeted compiler, then we want the 32 bit unsigned value -1 to be
4205 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4206 The latter confuses the sparc backend. */
4208 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4209 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4210 val |= ((HOST_WIDE_INT) (-1) << width);
4212 return GEN_INT (val);
4215 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4216 PLUS or MINUS.
4218 Rather than testing for specific cases, we do this by a brute-force method
4219 and do all possible simplifications until no more changes occur. Then
4220 we rebuild the operation. */
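/* As a worked example, (minus (plus x (const_int 5))
   (minus x (const_int 3))) is flattened by the expansion loop below
   into
	ops  = { x, x, 5, 3 }
	negs = { 0, 1, 0, 0 }
   (negating the inner MINUS turns its subtrahend positive). The
   pairing loop then folds 5 with 3 into 8 and x with -x into 0, so
   the whole expression reduces to (const_int 8). */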
4222 static rtx
4223 simplify_plus_minus (code, mode, op0, op1)
4224 enum rtx_code code;
4225 enum machine_mode mode;
4226 rtx op0, op1;
4228 rtx ops[8];
4229 int negs[8];
4230 rtx result, tem;
4231 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4232 int first = 1, negate = 0, changed;
4233 int i, j;
4235 bzero ((char *) ops, sizeof ops);
4237 /* Set up the two operands and then expand them until nothing has been
4238 changed. If we run out of room in our array, give up; this should
4239 almost never happen. */
4241 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4243 changed = 1;
4244 while (changed)
4246 changed = 0;
4248 for (i = 0; i < n_ops; i++)
4249 switch (GET_CODE (ops[i]))
4251 case PLUS:
4252 case MINUS:
4253 if (n_ops == 7)
4254 return 0;
4256 ops[n_ops] = XEXP (ops[i], 1);
4257 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4258 ops[i] = XEXP (ops[i], 0);
4259 input_ops++;
4260 changed = 1;
4261 break;
4263 case NEG:
4264 ops[i] = XEXP (ops[i], 0);
4265 negs[i] = ! negs[i];
4266 changed = 1;
4267 break;
4269 case CONST:
4270 ops[i] = XEXP (ops[i], 0);
4271 input_consts++;
4272 changed = 1;
4273 break;
4275 case NOT:
4276 /* ~a -> (-a - 1) */
4277 if (n_ops != 7)
4279 ops[n_ops] = constm1_rtx;
4280 negs[n_ops++] = negs[i];
4281 ops[i] = XEXP (ops[i], 0);
4282 negs[i] = ! negs[i];
4283 changed = 1;
4285 break;
4287 case CONST_INT:
4288 if (negs[i])
4289 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4290 break;
4292 default:
4293 break;
4297 /* If we only have two operands, we can't do anything. */
4298 if (n_ops <= 2)
4299 return 0;
4301 /* Now simplify each pair of operands until nothing changes. The first
4302 time through just simplify constants against each other. */
4304 changed = 1;
4305 while (changed)
4307 changed = first;
4309 for (i = 0; i < n_ops - 1; i++)
4310 for (j = i + 1; j < n_ops; j++)
4311 if (ops[i] != 0 && ops[j] != 0
4312 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4314 rtx lhs = ops[i], rhs = ops[j];
4315 enum rtx_code ncode = PLUS;
4317 if (negs[i] && ! negs[j])
4318 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4319 else if (! negs[i] && negs[j])
4320 ncode = MINUS;
4322 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4323 if (tem)
4325 ops[i] = tem, ops[j] = 0;
4326 negs[i] = negs[i] && negs[j];
4327 if (GET_CODE (tem) == NEG)
4328 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4330 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4331 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4332 changed = 1;
4336 first = 0;
4339 /* Pack all the operands to the lower-numbered entries and give up if
4340 we didn't reduce the number of operands we had. Make sure we
4341 count a CONST as two operands. If we have the same number of
4342 operands, but have made more CONSTs than we had, this is also
4343 an improvement, so accept it. */
4345 for (i = 0, j = 0; j < n_ops; j++)
4346 if (ops[j] != 0)
4348 ops[i] = ops[j], negs[i++] = negs[j];
4349 if (GET_CODE (ops[j]) == CONST)
4350 n_consts++;
4353 if (i + n_consts > input_ops
4354 || (i + n_consts == input_ops && n_consts <= input_consts))
4355 return 0;
4357 n_ops = i;
4359 /* If we have a CONST_INT, put it last. */
4360 for (i = 0; i < n_ops - 1; i++)
4361 if (GET_CODE (ops[i]) == CONST_INT)
4363 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
4364 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4367 /* Put a non-negated operand first. If there aren't any, make all
4368 operands positive and negate the whole thing later. */
4369 for (i = 0; i < n_ops && negs[i]; i++)
4372 if (i == n_ops)
4374 for (i = 0; i < n_ops; i++)
4375 negs[i] = 0;
4376 negate = 1;
4378 else if (i != 0)
4380 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4381 j = negs[0], negs[0] = negs[i], negs[i] = j;
4384 /* Now make the result by performing the requested operations. */
4385 result = ops[0];
4386 for (i = 1; i < n_ops; i++)
4387 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4389 return negate ? gen_rtx_NEG (mode, result) : result;
4392 /* Make a binary operation by properly ordering the operands and
4393 seeing if the expression folds. */
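/* For example, (plus (const_int 4) (reg)) is reordered into
   (plus (reg) (const_int 4)), and a compound expression is placed
   before a plain object, so equivalent RTL reaches the simplifier
   in one canonical shape. */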
4395 static rtx
4396 cse_gen_binary (code, mode, op0, op1)
4397 enum rtx_code code;
4398 enum machine_mode mode;
4399 rtx op0, op1;
4401 rtx tem;
4403 /* Put complex operands first and constants second if commutative. */
4404 if (GET_RTX_CLASS (code) == 'c'
4405 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4406 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4407 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4408 || (GET_CODE (op0) == SUBREG
4409 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4410 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4411 tem = op0, op0 = op1, op1 = tem;
4413 /* If this simplifies, do it. */
4414 tem = simplify_binary_operation (code, mode, op0, op1);
4416 if (tem)
4417 return tem;
4419 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4420 just form the operation. */
4422 if (code == PLUS && GET_CODE (op1) == CONST_INT
4423 && GET_MODE (op0) != VOIDmode)
4424 return plus_constant (op0, INTVAL (op1));
4425 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4426 && GET_MODE (op0) != VOIDmode)
4427 return plus_constant (op0, - INTVAL (op1));
4428 else
4429 return gen_rtx_fmt_ee (code, mode, op0, op1);
4432 /* Like simplify_binary_operation except used for relational operators.
4433 MODE is the mode of the operands, not that of the result. If MODE
4434 is VOIDmode, both operands must also be VOIDmode and we compare the
4435 operands in "infinite precision".
4437 If no simplification is possible, this function returns zero. Otherwise,
4438 it returns either const_true_rtx or const0_rtx. */
4441 simplify_relational_operation (code, mode, op0, op1)
4442 enum rtx_code code;
4443 enum machine_mode mode;
4444 rtx op0, op1;
4446 int equal, op0lt, op0ltu, op1lt, op1ltu;
4447 rtx tem;
4449 /* If op0 is a compare, extract the comparison arguments from it. */
4450 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4451 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4453 /* We can't simplify MODE_CC values since we don't know what the
4454 actual comparison is. */
4455 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4456 #ifdef HAVE_cc0
4457 || op0 == cc0_rtx
4458 #endif
4460 return 0;
4462 /* For integer comparisons of A and B maybe we can simplify A - B and can
4463 then simplify a comparison of that with zero. If A and B are both either
4464 a register or a CONST_INT, this can't help; testing for these cases will
4465 prevent infinite recursion here and speed things up.
4467 If CODE is an unsigned comparison, then we can never do this optimization,
4468 because it gives an incorrect result if the subtraction wraps around zero.
4469 ANSI C defines unsigned operations such that they never overflow, and
4470 thus such cases cannot be ignored. */
4472 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4473 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4474 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4475 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4476 && code != GTU && code != GEU && code != LTU && code != LEU)
4477 return simplify_relational_operation (signed_condition (code),
4478 mode, tem, const0_rtx);
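/* The unsigned exclusion above matters: if A holds 200 and B holds 1
   in QImode, (gtu A B) is true, yet A - B == 199 reads as -57 when
   interpreted as a signed QImode value, so the rewritten
   (gt (minus A B) 0) would wrongly come out false. */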
4480 /* For non-IEEE floating-point, if the two operands are equal, we know the
4481 result. */
4482 if (rtx_equal_p (op0, op1)
4483 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4484 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4485 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4487 /* If the operands are floating-point constants, see if we can fold
4488 the result. */
4489 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4490 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4491 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4493 REAL_VALUE_TYPE d0, d1;
4494 jmp_buf handler;
4496 if (setjmp (handler))
4497 return 0;
4499 set_float_handler (handler);
4500 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4501 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4502 equal = REAL_VALUES_EQUAL (d0, d1);
4503 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4504 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4505 set_float_handler (NULL_PTR);
4507 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4509 /* Otherwise, see if the operands are both integers. */
4510 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4511 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4512 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4514 int width = GET_MODE_BITSIZE (mode);
4515 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4516 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4518 /* Get the two words comprising each integer constant. */
4519 if (GET_CODE (op0) == CONST_DOUBLE)
4521 l0u = l0s = CONST_DOUBLE_LOW (op0);
4522 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4524 else
4526 l0u = l0s = INTVAL (op0);
4527 h0u = h0s = l0s < 0 ? -1 : 0;
4530 if (GET_CODE (op1) == CONST_DOUBLE)
4532 l1u = l1s = CONST_DOUBLE_LOW (op1);
4533 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4535 else
4537 l1u = l1s = INTVAL (op1);
4538 h1u = h1s = l1s < 0 ? -1 : 0;
4541 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4542 we have to sign or zero-extend the values. */
4543 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4544 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4546 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4548 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4549 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4551 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4552 l0s |= ((HOST_WIDE_INT) (-1) << width);
4554 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4555 l1s |= ((HOST_WIDE_INT) (-1) << width);
4558 equal = (h0u == h1u && l0u == l1u);
4559 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4560 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4561 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4562 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
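/* For example, comparing (const_int -1) with (const_int 1) in QImode:
   sign-extended, -1 < 1, so OP0LT is set; zero-extended, -1 becomes
   255 and 255 > 1, so OP1LTU is set instead. */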
4565 /* Otherwise, there are some code-specific tests we can make. */
4566 else
4568 switch (code)
4570 case EQ:
4571 /* References to the frame plus a constant or labels cannot
4572 be zero, but a SYMBOL_REF can due to #pragma weak. */
4573 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4574 || GET_CODE (op0) == LABEL_REF)
4575 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4576 /* On some machines, the ap reg can be 0 sometimes. */
4577 && op0 != arg_pointer_rtx
4578 #endif
4580 return const0_rtx;
4581 break;
4583 case NE:
4584 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4585 || GET_CODE (op0) == LABEL_REF)
4586 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4587 && op0 != arg_pointer_rtx
4588 #endif
4590 return const_true_rtx;
4591 break;
4593 case GEU:
4594 /* Unsigned values are never negative. */
4595 if (op1 == const0_rtx)
4596 return const_true_rtx;
4597 break;
4599 case LTU:
4600 if (op1 == const0_rtx)
4601 return const0_rtx;
4602 break;
4604 case LEU:
4605 /* Unsigned values are never greater than the largest
4606 unsigned value. */
4607 if (GET_CODE (op1) == CONST_INT
4608 && INTVAL (op1) == GET_MODE_MASK (mode)
4609 && INTEGRAL_MODE_P (mode))
4610 return const_true_rtx;
4611 break;
4613 case GTU:
4614 if (GET_CODE (op1) == CONST_INT
4615 && INTVAL (op1) == GET_MODE_MASK (mode)
4616 && INTEGRAL_MODE_P (mode))
4617 return const0_rtx;
4618 break;
4620 default:
4621 break;
4624 return 0;
4627 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4628 as appropriate. */
4629 switch (code)
4631 case EQ:
4632 return equal ? const_true_rtx : const0_rtx;
4633 case NE:
4634 return ! equal ? const_true_rtx : const0_rtx;
4635 case LT:
4636 return op0lt ? const_true_rtx : const0_rtx;
4637 case GT:
4638 return op1lt ? const_true_rtx : const0_rtx;
4639 case LTU:
4640 return op0ltu ? const_true_rtx : const0_rtx;
4641 case GTU:
4642 return op1ltu ? const_true_rtx : const0_rtx;
4643 case LE:
4644 return equal || op0lt ? const_true_rtx : const0_rtx;
4645 case GE:
4646 return equal || op1lt ? const_true_rtx : const0_rtx;
4647 case LEU:
4648 return equal || op0ltu ? const_true_rtx : const0_rtx;
4649 case GEU:
4650 return equal || op1ltu ? const_true_rtx : const0_rtx;
4651 default:
4652 abort ();
4656 /* Simplify CODE, an operation with result mode MODE and three operands,
4657 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4658 a constant. Return 0 if no simplification is possible. */
4661 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4662 enum rtx_code code;
4663 enum machine_mode mode, op0_mode;
4664 rtx op0, op1, op2;
4666 int width = GET_MODE_BITSIZE (mode);
4668 /* VOIDmode means "infinite" precision. */
4669 if (width == 0)
4670 width = HOST_BITS_PER_WIDE_INT;
4672 switch (code)
4674 case SIGN_EXTRACT:
4675 case ZERO_EXTRACT:
4676 if (GET_CODE (op0) == CONST_INT
4677 && GET_CODE (op1) == CONST_INT
4678 && GET_CODE (op2) == CONST_INT
4679 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4680 && width <= HOST_BITS_PER_WIDE_INT)
4682 /* Extracting a bit-field from a constant */
4683 HOST_WIDE_INT val = INTVAL (op0);
4685 if (BITS_BIG_ENDIAN)
4686 val >>= (GET_MODE_BITSIZE (op0_mode)
4687 - INTVAL (op2) - INTVAL (op1));
4688 else
4689 val >>= INTVAL (op2);
4691 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4693 /* First zero-extend. */
4694 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4695 /* If desired, propagate sign bit. */
4696 if (code == SIGN_EXTRACT
4697 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4698 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4701 /* Clear the bits that don't belong in our mode,
4702 unless they and our sign bit are all one.
4703 So we get either a reasonable negative value or a reasonable
4704 unsigned value for this mode. */
4705 if (width < HOST_BITS_PER_WIDE_INT
4706 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4707 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4708 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4710 return GEN_INT (val);
4712 break;
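/* For example, with BITS_BIG_ENDIAN clear, (zero_extract 0xa5 4 4)
   extracts the field 0xa and yields 10, while (sign_extract 0xa5 4 4)
   sees the field's top bit set and yields -6. */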
4714 case IF_THEN_ELSE:
4715 if (GET_CODE (op0) == CONST_INT)
4716 return op0 != const0_rtx ? op1 : op2;
4718 /* Convert a != b ? a : b and a == b ? b : a to "a". */
4719 if (GET_CODE (op0) == NE && ! side_effects_p (op0)
4720 && rtx_equal_p (XEXP (op0, 0), op1)
4721 && rtx_equal_p (XEXP (op0, 1), op2))
4722 return op1;
4723 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
4724 && rtx_equal_p (XEXP (op0, 1), op1)
4725 && rtx_equal_p (XEXP (op0, 0), op2))
4726 return op2;
4727 else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
4729 rtx temp;
4730 temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4731 XEXP (op0, 0), XEXP (op0, 1));
4732 /* See if any simplifications were possible. */
4733 if (temp == const0_rtx)
4734 return op2;
4735 else if (temp == const1_rtx)
4736 return op1;
4738 break;
4740 default:
4741 abort ();
4744 return 0;
4747 /* If X is a nontrivial arithmetic operation on an argument
4748 for which a constant value can be determined, return
4749 the result of operating on that value, as a constant.
4750 Otherwise, return X, possibly with one or more operands
4751 modified by recursive calls to this function.
4753 If X is a register whose contents are known, we do NOT
4754 return those contents here. equiv_constant is called to
4755 perform that task.
4757 INSN is the insn that we may be modifying. If it is 0, make a copy
4758 of X before modifying it. */
4760 static rtx
4761 fold_rtx (x, insn)
4762 rtx x;
4763 rtx insn;
4765 register enum rtx_code code;
4766 register enum machine_mode mode;
4767 register char *fmt;
4768 register int i;
4769 rtx new = 0;
4770 int copied = 0;
4771 int must_swap = 0;
4773 /* Folded equivalents of first two operands of X. */
4774 rtx folded_arg0;
4775 rtx folded_arg1;
4777 /* Constant equivalents of first three operands of X;
4778 0 when no such equivalent is known. */
4779 rtx const_arg0;
4780 rtx const_arg1;
4781 rtx const_arg2;
4783 /* The mode of the first operand of X. We need this for sign and zero
4784 extends. */
4785 enum machine_mode mode_arg0;
4787 if (x == 0)
4788 return x;
4790 mode = GET_MODE (x);
4791 code = GET_CODE (x);
4792 switch (code)
4794 case CONST:
4795 case CONST_INT:
4796 case CONST_DOUBLE:
4797 case SYMBOL_REF:
4798 case LABEL_REF:
4799 case REG:
4800 /* No use simplifying an EXPR_LIST
4801 since they are used only for lists of args
4802 in a function call's REG_EQUAL note. */
4803 case EXPR_LIST:
4804 /* Changing anything inside an ADDRESSOF is incorrect; we don't
4805 want to (e.g.,) make (addressof (const_int 0)) just because
4806 the location is known to be zero. */
4807 case ADDRESSOF:
4808 return x;
4810 #ifdef HAVE_cc0
4811 case CC0:
4812 return prev_insn_cc0;
4813 #endif
4815 case PC:
4816 /* If the next insn is a CODE_LABEL followed by a jump table,
4817 PC's value is a LABEL_REF pointing to that label. That
4818 lets us fold switch statements on the Vax. */
4819 if (insn && GET_CODE (insn) == JUMP_INSN)
4821 rtx next = next_nonnote_insn (insn);
4823 if (next && GET_CODE (next) == CODE_LABEL
4824 && NEXT_INSN (next) != 0
4825 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4826 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4827 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4828 return gen_rtx_LABEL_REF (Pmode, next);
4830 break;
4832 case SUBREG:
4833 /* See if we previously assigned a constant value to this SUBREG. */
4834 if ((new = lookup_as_function (x, CONST_INT)) != 0
4835 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4836 return new;
4838 /* If this is a paradoxical SUBREG, we have no idea what value the
4839 extra bits would have. However, if the operand is equivalent
4840 to a SUBREG whose operand is the same as our mode, and all the
4841 modes are within a word, we can just use the inner operand
4842 because these SUBREGs just say how to treat the register.
4844 Similarly if we find an integer constant. */
4846 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4848 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4849 struct table_elt *elt;
4851 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4852 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4853 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4854 imode)) != 0)
4855 for (elt = elt->first_same_value;
4856 elt; elt = elt->next_same_value)
4858 if (CONSTANT_P (elt->exp)
4859 && GET_MODE (elt->exp) == VOIDmode)
4860 return elt->exp;
4862 if (GET_CODE (elt->exp) == SUBREG
4863 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4864 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4865 return copy_rtx (SUBREG_REG (elt->exp));
4868 return x;
4871 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4872 We might be able to if the SUBREG is extracting a single word in an
4873 integral mode or extracting the low part. */
4875 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4876 const_arg0 = equiv_constant (folded_arg0);
4877 if (const_arg0)
4878 folded_arg0 = const_arg0;
4880 if (folded_arg0 != SUBREG_REG (x))
4882 new = 0;
4884 if (GET_MODE_CLASS (mode) == MODE_INT
4885 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4886 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4887 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4888 GET_MODE (SUBREG_REG (x)));
4889 if (new == 0 && subreg_lowpart_p (x))
4890 new = gen_lowpart_if_possible (mode, folded_arg0);
4891 if (new)
4892 return new;
4895 /* If this is a narrowing SUBREG and our operand is a REG, see if
4896 we can find an equivalence for REG that is an arithmetic operation
4897 in a wider mode where both operands are paradoxical SUBREGs
4898 from objects of our result mode. In that case, we couldn't report
4899 an equivalent value for that operation, since we don't know what the
4900 extra bits will be. But we can find an equivalence for this SUBREG
4901 by folding that operation in the narrow mode. This allows us to
4902 fold arithmetic in narrow modes when the machine only supports
4903 word-sized arithmetic.
4905 Also look for a case where we have a SUBREG whose operand is the
4906 same as our result. If both modes are smaller than a word, we
4907 are simply interpreting a register in different modes and we
4908 can use the inner value. */
4910 if (GET_CODE (folded_arg0) == REG
4911 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4912 && subreg_lowpart_p (x))
4914 struct table_elt *elt;
4916 /* We can use HASH here since we know that canon_hash won't be
4917 called. */
4918 elt = lookup (folded_arg0,
4919 HASH (folded_arg0, GET_MODE (folded_arg0)),
4920 GET_MODE (folded_arg0));
4922 if (elt)
4923 elt = elt->first_same_value;
4925 for (; elt; elt = elt->next_same_value)
4927 enum rtx_code eltcode = GET_CODE (elt->exp);
4929 /* Just check for unary and binary operations. */
4930 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4931 && GET_CODE (elt->exp) != SIGN_EXTEND
4932 && GET_CODE (elt->exp) != ZERO_EXTEND
4933 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4934 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4936 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4938 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4939 op0 = fold_rtx (op0, NULL_RTX);
4941 op0 = equiv_constant (op0);
4942 if (op0)
4943 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4944 op0, mode);
4946 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4947 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4948 && eltcode != DIV && eltcode != MOD
4949 && eltcode != UDIV && eltcode != UMOD
4950 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4951 && eltcode != ROTATE && eltcode != ROTATERT
4952 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4953 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4954 == mode))
4955 || CONSTANT_P (XEXP (elt->exp, 0)))
4956 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4957 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4958 == mode))
4959 || CONSTANT_P (XEXP (elt->exp, 1))))
4961 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4962 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4964 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4965 op0 = fold_rtx (op0, NULL_RTX);
4967 if (op0)
4968 op0 = equiv_constant (op0);
4970 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4971 op1 = fold_rtx (op1, NULL_RTX);
4973 if (op1)
4974 op1 = equiv_constant (op1);
4976 /* If we are looking for the low SImode part of
4977 (ashift:DI c (const_int 32)), it doesn't work
4978 to compute that in SImode, because a 32-bit shift
4979 in SImode is unpredictable. We know the value is 0. */
4980 if (op0 && op1
4981 && GET_CODE (elt->exp) == ASHIFT
4982 && GET_CODE (op1) == CONST_INT
4983 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
4985 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
4987 /* If the count fits in the inner mode's width,
4988 but exceeds the outer mode's width,
4989 the value will get truncated to 0
4990 by the subreg. */
4991 new = const0_rtx;
4992 else
4993 /* If the count exceeds even the inner mode's width,
4994 don't fold this expression. */
4995 new = 0;
4997 else if (op0 && op1)
4998 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4999 op0, op1);
5002 else if (GET_CODE (elt->exp) == SUBREG
5003 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5004 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5005 <= UNITS_PER_WORD)
5006 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5007 new = copy_rtx (SUBREG_REG (elt->exp));
5009 if (new)
5010 return new;
5014 return x;
5016 case NOT:
5017 case NEG:
5018 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5019 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5020 new = lookup_as_function (XEXP (x, 0), code);
5021 if (new)
5022 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5023 break;
5025 case MEM:
5026 /* If we are not actually processing an insn, don't try to find the
5027 best address. Not only don't we care, but we could modify the
5028 MEM in an invalid way since we have no insn to validate against. */
5029 if (insn != 0)
5030 find_best_addr (insn, &XEXP (x, 0));
5033 /* Even if we don't fold in the insn itself,
5034 we can safely do so here, in hopes of getting a constant. */
5035 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5036 rtx base = 0;
5037 HOST_WIDE_INT offset = 0;
5039 if (GET_CODE (addr) == REG
5040 && REGNO_QTY_VALID_P (REGNO (addr))
5041 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
5042 && qty_const[reg_qty[REGNO (addr)]] != 0)
5043 addr = qty_const[reg_qty[REGNO (addr)]];
5045 /* If address is constant, split it into a base and integer offset. */
5046 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5047 base = addr;
5048 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5049 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5051 base = XEXP (XEXP (addr, 0), 0);
5052 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5054 else if (GET_CODE (addr) == LO_SUM
5055 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5056 base = XEXP (addr, 1);
5057 else if (GET_CODE (addr) == ADDRESSOF)
5058 return change_address (x, VOIDmode, addr);
5060 /* If this is a constant pool reference, we can fold it into its
5061 constant to allow better value tracking. */
5062 if (base && GET_CODE (base) == SYMBOL_REF
5063 && CONSTANT_POOL_ADDRESS_P (base))
5065 rtx constant = get_pool_constant (base);
5066 enum machine_mode const_mode = get_pool_mode (base);
5067 rtx new;
5069 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5070 constant_pool_entries_cost = COST (constant);
5072 /* If we are loading the full constant, we have an equivalence. */
5073 if (offset == 0 && mode == const_mode)
5074 return constant;
5076 /* If this actually isn't a constant (weird!), we can't do
5077 anything. Otherwise, handle the two most common cases:
5078 extracting a word from a multi-word constant, and extracting
5079 the low-order bits. Other cases don't seem common enough to
5080 worry about. */
5081 if (! CONSTANT_P (constant))
5082 return x;
5084 if (GET_MODE_CLASS (mode) == MODE_INT
5085 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5086 && offset % UNITS_PER_WORD == 0
5087 && (new = operand_subword (constant,
5088 offset / UNITS_PER_WORD,
5089 0, const_mode)) != 0)
5090 return new;
5092 if (((BYTES_BIG_ENDIAN
5093 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5094 || (! BYTES_BIG_ENDIAN && offset == 0))
5095 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5096 return new;
5099 /* If this is a reference to a label at a known position in a jump
5100 table, we also know its value. */
5101 if (base && GET_CODE (base) == LABEL_REF)
5103 rtx label = XEXP (base, 0);
5104 rtx table_insn = NEXT_INSN (label);
5106 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5107 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5109 rtx table = PATTERN (table_insn);
5111 if (offset >= 0
5112 && (offset / GET_MODE_SIZE (GET_MODE (table))
5113 < XVECLEN (table, 0)))
5114 return XVECEXP (table, 0,
5115 offset / GET_MODE_SIZE (GET_MODE (table)));
5117 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5118 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5120 rtx table = PATTERN (table_insn);
5122 if (offset >= 0
5123 && (offset / GET_MODE_SIZE (GET_MODE (table))
5124 < XVECLEN (table, 1)))
5126 offset /= GET_MODE_SIZE (GET_MODE (table));
5127 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5128 XEXP (table, 0));
5130 if (GET_MODE (table) != Pmode)
5131 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
5133 /* Indicate this is a constant. This isn't a
5134 valid form of CONST, but it will only be used
5135 to fold the next insns and then discarded, so
5136 it should be safe. */
5137 return gen_rtx_CONST (GET_MODE (new), new);
5142 return x;
5145 case ASM_OPERANDS:
5146 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5147 validate_change (insn, &XVECEXP (x, 3, i),
5148 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5149 break;
5151 default:
5152 break;
5155 const_arg0 = 0;
5156 const_arg1 = 0;
5157 const_arg2 = 0;
5158 mode_arg0 = VOIDmode;
5160 /* Try folding our operands.
5161 Then see which ones have constant values known. */
5163 fmt = GET_RTX_FORMAT (code);
5164 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5165 if (fmt[i] == 'e')
5167 rtx arg = XEXP (x, i);
5168 rtx folded_arg = arg, const_arg = 0;
5169 enum machine_mode mode_arg = GET_MODE (arg);
5170 rtx cheap_arg, expensive_arg;
5171 rtx replacements[2];
5172 int j;
5174 /* Most arguments are cheap, so handle them specially. */
5175 switch (GET_CODE (arg))
5177 case REG:
5178 /* This is the same as calling equiv_constant; it is duplicated
5179 here for speed. */
5180 if (REGNO_QTY_VALID_P (REGNO (arg))
5181 && qty_const[reg_qty[REGNO (arg)]] != 0
5182 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5183 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5184 const_arg
5185 = gen_lowpart_if_possible (GET_MODE (arg),
5186 qty_const[reg_qty[REGNO (arg)]]);
5187 break;
5189 case CONST:
5190 case CONST_INT:
5191 case SYMBOL_REF:
5192 case LABEL_REF:
5193 case CONST_DOUBLE:
5194 const_arg = arg;
5195 break;
5197 #ifdef HAVE_cc0
5198 case CC0:
5199 folded_arg = prev_insn_cc0;
5200 mode_arg = prev_insn_cc0_mode;
5201 const_arg = equiv_constant (folded_arg);
5202 break;
5203 #endif
5205 default:
5206 folded_arg = fold_rtx (arg, insn);
5207 const_arg = equiv_constant (folded_arg);
5210 /* For the first three operands, see if the operand
5211 is constant or equivalent to a constant. */
5212 switch (i)
5214 case 0:
5215 folded_arg0 = folded_arg;
5216 const_arg0 = const_arg;
5217 mode_arg0 = mode_arg;
5218 break;
5219 case 1:
5220 folded_arg1 = folded_arg;
5221 const_arg1 = const_arg;
5222 break;
5223 case 2:
5224 const_arg2 = const_arg;
5225 break;
5228 /* Pick the least expensive of the folded argument and an
5229 equivalent constant argument. */
5230 if (const_arg == 0 || const_arg == folded_arg
5231 || COST (const_arg) > COST (folded_arg))
5232 cheap_arg = folded_arg, expensive_arg = const_arg;
5233 else
5234 cheap_arg = const_arg, expensive_arg = folded_arg;
5236 /* Try to replace the operand with the cheapest of the two
5237 possibilities. If it doesn't work and this is either of the first
5238 two operands of a commutative operation, try swapping them.
5239 If THAT fails, try the more expensive, provided it is cheaper
5240 than what is already there. */
5242 if (cheap_arg == XEXP (x, i))
5243 continue;
5245 if (insn == 0 && ! copied)
5247 x = copy_rtx (x);
5248 copied = 1;
5251 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5252 for (j = 0;
5253 j < 2 && replacements[j]
5254 && COST (replacements[j]) < COST (XEXP (x, i));
5255 j++)
5257 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5258 break;
5260 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5262 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5263 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5265 if (apply_change_group ())
5267 /* Swap them back to be invalid so that this loop can
5268 continue and flag them to be swapped back later. */
5269 rtx tem;
5271 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5272 XEXP (x, 1) = tem;
5273 must_swap = 1;
5274 break;
5280 else
5282 if (fmt[i] == 'E')
5283 /* Don't try to fold inside of a vector of expressions.
5284 Doing nothing is harmless. */
5285 {;}
5288 /* If a commutative operation, place a constant integer as the second
5289 operand unless the first operand is also a constant integer. Otherwise,
5290 place any constant second unless the first operand is also a constant. */
5292 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5294 if (must_swap || (const_arg0
5295 && (const_arg1 == 0
5296 || (GET_CODE (const_arg0) == CONST_INT
5297 && GET_CODE (const_arg1) != CONST_INT))))
5299 register rtx tem = XEXP (x, 0);
5301 if (insn == 0 && ! copied)
5303 x = copy_rtx (x);
5304 copied = 1;
5307 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5308 validate_change (insn, &XEXP (x, 1), tem, 1);
5309 if (apply_change_group ())
5311 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5312 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5317 /* If X is an arithmetic operation, see if we can simplify it. */
5319 switch (GET_RTX_CLASS (code))
5321 case '1':
5323 int is_const = 0;
5325 /* We can't simplify extension ops unless we know the
5326 original mode. */
5327 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5328 && mode_arg0 == VOIDmode)
5329 break;
5331 /* If we had a CONST, strip it off and put it back later if we
5332 fold. */
5333 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5334 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5336 new = simplify_unary_operation (code, mode,
5337 const_arg0 ? const_arg0 : folded_arg0,
5338 mode_arg0);
5339 if (new != 0 && is_const)
5340 new = gen_rtx_CONST (mode, new);
5342 break;
5344 case '<':
5345 /* See what items are actually being compared and set FOLDED_ARG[01]
5346 to those values and CODE to the actual comparison code. If any are
5347 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5348 do anything if both operands are already known to be constant. */
5350 if (const_arg0 == 0 || const_arg1 == 0)
5352 struct table_elt *p0, *p1;
5353 rtx true = const_true_rtx, false = const0_rtx;
5354 enum machine_mode mode_arg1;
5356 #ifdef FLOAT_STORE_FLAG_VALUE
5357 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5359 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5360 mode);
5361 false = CONST0_RTX (mode);
5363 #endif
5365 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5366 &mode_arg0, &mode_arg1);
5367 const_arg0 = equiv_constant (folded_arg0);
5368 const_arg1 = equiv_constant (folded_arg1);
5370 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5371 what kinds of things are being compared, so we can't do
5372 anything with this comparison. */
5374 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5375 break;
5377 /* If we do not now have two constants being compared, see
5378 if we can nevertheless deduce some things about the
5379 comparison. */
5380 if (const_arg0 == 0 || const_arg1 == 0)
5382 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5383 non-explicit constant? These aren't zero, but we
5384 don't know their sign. */
5385 if (const_arg1 == const0_rtx
5386 && (NONZERO_BASE_PLUS_P (folded_arg0)
5387 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5388 come out as 0. */
5389 || GET_CODE (folded_arg0) == SYMBOL_REF
5390 #endif
5391 || GET_CODE (folded_arg0) == LABEL_REF
5392 || GET_CODE (folded_arg0) == CONST))
5394 if (code == EQ)
5395 return false;
5396 else if (code == NE)
5397 return true;
5400 /* See if the two operands are the same. We don't do this
5401 for IEEE floating-point, since we can't assume x == x
5402 when x might be a NaN. */
5404 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5405 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5406 && (folded_arg0 == folded_arg1
5407 || (GET_CODE (folded_arg0) == REG
5408 && GET_CODE (folded_arg1) == REG
5409 && (reg_qty[REGNO (folded_arg0)]
5410 == reg_qty[REGNO (folded_arg1)]))
5411 || ((p0 = lookup (folded_arg0,
5412 (safe_hash (folded_arg0, mode_arg0)
5413 % NBUCKETS), mode_arg0))
5414 && (p1 = lookup (folded_arg1,
5415 (safe_hash (folded_arg1, mode_arg0)
5416 % NBUCKETS), mode_arg0))
5417 && p0->first_same_value == p1->first_same_value)))
5418 return ((code == EQ || code == LE || code == GE
5419 || code == LEU || code == GEU)
5420 ? true : false);
5422 /* If FOLDED_ARG0 is a register, see if the comparison we are
5423 doing now is either the same as we did before or the reverse
5424 (we only check the reverse if not floating-point). */
5425 else if (GET_CODE (folded_arg0) == REG)
5427 int qty = reg_qty[REGNO (folded_arg0)];
5429 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5430 && (comparison_dominates_p (qty_comparison_code[qty], code)
5431 || (comparison_dominates_p (qty_comparison_code[qty],
5432 reverse_condition (code))
5433 && ! FLOAT_MODE_P (mode_arg0)))
5434 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5435 || (const_arg1
5436 && rtx_equal_p (qty_comparison_const[qty],
5437 const_arg1))
5438 || (GET_CODE (folded_arg1) == REG
5439 && (reg_qty[REGNO (folded_arg1)]
5440 == qty_comparison_qty[qty]))))
5441 return (comparison_dominates_p (qty_comparison_code[qty],
5442 code)
5443 ? true : false);
5448 /* If we are comparing against zero, see if the first operand is
5449 equivalent to an IOR with a constant. If so, we may be able to
5450 determine the result of this comparison. */
5452 if (const_arg1 == const0_rtx)
5454 rtx y = lookup_as_function (folded_arg0, IOR);
5455 rtx inner_const;
5457 if (y != 0
5458 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5459 && GET_CODE (inner_const) == CONST_INT
5460 && INTVAL (inner_const) != 0)
5462 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5463 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5464 && (INTVAL (inner_const)
5465 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5466 rtx true = const_true_rtx, false = const0_rtx;
5468 #ifdef FLOAT_STORE_FLAG_VALUE
5469 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5471 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5472 mode);
5473 false = CONST0_RTX (mode);
5475 #endif
5477 switch (code)
5479 case EQ:
5480 return false;
5481 case NE:
5482 return true;
5483 case LT: case LE:
5484 if (has_sign)
5485 return true;
5486 break;
5487 case GT: case GE:
5488 if (has_sign)
5489 return false;
5490 break;
5491 default:
5492 break;
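/* For instance, if FOLDED_ARG0 is known equivalent to (ior y 0x80)
   in QImode, some bit of it is always set, so EQ/NE against zero fold
   immediately; and since bit 7 is the sign bit, LT/LE against zero
   fold to true and GT/GE to false as well. */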
5497 new = simplify_relational_operation (code, mode_arg0,
5498 const_arg0 ? const_arg0 : folded_arg0,
5499 const_arg1 ? const_arg1 : folded_arg1);
5500 #ifdef FLOAT_STORE_FLAG_VALUE
5501 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5502 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5503 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5504 #endif
5505 break;
5507 case '2':
5508 case 'c':
5509 switch (code)
5511 case PLUS:
5512 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5513 with that LABEL_REF as its second operand. If so, the result is
5514 the first operand of that MINUS. This handles switches with an
5515 ADDR_DIFF_VEC table. */
5516 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5518 rtx y
5519 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5520 : lookup_as_function (folded_arg0, MINUS);
5522 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5523 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5524 return XEXP (y, 0);
5526 /* Now try for a CONST of a MINUS like the above. */
5527 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5528 : lookup_as_function (folded_arg0, CONST))) != 0
5529 && GET_CODE (XEXP (y, 0)) == MINUS
5530 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5531 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5532 return XEXP (XEXP (y, 0), 0);
5535 /* Likewise if the operands are in the other order. */
5536 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5538 rtx y
5539 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5540 : lookup_as_function (folded_arg1, MINUS);
5542 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5543 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5544 return XEXP (y, 0);
5546 /* Now try for a CONST of a MINUS like the above. */
5547 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5548 : lookup_as_function (folded_arg1, CONST))) != 0
5549 && GET_CODE (XEXP (y, 0)) == MINUS
5550 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5551 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5552 return XEXP (XEXP (y, 0), 0);
5555 /* If second operand is a register equivalent to a negative
5556 CONST_INT, see if we can find a register equivalent to the
5557 positive constant. Make a MINUS if so. Don't do this for
5558 a non-negative constant since we might then alternate between
5559 choosing positive and negative constants. Having the positive
5560 constant previously-used is the more common case. Be sure
5561 the resulting constant is non-negative; if const_arg1 were
5562 the smallest negative number this would overflow: depending
5563 on the mode, this would either just be the same value (and
5564 hence not save anything) or be incorrect. */
5565 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5566 && INTVAL (const_arg1) < 0
5567 && - INTVAL (const_arg1) >= 0
5568 && GET_CODE (folded_arg1) == REG)
5570 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5571 struct table_elt *p
5572 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5573 mode);
5575 if (p)
5576 for (p = p->first_same_value; p; p = p->next_same_value)
5577 if (GET_CODE (p->exp) == REG)
5578 return cse_gen_binary (MINUS, mode, folded_arg0,
5579 canon_reg (p->exp, NULL_RTX));
5581 goto from_plus;
5583 case MINUS:
5584 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5585 If so, produce (PLUS Z C2-C). */
5586 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5588 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5589 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5590 return fold_rtx (plus_constant (copy_rtx (y),
5591 -INTVAL (const_arg1)),
5592 NULL_RTX);
5595 /* ... fall through ... */
5597 from_plus:
5598 case SMIN: case SMAX: case UMIN: case UMAX:
5599 case IOR: case AND: case XOR:
5600 case MULT: case DIV: case UDIV:
5601 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5602 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5603 is known to be of similar form, we may be able to replace the
5604 operation with a combined operation. This may eliminate the
5605 intermediate operation if every use is simplified in this way.
5606 Note that the similar optimization done by combine.c only works
5607 if the intermediate operation's result has only one reference. */
5609 if (GET_CODE (folded_arg0) == REG
5610 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5612 int is_shift
5613 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5614 rtx y = lookup_as_function (folded_arg0, code);
5615 rtx inner_const;
5616 enum rtx_code associate_code;
5617 rtx new_const;
5619 if (y == 0
5620 || 0 == (inner_const
5621 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5622 || GET_CODE (inner_const) != CONST_INT
5623 /* If we have compiled a statement like
5624 "if (x == (x & mask1))", and now are looking at
5625 "x & mask2", we will have a case where the first operand
5626 of Y is the same as our first operand. Unless we detect
5627 this case, an infinite loop will result. */
5628 || XEXP (y, 0) == folded_arg0)
5629 break;
5631 /* Don't associate these operations if they are a PLUS with the
5632 same constant and it is a power of two. These might be doable
5633 with a pre- or post-increment. Similarly for two subtracts of
5634 identical powers of two with post decrement. */
5636 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5637 && (0
5638 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5639 || exact_log2 (INTVAL (const_arg1)) >= 0
5640 #endif
5641 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5642 || exact_log2 (- INTVAL (const_arg1)) >= 0
5643 #endif
5645 break;
5647 /* Compute the code used to compose the constants. For example,
5648 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5650 associate_code
5651 = (code == MULT || code == DIV || code == UDIV ? MULT
5652 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5654 new_const = simplify_binary_operation (associate_code, mode,
5655 const_arg1, inner_const);
5657 if (new_const == 0)
5658 break;
5660 /* If we are associating shift operations, don't let this
5661 produce a shift of the size of the object or larger.
5662 This could occur when we follow a sign-extend by a right
5663 shift on a machine that does a sign-extend as a pair
5664 of shifts. */
5666 if (is_shift && GET_CODE (new_const) == CONST_INT
5667 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5669 /* As an exception, we can turn an ASHIFTRT of this
5670 form into a shift of the number of bits - 1. */
5671 if (code == ASHIFTRT)
5672 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5673 else
5674 break;
5677 y = copy_rtx (XEXP (y, 0));
5679 /* If Y contains our first operand (the most common way this
5680 can happen is if Y is a MEM), we would go into an infinite
5681 loop if we tried to fold it. So don't in that case. */
5683 if (! reg_mentioned_p (folded_arg0, y))
5684 y = fold_rtx (y, insn);
5686 return cse_gen_binary (code, mode, y, new_const);
5688 break;
5690 default:
5691 break;
5694 new = simplify_binary_operation (code, mode,
5695 const_arg0 ? const_arg0 : folded_arg0,
5696 const_arg1 ? const_arg1 : folded_arg1);
5697 break;
5699 case 'o':
5700 /* (lo_sum (high X) X) is simply X. */
5701 if (code == LO_SUM && const_arg0 != 0
5702 && GET_CODE (const_arg0) == HIGH
5703 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5704 return const_arg1;
5705 break;
5707 case '3':
5708 case 'b':
5709 new = simplify_ternary_operation (code, mode, mode_arg0,
5710 const_arg0 ? const_arg0 : folded_arg0,
5711 const_arg1 ? const_arg1 : folded_arg1,
5712 const_arg2 ? const_arg2 : XEXP (x, 2));
5713 break;
5716 return new ? new : x;
5719 /* Return a constant value currently equivalent to X.
5720 Return 0 if we don't know one. */
5722 static rtx
5723 equiv_constant (x)
5724 rtx x;
5726 if (GET_CODE (x) == REG
5727 && REGNO_QTY_VALID_P (REGNO (x))
5728 && qty_const[reg_qty[REGNO (x)]])
5729 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5731 if (x != 0 && CONSTANT_P (x))
5732 return x;
5734 /* If X is a MEM, try to fold it outside the context of any insn to see if
5735 it might be equivalent to a constant. That handles the case where it
5736 is a constant-pool reference. Then try to look it up in the hash table
5737 in case it is something whose value we have seen before. */
5739 if (GET_CODE (x) == MEM)
5741 struct table_elt *elt;
5743 x = fold_rtx (x, NULL_RTX);
5744 if (CONSTANT_P (x))
5745 return x;
5747 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5748 if (elt == 0)
5749 return 0;
5751 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5752 if (elt->is_const && CONSTANT_P (elt->exp))
5753 return elt->exp;
5756 return 0;
5759 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5760 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5761 least-significant part of X.
5762 MODE specifies how big a part of X to return.
5764 If the requested operation cannot be done, 0 is returned.
5766 This is similar to gen_lowpart in emit-rtl.c. */
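/* Editor's illustration (assumes a 32-bit target that is big-endian in
   both byte and word order): gen_lowpart_if_possible (QImode, (mem:SI A))
   yields (mem:QI (plus A 3)), keeping the address just past the data
   unchanged; if the adjusted address is not valid, 0 is returned.  */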
5768 rtx
5769 gen_lowpart_if_possible (mode, x)
5770 enum machine_mode mode;
5771 register rtx x;
5773 rtx result = gen_lowpart_common (mode, x);
5775 if (result)
5776 return result;
5777 else if (GET_CODE (x) == MEM)
5779 /* This is the only other case we handle. */
5780 register int offset = 0;
5781 rtx new;
5783 if (WORDS_BIG_ENDIAN)
5784 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5785 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5786 if (BYTES_BIG_ENDIAN)
5787 /* Adjust the address so that the address-after-the-data is
5788 unchanged. */
5789 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5790 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5791 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
5792 if (! memory_address_p (mode, XEXP (new, 0)))
5793 return 0;
5794 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5795 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5796 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5797 return new;
5799 else
5800 return 0;
5803 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5804 branch. It will be zero if not.
5806 In certain cases, this can cause us to add an equivalence. For example,
5807 if we are following the taken case of
5808 if (i == 2)
5809 we can add the fact that `i' and '2' are now equivalent.
5811 In any case, we can record that this comparison was passed. If the same
5812 comparison is seen later, we will know its value. */
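/* Editor's illustration: on the fall-through arm of `if (i == 2)' the
   condition is known false, so the code below reverses EQ to NE and
   records that the comparison `i NE 2' holds; reversed_nonequality
   stays 0 because reversing EQ/NE is safe even for floating point.  */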
5814 static void
5815 record_jump_equiv (insn, taken)
5816 rtx insn;
5817 int taken;
5819 int cond_known_true;
5820 rtx op0, op1;
5821 enum machine_mode mode, mode0, mode1;
5822 int reversed_nonequality = 0;
5823 enum rtx_code code;
5825 /* Ensure this is the right kind of insn. */
5826 if (! condjump_p (insn) || simplejump_p (insn))
5827 return;
5829 /* See if this jump condition is known true or false. */
5830 if (taken)
5831 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5832 else
5833 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5835 /* Get the type of comparison being done and the operands being compared.
5836 If we had to reverse a non-equality condition, record that fact so we
5837 know that it isn't valid for floating-point. */
5838 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5839 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5840 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5842 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5843 if (! cond_known_true)
5845 reversed_nonequality = (code != EQ && code != NE);
5846 code = reverse_condition (code);
5849 /* The mode is the mode of the non-constant. */
5850 mode = mode0;
5851 if (mode1 != VOIDmode)
5852 mode = mode1;
5854 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5857 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5858 REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
5859 Make any useful entries we can with that information. Called from
5860 above function and called recursively. */
5862 static void
5863 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5864 enum rtx_code code;
5865 enum machine_mode mode;
5866 rtx op0, op1;
5867 int reversed_nonequality;
5869 unsigned op0_hash, op1_hash;
5870 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5871 struct table_elt *op0_elt, *op1_elt;
5873 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5874 we know that they are also equal in the smaller mode (this is also
5875 true for all smaller modes whether or not there is a SUBREG, but
5876 is not worth testing for with no SUBREG).  */
5878 /* Note that GET_MODE (op0) may not equal MODE. */
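/* Editor's sketch: if OP0 is the paradoxical (subreg:SI (reg:QI R) 0)
   and is known equal to OP1, then (reg:QI R) equals the low QImode
   part of OP1, which is what the recursive call below records.  */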
5879 if (code == EQ && GET_CODE (op0) == SUBREG
5880 && (GET_MODE_SIZE (GET_MODE (op0))
5881 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5883 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5884 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5886 record_jump_cond (code, mode, SUBREG_REG (op0),
5887 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
5888 reversed_nonequality);
5891 if (code == EQ && GET_CODE (op1) == SUBREG
5892 && (GET_MODE_SIZE (GET_MODE (op1))
5893 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5895 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5896 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5898 record_jump_cond (code, mode, SUBREG_REG (op1),
5899 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
5900 reversed_nonequality);
5903 /* Similarly, if this is an NE comparison, and either is a SUBREG
5904 making a smaller mode, we know the whole thing is also NE. */
5906 /* Note that GET_MODE (op0) may not equal MODE;
5907 if we test MODE instead, we can get an infinite recursion
5908 alternating between two modes each wider than MODE. */
5910 if (code == NE && GET_CODE (op0) == SUBREG
5911 && subreg_lowpart_p (op0)
5912 && (GET_MODE_SIZE (GET_MODE (op0))
5913 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5915 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5916 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5918 record_jump_cond (code, mode, SUBREG_REG (op0),
5919 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
5920 reversed_nonequality);
5923 if (code == NE && GET_CODE (op1) == SUBREG
5924 && subreg_lowpart_p (op1)
5925 && (GET_MODE_SIZE (GET_MODE (op1))
5926 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5928 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5929 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5931 record_jump_cond (code, mode, SUBREG_REG (op1),
5932 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
5933 reversed_nonequality);
5936 /* Hash both operands. */
5938 do_not_record = 0;
5939 hash_arg_in_memory = 0;
5940 hash_arg_in_struct = 0;
5941 op0_hash = HASH (op0, mode);
5942 op0_in_memory = hash_arg_in_memory;
5943 op0_in_struct = hash_arg_in_struct;
5945 if (do_not_record)
5946 return;
5948 do_not_record = 0;
5949 hash_arg_in_memory = 0;
5950 hash_arg_in_struct = 0;
5951 op1_hash = HASH (op1, mode);
5952 op1_in_memory = hash_arg_in_memory;
5953 op1_in_struct = hash_arg_in_struct;
5955 if (do_not_record)
5956 return;
5958 /* Look up both operands. */
5959 op0_elt = lookup (op0, op0_hash, mode);
5960 op1_elt = lookup (op1, op1_hash, mode);
5962 /* If both operands are already equivalent or if they are not in the
5963 table but are identical, do nothing. */
5964 if ((op0_elt != 0 && op1_elt != 0
5965 && op0_elt->first_same_value == op1_elt->first_same_value)
5966 || op0 == op1 || rtx_equal_p (op0, op1))
5967 return;
5969 /* If we aren't setting two things equal, all we can do is save this
5970 comparison. Similarly if this is floating-point. In the latter
5971 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5972 If we record the equality, we might inadvertently delete code
5973 whose intent was to change -0 to +0. */
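/* Editor's illustration of the hazard, in C: given
   `if (d == 0.) d = 0.;', recording d == 0. on the taken arm would
   make the store look like a no-op and allow it to be deleted,
   losing the intended conversion of -0. to +0.  */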
5975 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
5977 /* If we reversed a floating-point comparison, if OP0 is not a
5978 register, or if OP1 is neither a register nor a constant, we can't
5979 do anything. */
5981 if (GET_CODE (op1) != REG)
5982 op1 = equiv_constant (op1);
5984 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5985 || GET_CODE (op0) != REG || op1 == 0)
5986 return;
5988 /* Put OP0 in the hash table if it isn't already. This gives it a
5989 new quantity number. */
5990 if (op0_elt == 0)
5992 if (insert_regs (op0, NULL_PTR, 0))
5994 rehash_using_reg (op0);
5995 op0_hash = HASH (op0, mode);
5997 /* If OP0 is contained in OP1, this changes its hash code
5998 as well. Faster to rehash than to check, except
5999 for the simple case of a constant. */
6000 if (! CONSTANT_P (op1))
6001 op1_hash = HASH (op1, mode);
6004 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6005 op0_elt->in_memory = op0_in_memory;
6006 op0_elt->in_struct = op0_in_struct;
6009 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
6010 if (GET_CODE (op1) == REG)
6012 /* Look it up again--in case op0 and op1 are the same. */
6013 op1_elt = lookup (op1, op1_hash, mode);
6015 /* Put OP1 in the hash table so it gets a new quantity number. */
6016 if (op1_elt == 0)
6018 if (insert_regs (op1, NULL_PTR, 0))
6020 rehash_using_reg (op1);
6021 op1_hash = HASH (op1, mode);
6024 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6025 op1_elt->in_memory = op1_in_memory;
6026 op1_elt->in_struct = op1_in_struct;
6029 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
6030 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
6032 else
6034 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
6035 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
6038 return;
6041 /* If either side is still missing an equivalence, make it now,
6042 then merge the equivalences. */
6044 if (op0_elt == 0)
6046 if (insert_regs (op0, NULL_PTR, 0))
6048 rehash_using_reg (op0);
6049 op0_hash = HASH (op0, mode);
6052 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6053 op0_elt->in_memory = op0_in_memory;
6054 op0_elt->in_struct = op0_in_struct;
6057 if (op1_elt == 0)
6059 if (insert_regs (op1, NULL_PTR, 0))
6061 rehash_using_reg (op1);
6062 op1_hash = HASH (op1, mode);
6065 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6066 op1_elt->in_memory = op1_in_memory;
6067 op1_elt->in_struct = op1_in_struct;
6070 merge_equiv_classes (op0_elt, op1_elt);
6071 last_jump_equiv_class = op0_elt;
6074 /* CSE processing for one instruction.
6075 First simplify sources and addresses of all assignments
6076 in the instruction, using previously-computed equivalent values.
6077 Then install the new sources and destinations in the table
6078 of available values.
6080 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
6081 the insn. */
6083 /* Data on one SET contained in the instruction. */
6085 struct set
6087 /* The SET rtx itself. */
6088 rtx rtl;
6089 /* The SET_SRC of the rtx (the original value, if it is changing). */
6090 rtx src;
6091 /* The hash-table element for the SET_SRC of the SET. */
6092 struct table_elt *src_elt;
6093 /* Hash value for the SET_SRC. */
6094 unsigned src_hash;
6095 /* Hash value for the SET_DEST. */
6096 unsigned dest_hash;
6097 /* The SET_DEST, with SUBREG, etc., stripped. */
6098 rtx inner_dest;
6099 /* Place where the pointer to the INNER_DEST was found. */
6100 rtx *inner_dest_loc;
6101 /* Nonzero if the SET_SRC is in memory. */
6102 char src_in_memory;
6103 /* Nonzero if the SET_SRC is in a structure. */
6104 char src_in_struct;
6105 /* Nonzero if the SET_SRC contains something
6106 whose value cannot be predicted and understood. */
6107 char src_volatile;
6108 /* Original machine mode, in case it becomes a CONST_INT. */
6109 enum machine_mode mode;
6110 /* A constant equivalent for SET_SRC, if any. */
6111 rtx src_const;
6112 /* Hash value of constant equivalent for SET_SRC. */
6113 unsigned src_const_hash;
6114 /* Table entry for constant equivalent for SET_SRC, if any. */
6115 struct table_elt *src_const_elt;
6118 static void
6119 cse_insn (insn, in_libcall_block)
6120 rtx insn;
6121 int in_libcall_block;
6123 register rtx x = PATTERN (insn);
6124 register int i;
6125 rtx tem;
6126 register int n_sets = 0;
6128 #ifdef HAVE_cc0
6129 /* Records what this insn does to set CC0. */
6130 rtx this_insn_cc0 = 0;
6131 enum machine_mode this_insn_cc0_mode = VOIDmode;
6132 #endif
6134 rtx src_eqv = 0;
6135 struct table_elt *src_eqv_elt = 0;
6136 int src_eqv_volatile;
6137 int src_eqv_in_memory;
6138 int src_eqv_in_struct;
6139 unsigned src_eqv_hash;
6141 struct set *sets;
6143 this_insn = insn;
6145 /* Find all the SETs and CLOBBERs in this instruction.
6146 Record all the SETs in the array `set' and count them.
6147 Also determine whether there is a CLOBBER that invalidates
6148 all memory references, or all references at varying addresses. */
6150 if (GET_CODE (insn) == CALL_INSN)
6152 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6153 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6154 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6157 if (GET_CODE (x) == SET)
6159 sets = (struct set *) alloca (sizeof (struct set));
6160 sets[0].rtl = x;
6162 /* Ignore SETs that are unconditional jumps.
6163 They never need cse processing, so this does not hurt.
6164 The reason is not efficiency but rather
6165 so that we can test at the end for instructions
6166 that have been simplified to unconditional jumps
6167 and not be misled by unchanged instructions
6168 that were unconditional jumps to begin with. */
6169 if (SET_DEST (x) == pc_rtx
6170 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6173 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6174 The hard function value register is used only once, to copy to
6175 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6176 Ensure we invalidate the destination register. On the 80386 no
6177 other code would invalidate it since it is a fixed_reg.
6178 We need not check the return of apply_change_group; see canon_reg. */
6180 else if (GET_CODE (SET_SRC (x)) == CALL)
6182 canon_reg (SET_SRC (x), insn);
6183 apply_change_group ();
6184 fold_rtx (SET_SRC (x), insn);
6185 invalidate (SET_DEST (x), VOIDmode);
6187 else
6188 n_sets = 1;
6190 else if (GET_CODE (x) == PARALLEL)
6192 register int lim = XVECLEN (x, 0);
6194 sets = (struct set *) alloca (lim * sizeof (struct set));
6196 /* Find all regs explicitly clobbered in this insn,
6197 and ensure they are not replaced with any other regs
6198 elsewhere in this insn.
6199 When a reg that is clobbered is also used for input,
6200 we should presume that that is for a reason,
6201 and we should not substitute some other register
6202 which is not supposed to be clobbered.
6203 Therefore, this loop cannot be merged into the one below
6204 because a CALL may precede a CLOBBER and refer to the
6205 value clobbered. We must not let a canonicalization do
6206 anything in that case. */
6207 for (i = 0; i < lim; i++)
6209 register rtx y = XVECEXP (x, 0, i);
6210 if (GET_CODE (y) == CLOBBER)
6212 rtx clobbered = XEXP (y, 0);
6214 if (GET_CODE (clobbered) == REG
6215 || GET_CODE (clobbered) == SUBREG)
6216 invalidate (clobbered, VOIDmode);
6217 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6218 || GET_CODE (clobbered) == ZERO_EXTRACT)
6219 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6223 for (i = 0; i < lim; i++)
6225 register rtx y = XVECEXP (x, 0, i);
6226 if (GET_CODE (y) == SET)
6228 /* As above, we ignore unconditional jumps and call-insns and
6229 ignore the result of apply_change_group. */
6230 if (GET_CODE (SET_SRC (y)) == CALL)
6232 canon_reg (SET_SRC (y), insn);
6233 apply_change_group ();
6234 fold_rtx (SET_SRC (y), insn);
6235 invalidate (SET_DEST (y), VOIDmode);
6237 else if (SET_DEST (y) == pc_rtx
6238 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6240 else
6241 sets[n_sets++].rtl = y;
6243 else if (GET_CODE (y) == CLOBBER)
6245 /* If we clobber memory, canon the address.
6246 This does nothing when a register is clobbered
6247 because we have already invalidated the reg. */
6248 if (GET_CODE (XEXP (y, 0)) == MEM)
6249 canon_reg (XEXP (y, 0), NULL_RTX);
6251 else if (GET_CODE (y) == USE
6252 && ! (GET_CODE (XEXP (y, 0)) == REG
6253 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6254 canon_reg (y, NULL_RTX);
6255 else if (GET_CODE (y) == CALL)
6257 /* The result of apply_change_group can be ignored; see
6258 canon_reg. */
6259 canon_reg (y, insn);
6260 apply_change_group ();
6261 fold_rtx (y, insn);
6265 else if (GET_CODE (x) == CLOBBER)
6267 if (GET_CODE (XEXP (x, 0)) == MEM)
6268 canon_reg (XEXP (x, 0), NULL_RTX);
6271 /* Canonicalize a USE of a pseudo register or memory location. */
6272 else if (GET_CODE (x) == USE
6273 && ! (GET_CODE (XEXP (x, 0)) == REG
6274 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6275 canon_reg (XEXP (x, 0), NULL_RTX);
6276 else if (GET_CODE (x) == CALL)
6278 /* The result of apply_change_group can be ignored; see canon_reg. */
6279 canon_reg (x, insn);
6280 apply_change_group ();
6281 fold_rtx (x, insn);
6284 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6285 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6286 is handled specially for this case, and if it isn't set, then there will
6287 be no equivalence for the destination. */
6288 if (n_sets == 1 && REG_NOTES (insn) != 0
6289 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6290 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6291 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6292 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6294 /* Canonicalize sources and addresses of destinations.
6295 We do this in a separate pass to avoid problems when a MATCH_DUP is
6296 present in the insn pattern. In that case, we want to ensure that
6297 we don't break the duplicate nature of the pattern. So we will replace
6298 both operands at the same time. Otherwise, we would fail to find an
6299 equivalent substitution in the loop calling validate_change below.
6301 We used to suppress canonicalization of DEST if it appears in SRC,
6302 but we don't do this any more. */
6304 for (i = 0; i < n_sets; i++)
6306 rtx dest = SET_DEST (sets[i].rtl);
6307 rtx src = SET_SRC (sets[i].rtl);
6308 rtx new = canon_reg (src, insn);
6309 int insn_code;
6311 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6312 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6313 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6314 || (insn_code = recog_memoized (insn)) < 0
6315 || insn_n_dups[insn_code] > 0)
6316 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6317 else
6318 SET_SRC (sets[i].rtl) = new;
6320 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6322 validate_change (insn, &XEXP (dest, 1),
6323 canon_reg (XEXP (dest, 1), insn), 1);
6324 validate_change (insn, &XEXP (dest, 2),
6325 canon_reg (XEXP (dest, 2), insn), 1);
6328 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6329 || GET_CODE (dest) == ZERO_EXTRACT
6330 || GET_CODE (dest) == SIGN_EXTRACT)
6331 dest = XEXP (dest, 0);
6333 if (GET_CODE (dest) == MEM)
6334 canon_reg (dest, insn);
6337 /* Now that we have done all the replacements, we can apply the change
6338 group and see if they all work. Note that this will cause some
6339 canonicalizations that would have worked individually not to be applied
6340 because some other canonicalization didn't work, but this should not
6341 occur often.
6343 The result of apply_change_group can be ignored; see canon_reg. */
6345 apply_change_group ();
6347 /* Set sets[i].src_elt to the class each source belongs to.
6348 Detect assignments from or to volatile things
6349 and set sets[i] to zero so they will be ignored
6350 in the rest of this function.
6352 Nothing in this loop changes the hash table or the register chains. */
6354 for (i = 0; i < n_sets; i++)
6356 register rtx src, dest;
6357 register rtx src_folded;
6358 register struct table_elt *elt = 0, *p;
6359 enum machine_mode mode;
6360 rtx src_eqv_here;
6361 rtx src_const = 0;
6362 rtx src_related = 0;
6363 struct table_elt *src_const_elt = 0;
6364 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6365 int src_related_cost = 10000, src_elt_cost = 10000;
6366 /* Set non-zero if we need to call force_const_mem with the
6367 contents of src_folded before using it. */
6368 int src_folded_force_flag = 0;
6370 dest = SET_DEST (sets[i].rtl);
6371 src = SET_SRC (sets[i].rtl);
6373 /* If SRC is a constant that has no machine mode,
6374 hash it with the destination's machine mode.
6375 This way we can keep different modes separate. */
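/* Editor's illustration: (const_int 1) has VOIDmode, so when stored
   into an SImode register it is hashed in SImode and will not share
   an equivalence class with the "same" constant stored in DImode.  */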
6377 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6378 sets[i].mode = mode;
6380 if (src_eqv)
6382 enum machine_mode eqvmode = mode;
6383 if (GET_CODE (dest) == STRICT_LOW_PART)
6384 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6385 do_not_record = 0;
6386 hash_arg_in_memory = 0;
6387 hash_arg_in_struct = 0;
6388 src_eqv = fold_rtx (src_eqv, insn);
6389 src_eqv_hash = HASH (src_eqv, eqvmode);
6391 /* Find the equivalence class for the equivalent expression. */
6393 if (!do_not_record)
6394 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6396 src_eqv_volatile = do_not_record;
6397 src_eqv_in_memory = hash_arg_in_memory;
6398 src_eqv_in_struct = hash_arg_in_struct;
6401 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6402 value of the INNER register, not the destination. So it is not
6403 a valid substitution for the source. But save it for later. */
6404 if (GET_CODE (dest) == STRICT_LOW_PART)
6405 src_eqv_here = 0;
6406 else
6407 src_eqv_here = src_eqv;
6409 /* Simplify any foldable subexpressions in SRC. Then get the fully-
6410 simplified result, which may not necessarily be valid. */
6411 src_folded = fold_rtx (src, insn);
6413 #if 0
6414 /* ??? This caused bad code to be generated for the m68k port with -O2.
6415 Suppose src is (CONST_INT -1), and that after truncation src_folded
6416 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6417 At the end we will add src and src_const to the same equivalence
6418 class. We now have 3 and -1 on the same equivalence class. This
6419 causes later instructions to be mis-optimized. */
6420 /* If storing a constant in a bitfield, pre-truncate the constant
6421 so we will be able to record it later. */
6422 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6423 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6425 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6427 if (GET_CODE (src) == CONST_INT
6428 && GET_CODE (width) == CONST_INT
6429 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6430 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6431 src_folded
6432 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6433 << INTVAL (width)) - 1));
6435 #endif
6437 /* Compute SRC's hash code, and also notice if it
6438 should not be recorded at all. In that case,
6439 prevent any further processing of this assignment. */
6440 do_not_record = 0;
6441 hash_arg_in_memory = 0;
6442 hash_arg_in_struct = 0;
6444 sets[i].src = src;
6445 sets[i].src_hash = HASH (src, mode);
6446 sets[i].src_volatile = do_not_record;
6447 sets[i].src_in_memory = hash_arg_in_memory;
6448 sets[i].src_in_struct = hash_arg_in_struct;
6450 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6451 a pseudo that is set more than once, do not record SRC. Using
6452 SRC as a replacement for anything else will be incorrect in that
6453 situation. Note that this usually occurs only for stack slots,
6454 in which case all the RTL would be referring to SRC, so we don't
6455 lose any optimization opportunities by not having SRC in the
6456 hash table. */
6458 if (GET_CODE (src) == MEM
6459 && find_reg_note (insn, REG_EQUIV, src) != 0
6460 && GET_CODE (dest) == REG
6461 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6462 && REG_N_SETS (REGNO (dest)) != 1)
6463 sets[i].src_volatile = 1;
6465 #if 0
6466 /* It is no longer clear why we used to do this, but it doesn't
6467 appear to still be needed. So let's try without it since this
6468 code hurts cse'ing widened ops. */
6469 /* If source is a perverse subreg (such as QI treated as an SI),
6470 treat it as volatile. It may do the work of an SI in one context
6471 where the extra bits are not being used, but cannot replace an SI
6472 in general. */
6473 if (GET_CODE (src) == SUBREG
6474 && (GET_MODE_SIZE (GET_MODE (src))
6475 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6476 sets[i].src_volatile = 1;
6477 #endif
6479 /* Locate all possible equivalent forms for SRC. Try to replace
6480 SRC in the insn with each cheaper equivalent.
6482 We have the following types of equivalents: SRC itself, a folded
6483 version, a value given in a REG_EQUAL note, or a value related
6484 to a constant.
6486 Each of these equivalents may be part of an additional class
6487 of equivalents (if more than one is in the table, they must be in
6488 the same class; we check for this).
6490 If the source is volatile, we don't do any table lookups.
6492 We note any constant equivalent for possible later use in a
6493 REG_NOTE. */
6495 if (!sets[i].src_volatile)
6496 elt = lookup (src, sets[i].src_hash, mode);
6498 sets[i].src_elt = elt;
6500 if (elt && src_eqv_here && src_eqv_elt)
6502 if (elt->first_same_value != src_eqv_elt->first_same_value)
6504 /* The REG_EQUAL is indicating that two formerly distinct
6505 classes are now equivalent. So merge them. */
6506 merge_equiv_classes (elt, src_eqv_elt);
6507 src_eqv_hash = HASH (src_eqv, elt->mode);
6508 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6511 src_eqv_here = 0;
6514 else if (src_eqv_elt)
6515 elt = src_eqv_elt;
6517 /* Try to find a constant somewhere and record it in `src_const'.
6518 Record its table element, if any, in `src_const_elt'. Look in
6519 any known equivalences first. (If the constant is not in the
6520 table, also set `sets[i].src_const_hash'). */
6521 if (elt)
6522 for (p = elt->first_same_value; p; p = p->next_same_value)
6523 if (p->is_const)
6525 src_const = p->exp;
6526 src_const_elt = elt;
6527 break;
6530 if (src_const == 0
6531 && (CONSTANT_P (src_folded)
6532 /* Consider (minus (label_ref L1) (label_ref L2)) as
6533 "constant" here so we will record it. This allows us
6534 to fold switch statements when an ADDR_DIFF_VEC is used. */
6535 || (GET_CODE (src_folded) == MINUS
6536 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6537 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6538 src_const = src_folded, src_const_elt = elt;
6539 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6540 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6542 /* If we don't know if the constant is in the table, get its
6543 hash code and look it up. */
6544 if (src_const && src_const_elt == 0)
6546 sets[i].src_const_hash = HASH (src_const, mode);
6547 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6550 sets[i].src_const = src_const;
6551 sets[i].src_const_elt = src_const_elt;
6553 /* If the constant and our source are both in the table, mark them as
6554 equivalent. Otherwise, if a constant is in the table but the source
6555 isn't, set ELT to it. */
6556 if (src_const_elt && elt
6557 && src_const_elt->first_same_value != elt->first_same_value)
6558 merge_equiv_classes (elt, src_const_elt);
6559 else if (src_const_elt && elt == 0)
6560 elt = src_const_elt;
6562 /* See if there is a register linearly related to a constant
6563 equivalent of SRC. */
6564 if (src_const
6565 && (GET_CODE (src_const) == CONST
6566 || (src_const_elt && src_const_elt->related_value != 0)))
6568 src_related = use_related_value (src_const, src_const_elt);
6569 if (src_related)
6571 struct table_elt *src_related_elt
6572 = lookup (src_related, HASH (src_related, mode), mode);
6573 if (src_related_elt && elt)
6575 if (elt->first_same_value
6576 != src_related_elt->first_same_value)
6577 /* This can occur when we previously saw a CONST
6578 involving a SYMBOL_REF and then see the SYMBOL_REF
6579 twice. Merge the involved classes. */
6580 merge_equiv_classes (elt, src_related_elt);
6582 src_related = 0;
6583 src_related_elt = 0;
6585 else if (src_related_elt && elt == 0)
6586 elt = src_related_elt;
6590 /* See if we have a CONST_INT that is already in a register in a
6591 wider mode. */
6593 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6594 && GET_MODE_CLASS (mode) == MODE_INT
6595 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6597 enum machine_mode wider_mode;
6599 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6600 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6601 && src_related == 0;
6602 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6604 struct table_elt *const_elt
6605 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6607 if (const_elt == 0)
6608 continue;
6610 for (const_elt = const_elt->first_same_value;
6611 const_elt; const_elt = const_elt->next_same_value)
6612 if (GET_CODE (const_elt->exp) == REG)
6614 src_related = gen_lowpart_if_possible (mode,
6615 const_elt->exp);
6616 break;
6621 /* Another possibility is that we have an AND with a constant in
6622 a mode narrower than a word. If so, it might have been generated
6623 as part of an "if" which would narrow the AND. If we already
6624 have done the AND in a wider mode, we can use a SUBREG of that
6625 value. */
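/* Editor's sketch: if SRC is
   (and:QI (subreg:QI (reg:SI X) 0) (const_int 15)) and the table
   already holds (and:SI (reg:SI X) (const_int 15)) in a class with
   (reg:SI R), the loop below sets src_related to
   (subreg:QI (reg:SI R) 0).  */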
6627 if (flag_expensive_optimizations && ! src_related
6628 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6629 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6631 enum machine_mode tmode;
6632 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
6634 for (tmode = GET_MODE_WIDER_MODE (mode);
6635 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6636 tmode = GET_MODE_WIDER_MODE (tmode))
6638 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6639 struct table_elt *larger_elt;
6641 if (inner)
6643 PUT_MODE (new_and, tmode);
6644 XEXP (new_and, 0) = inner;
6645 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6646 if (larger_elt == 0)
6647 continue;
6649 for (larger_elt = larger_elt->first_same_value;
6650 larger_elt; larger_elt = larger_elt->next_same_value)
6651 if (GET_CODE (larger_elt->exp) == REG)
6653 src_related
6654 = gen_lowpart_if_possible (mode, larger_elt->exp);
6655 break;
6658 if (src_related)
6659 break;
6664 #ifdef LOAD_EXTEND_OP
6665 /* See if a MEM has already been loaded with a widening operation;
6666 if it has, we can use a subreg of that. Many CISC machines
6667 also have such operations, but this is only likely to be
6668 beneficial on these machines. */
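/* Editor's sketch: if (reg:SI R) was loaded by
   (zero_extend:SI (mem:QI A)) and LOAD_EXTEND_OP (QImode) is
   ZERO_EXTEND, a later plain (mem:QI A) can be replaced by
   (subreg:QI (reg:SI R) 0), found by the lookup below.  */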
6670 if (flag_expensive_optimizations && src_related == 0
6671 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6672 && GET_MODE_CLASS (mode) == MODE_INT
6673 && GET_CODE (src) == MEM && ! do_not_record
6674 && LOAD_EXTEND_OP (mode) != NIL)
6676 enum machine_mode tmode;
6678 /* Set what we are trying to extend and the operation it might
6679 have been extended with. */
6680 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6681 XEXP (memory_extend_rtx, 0) = src;
6683 for (tmode = GET_MODE_WIDER_MODE (mode);
6684 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6685 tmode = GET_MODE_WIDER_MODE (tmode))
6687 struct table_elt *larger_elt;
6689 PUT_MODE (memory_extend_rtx, tmode);
6690 larger_elt = lookup (memory_extend_rtx,
6691 HASH (memory_extend_rtx, tmode), tmode);
6692 if (larger_elt == 0)
6693 continue;
6695 for (larger_elt = larger_elt->first_same_value;
6696 larger_elt; larger_elt = larger_elt->next_same_value)
6697 if (GET_CODE (larger_elt->exp) == REG)
6699 src_related = gen_lowpart_if_possible (mode,
6700 larger_elt->exp);
6701 break;
6704 if (src_related)
6705 break;
6708 #endif /* LOAD_EXTEND_OP */
6710 if (src == src_folded)
6711 src_folded = 0;
6713 /* At this point, ELT, if non-zero, points to a class of expressions
6714 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6715 and SRC_RELATED, if non-zero, each contain additional equivalent
6716 expressions. Prune these latter expressions by deleting expressions
6717 already in the equivalence class.
6719 Check for an equivalent identical to the destination. If found,
6720 this is the preferred equivalent since it will likely lead to
6721 elimination of the insn. Indicate this by placing it in
6722 `src_related'. */
6724 if (elt) elt = elt->first_same_value;
6725 for (p = elt; p; p = p->next_same_value)
6727 enum rtx_code code = GET_CODE (p->exp);
6729 /* If the expression is not valid, ignore it. Then we do not
6730 have to check for validity below. In most cases, we can use
6731 `rtx_equal_p', since canonicalization has already been done. */
6732 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6733 continue;
6735 /* Also skip paradoxical subregs, unless that's what we're
6736 looking for. */
6737 if (code == SUBREG
6738 && (GET_MODE_SIZE (GET_MODE (p->exp))
6739 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6740 && ! (src != 0
6741 && GET_CODE (src) == SUBREG
6742 && GET_MODE (src) == GET_MODE (p->exp)
6743 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6744 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
6745 continue;
6747 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6748 src = 0;
6749 else if (src_folded && GET_CODE (src_folded) == code
6750 && rtx_equal_p (src_folded, p->exp))
6751 src_folded = 0;
6752 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6753 && rtx_equal_p (src_eqv_here, p->exp))
6754 src_eqv_here = 0;
6755 else if (src_related && GET_CODE (src_related) == code
6756 && rtx_equal_p (src_related, p->exp))
6757 src_related = 0;
6759 /* If this is the same as the destination of the insn, we want
6760 to prefer it. Copy it to src_related. The code below will
6761 then give it a negative cost. */
6762 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6763 src_related = dest;
6767 /* Find the cheapest valid equivalent, trying all the available
6768 possibilities. Prefer items not in the hash table to ones
6769 that are when they are of equal cost. Note that we can never
6770 worsen an insn as the current contents will also succeed.
6771 If we find an equivalent identical to the destination, use it as best,
6772 since this insn will probably be eliminated in that case. */
6773 if (src)
6775 if (rtx_equal_p (src, dest))
6776 src_cost = -1;
6777 else
6778 src_cost = COST (src);
6781 if (src_eqv_here)
6783 if (rtx_equal_p (src_eqv_here, dest))
6784 src_eqv_cost = -1;
6785 else
6786 src_eqv_cost = COST (src_eqv_here);
6789 if (src_folded)
6791 if (rtx_equal_p (src_folded, dest))
6792 src_folded_cost = -1;
6793 else
6794 src_folded_cost = COST (src_folded);
6797 if (src_related)
6799 if (rtx_equal_p (src_related, dest))
6800 src_related_cost = -1;
6801 else
6802 src_related_cost = COST (src_related);
6805 /* If this was an indirect jump insn, a known label will really be
6806 cheaper even though it looks more expensive. */
6807 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6808 src_folded = src_const, src_folded_cost = -1;
6810 /* Terminate loop when replacement made. This must terminate since
6811 the current contents will be tested and will always be valid. */
6812 while (1)
6814 rtx trial;
6816 /* Skip invalid entries. */
6817 while (elt && GET_CODE (elt->exp) != REG
6818 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6819 elt = elt->next_same_value;
6821 /* A paradoxical subreg would be bad here: it'll be the right
6822 size, but later may be adjusted so that the upper bits aren't
6823 what we want. So reject it. */
6824 if (elt != 0
6825 && GET_CODE (elt->exp) == SUBREG
6826 && (GET_MODE_SIZE (GET_MODE (elt->exp))
6827 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
6828 /* It is okay, though, if the rtx we're trying to match
6829 will ignore any of the bits we can't predict. */
6830 && ! (src != 0
6831 && GET_CODE (src) == SUBREG
6832 && GET_MODE (src) == GET_MODE (elt->exp)
6833 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6834 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
6836 elt = elt->next_same_value;
6837 continue;
6840 if (elt) src_elt_cost = elt->cost;
6842 /* Find cheapest and skip it for the next time. For items
6843 of equal cost, use this order:
6844 src_folded, src, src_eqv, src_related and hash table entry. */
6845 if (src_folded_cost <= src_cost
6846 && src_folded_cost <= src_eqv_cost
6847 && src_folded_cost <= src_related_cost
6848 && src_folded_cost <= src_elt_cost)
6850 trial = src_folded, src_folded_cost = 10000;
6851 if (src_folded_force_flag)
6852 trial = force_const_mem (mode, trial);
6854 else if (src_cost <= src_eqv_cost
6855 && src_cost <= src_related_cost
6856 && src_cost <= src_elt_cost)
6857 trial = src, src_cost = 10000;
6858 else if (src_eqv_cost <= src_related_cost
6859 && src_eqv_cost <= src_elt_cost)
6860 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6861 else if (src_related_cost <= src_elt_cost)
6862 trial = copy_rtx (src_related), src_related_cost = 10000;
6863 else
6865 trial = copy_rtx (elt->exp);
6866 elt = elt->next_same_value;
6867 src_elt_cost = 10000;
6870 /* We don't normally have an insn matching (set (pc) (pc)), so
6871 check for this separately here. We will delete such an
6872 insn below.
6874 Tablejump insns contain a USE of the table, so simply replacing
6875 the operand with the constant won't match. This is simply an
6876 unconditional branch, however, and is therefore valid. Just
6877 insert the substitution here and we will delete and re-emit
6878 the insn later. */
6880 if (n_sets == 1 && dest == pc_rtx
6881 && (trial == pc_rtx
6882 || (GET_CODE (trial) == LABEL_REF
6883 && ! condjump_p (insn))))
6885 /* If TRIAL is a label in front of a jump table, we are
6886 really falling through the switch (this is how casesi
6887 insns work), so we must branch around the table. */
6888 if (GET_CODE (trial) == CODE_LABEL
6889 && NEXT_INSN (trial) != 0
6890 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6891 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6892 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6894 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
6896 SET_SRC (sets[i].rtl) = trial;
6897 cse_jumps_altered = 1;
6898 break;
6901 /* Look for a substitution that makes a valid insn. */
6902 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6904 /* The result of apply_change_group can be ignored; see
6905 canon_reg. */
6907 validate_change (insn, &SET_SRC (sets[i].rtl),
6908 canon_reg (SET_SRC (sets[i].rtl), insn),
6910 apply_change_group ();
6911 break;
6914 /* If we previously found constant pool entries for
6915 constants and this is a constant, try making a
6916 pool entry. Put it in src_folded unless we have already done
6917 this, since that is where it likely came from. */
6919 else if (constant_pool_entries_cost
6920 && CONSTANT_P (trial)
6921 && ! (GET_CODE (trial) == CONST
6922 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
6923 && (src_folded == 0
6924 || (GET_CODE (src_folded) != MEM
6925 && ! src_folded_force_flag))
6926 && GET_MODE_CLASS (mode) != MODE_CC
6927 && mode != VOIDmode)
6929 src_folded_force_flag = 1;
6930 src_folded = trial;
6931 src_folded_cost = constant_pool_entries_cost;
6935 src = SET_SRC (sets[i].rtl);
6937 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6938 However, there is an important exception: If both are registers
6939 that are not the head of their equivalence class, replace SET_SRC
6940 with the head of the class. If we do not do this, we will have
6941 both registers live over a portion of the basic block. This way,
6942 their lifetimes will likely abut instead of overlapping. */
6943 if (GET_CODE (dest) == REG
6944 && REGNO_QTY_VALID_P (REGNO (dest))
6945 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6946 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6947 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6948 /* Don't do this if the original insn had a hard reg as
6949 SET_SRC. */
6950 && (GET_CODE (sets[i].src) != REG
6951 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6952 /* We can't call canon_reg here because it won't do anything if
6953 SRC is a hard register. */
6955 int first = qty_first_reg[reg_qty[REGNO (src)]];
6957 src = SET_SRC (sets[i].rtl)
6958 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6959 : gen_rtx_REG (GET_MODE (src), first);
6961 /* If we had a constant that is cheaper than what we are now
6962 setting SRC to, use that constant. We ignored it when we
6963 thought we could make this into a no-op. */
6964 if (src_const && COST (src_const) < COST (src)
6965 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6966 src = src_const;
6969 /* If we made a change, recompute SRC values. */
6970 if (src != sets[i].src)
6972 do_not_record = 0;
6973 hash_arg_in_memory = 0;
6974 hash_arg_in_struct = 0;
6975 sets[i].src = src;
6976 sets[i].src_hash = HASH (src, mode);
6977 sets[i].src_volatile = do_not_record;
6978 sets[i].src_in_memory = hash_arg_in_memory;
6979 sets[i].src_in_struct = hash_arg_in_struct;
6980 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6983 /* If this is a single SET, we are setting a register, and we have an
6984 equivalent constant, we want to add a REG_NOTE. We don't want
6985 to write a REG_EQUAL note for a constant pseudo since verifying that
6986 that pseudo hasn't been eliminated is a pain. Such a note also
6987 won't help anything. */
6988 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6989 && GET_CODE (src_const) != REG)
6991 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6993 /* Record the actual constant value in a REG_EQUAL note, making
6994 a new one if one does not already exist. */
6995 if (tem)
6996 XEXP (tem, 0) = src_const;
6997 else
6998 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
6999 src_const, REG_NOTES (insn));
7001 /* If storing a constant value in a register that
7002 previously held the constant value 0,
7003 record this fact with a REG_WAS_0 note on this insn.
7005 Note that the *register* is required to have previously held 0,
7006 not just any register in the quantity, and we must point to the
7007 insn that set that register to zero.
7009 Rather than track each register individually, we just see if
7010 the last set for this quantity was for this register. */
7012 if (REGNO_QTY_VALID_P (REGNO (dest))
7013 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
7015 /* See if we previously had a REG_WAS_0 note. */
7016 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7017 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
7019 if ((tem = single_set (const_insn)) != 0
7020 && rtx_equal_p (SET_DEST (tem), dest))
7022 if (note)
7023 XEXP (note, 0) = const_insn;
7024 else
7025 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7026 const_insn,
7027 REG_NOTES (insn));
7032 /* Now deal with the destination. */
7033 do_not_record = 0;
7034 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
7036 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7037 to the MEM or REG within it. */
7038 while (GET_CODE (dest) == SIGN_EXTRACT
7039 || GET_CODE (dest) == ZERO_EXTRACT
7040 || GET_CODE (dest) == SUBREG
7041 || GET_CODE (dest) == STRICT_LOW_PART)
7043 sets[i].inner_dest_loc = &XEXP (dest, 0);
7044 dest = XEXP (dest, 0);
7047 sets[i].inner_dest = dest;
7049 if (GET_CODE (dest) == MEM)
7051 #ifdef PUSH_ROUNDING
7052 /* Stack pushes invalidate the stack pointer. */
7053 rtx addr = XEXP (dest, 0);
7054 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7055 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7056 && XEXP (addr, 0) == stack_pointer_rtx)
7057 invalidate (stack_pointer_rtx, Pmode);
7058 #endif
7059 dest = fold_rtx (dest, insn);
7062 /* Compute the hash code of the destination now,
7063 before the effects of this instruction are recorded,
7064 since the register values used in the address computation
7065 are those before this instruction. */
7066 sets[i].dest_hash = HASH (dest, mode);
7068 /* Don't enter a bit-field in the hash table
7069 because the value in it after the store
7070 may not equal what was stored, due to truncation. */
7072 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7073 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7075 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7077 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7078 && GET_CODE (width) == CONST_INT
7079 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7080 && ! (INTVAL (src_const)
7081 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7082 /* Exception: if the value is constant,
7083 and it won't be truncated, record it. */
7085 else
7087 /* This is chosen so that the destination will be invalidated
7088 but no new value will be recorded.
7089 We must invalidate because sometimes constant
7090 values can be recorded for bitfields. */
7091 sets[i].src_elt = 0;
7092 sets[i].src_volatile = 1;
7093 src_eqv = 0;
7094 src_eqv_elt = 0;
7098 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7099 the insn. */
7100 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7102 PUT_CODE (insn, NOTE);
7103 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7104 NOTE_SOURCE_FILE (insn) = 0;
7105 cse_jumps_altered = 1;
7106 /* One less use of the label this insn used to jump to. */
7107 if (JUMP_LABEL (insn) != 0)
7108 --LABEL_NUSES (JUMP_LABEL (insn));
7109 /* No more processing for this set. */
7110 sets[i].rtl = 0;
7113 /* If this SET is now setting PC to a label, we know it used to
7114 be a conditional or computed branch. So we see if we can follow
7115 it. If it was a computed branch, delete it and re-emit. */
7116 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7118 rtx p;
7120 /* If this is not in the format for a simple branch and
7121 this is the only SET in it, re-emit it.
7122 if (! simplejump_p (insn) && n_sets == 1)
7124 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7125 JUMP_LABEL (new) = XEXP (src, 0);
7126 LABEL_NUSES (XEXP (src, 0))++;
7127 delete_insn (insn);
7128 insn = new;
7130 else
7131 /* Otherwise, force rerecognition, since it probably had
7132 a different pattern before.
7133 This shouldn't really be necessary, since whatever
7134 changed the source value above should have done this.
7135 Until the right place is found, might as well do this here. */
7136 INSN_CODE (insn) = -1;
7138 /* Now that we've converted this jump to an unconditional jump,
7139 there is dead code after it. Delete the dead code until we
7140 reach a BARRIER, the end of the function, or a label. Do
7141 not delete NOTEs except for NOTE_INSN_DELETED since later
7142 phases assume these notes are retained. */
7144 p = insn;
7146 while (NEXT_INSN (p) != 0
7147 && GET_CODE (NEXT_INSN (p)) != BARRIER
7148 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7150 if (GET_CODE (NEXT_INSN (p)) != NOTE
7151 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7152 delete_insn (NEXT_INSN (p));
7153 else
7154 p = NEXT_INSN (p);
7157 /* If we don't have a BARRIER immediately after INSN, put one there.
7158 Much code assumes that there are no NOTEs between a JUMP_INSN and
7159 BARRIER. */
7161 if (NEXT_INSN (insn) == 0
7162 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7163 emit_barrier_before (NEXT_INSN (insn));
7165 /* We might have two BARRIERs separated by notes. Delete the second
7166 one if so. */
7168 if (p != insn && NEXT_INSN (p) != 0
7169 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7170 delete_insn (NEXT_INSN (p));
7172 cse_jumps_altered = 1;
7173 sets[i].rtl = 0;
7176 /* If destination is volatile, invalidate it and then do no further
7177 processing for this assignment. */
7179 else if (do_not_record)
7181 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7182 || GET_CODE (dest) == MEM)
7183 invalidate (dest, VOIDmode);
7184 else if (GET_CODE (dest) == STRICT_LOW_PART
7185 || GET_CODE (dest) == ZERO_EXTRACT)
7186 invalidate (XEXP (dest, 0), GET_MODE (dest));
7187 sets[i].rtl = 0;
7190 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7191 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7193 #ifdef HAVE_cc0
7194 /* If setting CC0, record what it was set to, or a constant, if it
7195 is equivalent to a constant. If it is being set to a floating-point
7196 value, make a COMPARE with the appropriate constant of 0. If we
7197 don't do this, later code can interpret this as a test against
7198 const0_rtx, which can cause problems if we try to put it into an
7199 insn as a floating-point operand. */
7200 if (dest == cc0_rtx)
7202 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7203 this_insn_cc0_mode = mode;
7204 if (FLOAT_MODE_P (mode))
7205 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7206 CONST0_RTX (mode));
7208 #endif
7211 /* Now enter all non-volatile source expressions in the hash table
7212 if they are not already present.
7213 Record their equivalence classes in src_elt.
7214 This way we can insert the corresponding destinations into
7215 the same classes even if the actual sources are no longer in them
7216 (having been invalidated). */
7218 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7219 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7221 register struct table_elt *elt;
7222 register struct table_elt *classp = sets[0].src_elt;
7223 rtx dest = SET_DEST (sets[0].rtl);
7224 enum machine_mode eqvmode = GET_MODE (dest);
7226 if (GET_CODE (dest) == STRICT_LOW_PART)
7228 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7229 classp = 0;
7231 if (insert_regs (src_eqv, classp, 0))
7233 rehash_using_reg (src_eqv);
7234 src_eqv_hash = HASH (src_eqv, eqvmode);
7236 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7237 elt->in_memory = src_eqv_in_memory;
7238 elt->in_struct = src_eqv_in_struct;
7239 src_eqv_elt = elt;
7241 /* Check to see if src_eqv_elt is the same as a set source which
7242 does not yet have an elt, and if so set the elt of the set source
7243 to src_eqv_elt. */
7244 for (i = 0; i < n_sets; i++)
7245 if (sets[i].rtl && sets[i].src_elt == 0
7246 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7247 sets[i].src_elt = src_eqv_elt;
7250 for (i = 0; i < n_sets; i++)
7251 if (sets[i].rtl && ! sets[i].src_volatile
7252 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7254 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7256 /* REG_EQUAL in setting a STRICT_LOW_PART
7257 gives an equivalent for the entire destination register,
7258 not just for the subreg being stored in now.
7259 This is a more interesting equivalence, so we arrange later
7260 to treat the entire reg as the destination. */
7261 sets[i].src_elt = src_eqv_elt;
7262 sets[i].src_hash = src_eqv_hash;
7264 else
7266 /* Insert source and constant equivalent into hash table, if not
7267 already present. */
7268 register struct table_elt *classp = src_eqv_elt;
7269 register rtx src = sets[i].src;
7270 register rtx dest = SET_DEST (sets[i].rtl);
7271 enum machine_mode mode
7272 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7274 if (sets[i].src_elt == 0)
7276 register struct table_elt *elt;
7278 /* Note that these insert_regs calls cannot remove
7279 any of the src_elt's, because they would have failed to
7280 match if not still valid. */
7281 if (insert_regs (src, classp, 0))
7283 rehash_using_reg (src);
7284 sets[i].src_hash = HASH (src, mode);
7286 elt = insert (src, classp, sets[i].src_hash, mode);
7287 elt->in_memory = sets[i].src_in_memory;
7288 elt->in_struct = sets[i].src_in_struct;
7289 sets[i].src_elt = classp = elt;
7292 if (sets[i].src_const && sets[i].src_const_elt == 0
7293 && src != sets[i].src_const
7294 && ! rtx_equal_p (sets[i].src_const, src))
7295 sets[i].src_elt = insert (sets[i].src_const, classp,
7296 sets[i].src_const_hash, mode);
7299 else if (sets[i].src_elt == 0)
7300 /* If we did not insert the source into the hash table (e.g., it was
7301 volatile), note the equivalence class for the REG_EQUAL value, if any,
7302 so that the destination goes into that class. */
7303 sets[i].src_elt = src_eqv_elt;
7305 invalidate_from_clobbers (x);
7307 /* Some registers are invalidated by subroutine calls. Memory is
7308 invalidated by non-constant calls. */
7310 if (GET_CODE (insn) == CALL_INSN)
7312 if (! CONST_CALL_P (insn))
7313 invalidate_memory ();
7314 invalidate_for_call ();
7317 /* Now invalidate everything set by this instruction.
7318 If a SUBREG or other funny destination is being set,
7319 sets[i].rtl is still nonzero, so here we invalidate the reg
7320 a part of which is being set. */
7322 for (i = 0; i < n_sets; i++)
7323 if (sets[i].rtl)
7325 /* We can't use the inner dest, because the mode associated with
7326 a ZERO_EXTRACT is significant. */
7327 register rtx dest = SET_DEST (sets[i].rtl);
7329 /* Needed for registers to remove the register from its
7330 previous quantity's chain.
7331 Needed for memory if this is a nonvarying address, unless
7332 we have just done an invalidate_memory that covers even those. */
7333 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7334 || GET_CODE (dest) == MEM)
7335 invalidate (dest, VOIDmode);
7336 else if (GET_CODE (dest) == STRICT_LOW_PART
7337 || GET_CODE (dest) == ZERO_EXTRACT)
7338 invalidate (XEXP (dest, 0), GET_MODE (dest));
7341 /* Make sure registers mentioned in destinations
7342 are safe for use in an expression to be inserted.
7343 This removes from the hash table
7344 any invalid entry that refers to one of these registers.
7346 We don't care about the return value from mention_regs because
7347 we are going to hash the SET_DEST values unconditionally. */
7349 for (i = 0; i < n_sets; i++)
7350 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7351 mention_regs (SET_DEST (sets[i].rtl));
7353 /* We may have just removed some of the src_elt's from the hash table.
7354 So replace each one with the current head of the same class. */
7356 for (i = 0; i < n_sets; i++)
7357 if (sets[i].rtl)
7359 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7360 /* If elt was removed, find current head of same class,
7361 or 0 if nothing remains of that class. */
7363 register struct table_elt *elt = sets[i].src_elt;
7365 while (elt && elt->prev_same_value)
7366 elt = elt->prev_same_value;
7368 while (elt && elt->first_same_value == 0)
7369 elt = elt->next_same_value;
7370 sets[i].src_elt = elt ? elt->first_same_value : 0;
7374 /* Now insert the destinations into their equivalence classes. */
7376 for (i = 0; i < n_sets; i++)
7377 if (sets[i].rtl)
7379 register rtx dest = SET_DEST (sets[i].rtl);
7380 register struct table_elt *elt;
7382 /* Don't record value if we are not supposed to risk allocating
7383 floating-point values in registers that might be wider than
7384 memory. */
7385 if ((flag_float_store
7386 && GET_CODE (dest) == MEM
7387 && FLOAT_MODE_P (GET_MODE (dest)))
7388 /* Don't record BLKmode values, because we don't know the
7389 size of it, and can't be sure that other BLKmode values
7390 have the same or smaller size. */
7391 || GET_MODE (dest) == BLKmode
7392 /* Don't record values of destinations set inside a libcall block
7393 since we might delete the libcall. Things should have been set
7394 up so we won't want to reuse such a value, but we play it safe
7395 here. */
7396 || in_libcall_block
7397 /* If we didn't put a REG_EQUAL value or a source into the hash
7398 table, there is no point in recording DEST. */
7399 || sets[i].src_elt == 0
7400 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7401 or SIGN_EXTEND, don't record DEST since it can cause
7402 some tracking to be wrong.
7404 ??? Think about this more later. */
7405 || (GET_CODE (dest) == SUBREG
7406 && (GET_MODE_SIZE (GET_MODE (dest))
7407 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7408 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7409 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7410 continue;
7412 /* STRICT_LOW_PART isn't part of the value BEING set,
7413 and neither is the SUBREG inside it.
7414 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7415 if (GET_CODE (dest) == STRICT_LOW_PART)
7416 dest = SUBREG_REG (XEXP (dest, 0));
7418 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7419 /* Registers must also be inserted into chains for quantities. */
7420 if (insert_regs (dest, sets[i].src_elt, 1))
7422 /* If `insert_regs' changes something, the hash code must be
7423 recalculated. */
7424 rehash_using_reg (dest);
7425 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7428 elt = insert (dest, sets[i].src_elt,
7429 sets[i].dest_hash, GET_MODE (dest));
7430 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7431 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7432 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7433 0))));
7435 if (elt->in_memory)
7437 /* This implicitly assumes a whole struct
7438 need not have MEM_IN_STRUCT_P.
7439 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7440 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7441 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7444 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7445 narrower than M2, and both M1 and M2 are the same number of words,
7446 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7447 make that equivalence as well.
7449 However, BAR may have equivalences for which gen_lowpart_if_possible
7450 will produce a simpler value than it produces for BAR itself
7451 (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7452 BAR's equivalences. If we don't get a simplified form, make
7453 the SUBREG. It will not be used in an equivalence, but will
7454 cause two similar assignments to be detected.
7456 Note the loop below will find SUBREG_REG (DEST) since we have
7457 already entered SRC and DEST of the SET in the table. */
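/* Illustrative example (hypothetical RTL, invented register numbers):
   on a 32-bit target, given (set (subreg:SI (reg:HI 70) 0) (reg:SI 65)),
   SImode and HImode both occupy one word and SImode is no narrower, so
   the loop below also records (reg:HI 70) as equivalent to the low part
   of (reg:SI 65), i.e. to (subreg:HI (reg:SI 65) 0) or a simplified
   form of it.  */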
7459 if (GET_CODE (dest) == SUBREG
7460 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7461 / UNITS_PER_WORD)
7462 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7463 && (GET_MODE_SIZE (GET_MODE (dest))
7464 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7465 && sets[i].src_elt != 0)
7467 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7468 struct table_elt *elt, *classp = 0;
7470 for (elt = sets[i].src_elt->first_same_value; elt;
7471 elt = elt->next_same_value)
7473 rtx new_src = 0;
7474 unsigned src_hash;
7475 struct table_elt *src_elt;
7477 /* Ignore invalid entries. */
7478 if (GET_CODE (elt->exp) != REG
7479 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7480 continue;
7482 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7483 if (new_src == 0)
7484 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7486 src_hash = HASH (new_src, new_mode);
7487 src_elt = lookup (new_src, src_hash, new_mode);
7489 /* Put the new source in the hash table if it isn't
7490 already there. */
7491 if (src_elt == 0)
7493 if (insert_regs (new_src, classp, 0))
7495 rehash_using_reg (new_src);
7496 src_hash = HASH (new_src, new_mode);
7498 src_elt = insert (new_src, classp, src_hash, new_mode);
7499 src_elt->in_memory = elt->in_memory;
7500 src_elt->in_struct = elt->in_struct;
7502 else if (classp && classp != src_elt->first_same_value)
7503 /* Show that two things that we've seen before are
7504 actually the same. */
7505 merge_equiv_classes (src_elt, classp);
7507 classp = src_elt->first_same_value;
7508 /* Ignore invalid entries. */
7509 while (classp
7510 && GET_CODE (classp->exp) != REG
7511 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7512 classp = classp->next_same_value;
7517 /* Special handling for (set REG0 REG1)
7518 where REG0 is the "cheapest", cheaper than REG1.
7519 After cse, REG1 will probably not be used in the sequel,
7520 so (if easily done) change this insn to (set REG1 REG0) and
7521 replace REG1 with REG0 in the previous insn that computed their value.
7522 Then REG1 will become a dead store and won't cloud the situation
7523 for later optimizations.
7525 Do not make this change if REG1 is a hard register, because it will
7526 then be used in the sequel and we may be changing a two-operand insn
7527 into a three-operand insn.
7529 Also do not do this if we are operating on a copy of INSN. */
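/* For illustration (hypothetical RTL, invented register numbers): given
     (set (reg:SI 66) (plus:SI (reg:SI 65) (const_int 4)))
     (set (reg:SI 64) (reg:SI 66))
   where (reg:SI 64) is the cheapest register in the class, the code
   below rewrites the pair as
     (set (reg:SI 64) (plus:SI (reg:SI 65) (const_int 4)))
     (set (reg:SI 66) (reg:SI 64))
   so the second insn becomes a trivially dead store if (reg:SI 66) is
   not used later.  */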
7531 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7532 && NEXT_INSN (PREV_INSN (insn)) == insn
7533 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7534 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7535 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7536 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7537 == REGNO (SET_DEST (sets[0].rtl))))
7539 rtx prev = PREV_INSN (insn);
7540 while (prev && GET_CODE (prev) == NOTE)
7541 prev = PREV_INSN (prev);
7543 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7544 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7546 rtx dest = SET_DEST (sets[0].rtl);
7547 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7549 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7550 validate_change (insn, & SET_DEST (sets[0].rtl),
7551 SET_SRC (sets[0].rtl), 1);
7552 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7553 apply_change_group ();
7555 /* If REG1 was equivalent to a constant, REG0 is not. */
7556 if (note)
7557 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7559 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7560 any REG_WAS_0 note on INSN to PREV. */
7561 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7562 if (note)
7563 remove_note (prev, note);
7565 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7566 if (note)
7568 remove_note (insn, note);
7569 XEXP (note, 1) = REG_NOTES (prev);
7570 REG_NOTES (prev) = note;
7573 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7574 then we must delete it, because the value in REG0 has changed. */
7575 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7576 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7577 remove_note (insn, note);
7581 /* If this is a conditional jump insn, record any known equivalences due to
7582 the condition being tested. */
7584 last_jump_equiv_class = 0;
7585 if (GET_CODE (insn) == JUMP_INSN
7586 && n_sets == 1 && GET_CODE (x) == SET
7587 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7588 record_jump_equiv (insn, 0);
7590 #ifdef HAVE_cc0
7591 /* If the previous insn set CC0 and this insn no longer references CC0,
7592 delete the previous insn. Here we use the fact that nothing expects CC0
7593 to be valid over an insn, which is true until the final pass. */
7594 if (prev_insn && GET_CODE (prev_insn) == INSN
7595 && (tem = single_set (prev_insn)) != 0
7596 && SET_DEST (tem) == cc0_rtx
7597 && ! reg_mentioned_p (cc0_rtx, x))
7599 PUT_CODE (prev_insn, NOTE);
7600 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7601 NOTE_SOURCE_FILE (prev_insn) = 0;
7604 prev_insn_cc0 = this_insn_cc0;
7605 prev_insn_cc0_mode = this_insn_cc0_mode;
7606 #endif
7608 prev_insn = insn;
7611 /* Remove from the hash table all expressions that reference memory. */
7612 static void
7613 invalidate_memory ()
7615 register int i;
7616 register struct table_elt *p, *next;
7618 for (i = 0; i < NBUCKETS; i++)
7619 for (p = table[i]; p; p = next)
7621 next = p->next_same_hash;
7622 if (p->in_memory)
7623 remove_from_table (p, i);
7627 /* XXX ??? The name of this function bears little resemblance to
7628 what this function actually does. FIXME. */
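/* What it actually does: if ADDR is a stack push or pop (a PRE_DEC,
   PRE_INC, POST_DEC or POST_INC of the stack pointer), invalidate only
   the stack pointer and return 1; otherwise return 0, telling the
   caller that it must do its own memory invalidation.  */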
7629 static int
7630 note_mem_written (addr)
7631 register rtx addr;
7633 /* Pushing or popping the stack invalidates just the stack pointer. */
7634 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7635 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7636 && GET_CODE (XEXP (addr, 0)) == REG
7637 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7639 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7640 reg_tick[STACK_POINTER_REGNUM]++;
7642 /* This should be *very* rare. */
7643 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7644 invalidate (stack_pointer_rtx, VOIDmode);
7645 return 1;
7647 return 0;
7650 /* Perform invalidation on the basis of everything about an insn
7651 except for invalidating the actual places that are SET in it.
7652 This includes the places CLOBBERed, and anything that might
7653 alias with something that is SET or CLOBBERed.
7655 X is the pattern of the insn. */
7657 static void
7658 invalidate_from_clobbers (x)
7659 rtx x;
7661 if (GET_CODE (x) == CLOBBER)
7663 rtx ref = XEXP (x, 0);
7664 if (ref)
7666 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7667 || GET_CODE (ref) == MEM)
7668 invalidate (ref, VOIDmode);
7669 else if (GET_CODE (ref) == STRICT_LOW_PART
7670 || GET_CODE (ref) == ZERO_EXTRACT)
7671 invalidate (XEXP (ref, 0), GET_MODE (ref));
7674 else if (GET_CODE (x) == PARALLEL)
7676 register int i;
7677 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7679 register rtx y = XVECEXP (x, 0, i);
7680 if (GET_CODE (y) == CLOBBER)
7682 rtx ref = XEXP (y, 0);
7683 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7684 || GET_CODE (ref) == MEM)
7685 invalidate (ref, VOIDmode);
7686 else if (GET_CODE (ref) == STRICT_LOW_PART
7687 || GET_CODE (ref) == ZERO_EXTRACT)
7688 invalidate (XEXP (ref, 0), GET_MODE (ref));
7694 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7695 and replace any registers in them with either an equivalent constant
7696 or the canonical form of the register. If we are inside an address,
7697 only do this if the address remains valid.
7699 OBJECT is 0 except when within a MEM in which case it is the MEM.
7701 Return the replacement for X. */
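/* For example (hypothetical note, invented register number): in a note
   (expr_list:REG_EQUAL (plus:SI (reg:SI 64) (const_int 4)) ...)
   where (reg:SI 64) is currently known to hold (const_int 8), the REG
   case below substitutes the constant, giving
   (plus:SI (const_int 8) (const_int 4)).  */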
7703 static rtx
7704 cse_process_notes (x, object)
7705 rtx x;
7706 rtx object;
7708 enum rtx_code code = GET_CODE (x);
7709 char *fmt = GET_RTX_FORMAT (code);
7710 int i;
7712 switch (code)
7714 case CONST_INT:
7715 case CONST:
7716 case SYMBOL_REF:
7717 case LABEL_REF:
7718 case CONST_DOUBLE:
7719 case PC:
7720 case CC0:
7721 case LO_SUM:
7722 return x;
7724 case MEM:
7725 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7726 return x;
7728 case EXPR_LIST:
7729 case INSN_LIST:
7730 if (REG_NOTE_KIND (x) == REG_EQUAL)
7731 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7732 if (XEXP (x, 1))
7733 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7734 return x;
7736 case SIGN_EXTEND:
7737 case ZERO_EXTEND:
7738 case SUBREG:
7740 rtx new = cse_process_notes (XEXP (x, 0), object);
7741 /* We don't substitute VOIDmode constants into these rtx,
7742 since they would impede folding. */
7743 if (GET_MODE (new) != VOIDmode)
7744 validate_change (object, &XEXP (x, 0), new, 0);
7745 return x;
7748 case REG:
7749 i = reg_qty[REGNO (x)];
7751 /* Return a constant or a constant register. */
7752 if (REGNO_QTY_VALID_P (REGNO (x))
7753 && qty_const[i] != 0
7754 && (CONSTANT_P (qty_const[i])
7755 || GET_CODE (qty_const[i]) == REG))
7757 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7758 if (new)
7759 return new;
7762 /* Otherwise, canonicalize this register. */
7763 return canon_reg (x, NULL_RTX);
7765 default:
7766 break;
7769 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7770 if (fmt[i] == 'e')
7771 validate_change (object, &XEXP (x, i),
7772 cse_process_notes (XEXP (x, i), object), 0);
7774 return x;
7777 /* Find common subexpressions between the end test of a loop and the beginning
7778 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7780 Often we have a loop where an expression in the exit test is used
7781 in the body of the loop. For example "while (*p) *q++ = *p++;".
7782 Because of the way we duplicate the loop exit test in front of the loop,
7783 however, we don't detect that common subexpression. This will be caught
7784 when global cse is implemented, but this is quite a common case.
7786 This function handles the most common cases of these common expressions.
7787 It is called after we have processed the basic block ending with the
7788 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7789 jumps to a label used only once. */
7791 static void
7792 cse_around_loop (loop_start)
7793 rtx loop_start;
7795 rtx insn;
7796 int i;
7797 struct table_elt *p;
7799 /* If the jump at the end of the loop doesn't go to the start, we don't
7800 do anything. */
7801 for (insn = PREV_INSN (loop_start);
7802 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7803 insn = PREV_INSN (insn))
7806 if (insn == 0
7807 || GET_CODE (insn) != NOTE
7808 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7809 return;
7811 /* If the last insn of the loop (the end test) was an NE comparison,
7812 we will interpret it as an EQ comparison, since we fell through
7813 the loop. Any equivalences resulting from that comparison are
7814 therefore not valid and must be invalidated. */
7815 if (last_jump_equiv_class)
7816 for (p = last_jump_equiv_class->first_same_value; p;
7817 p = p->next_same_value)
7819 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7820 || (GET_CODE (p->exp) == SUBREG
7821 && GET_CODE (SUBREG_REG (p->exp)) == REG))
7822 invalidate (p->exp, VOIDmode);
7823 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7824 || GET_CODE (p->exp) == ZERO_EXTRACT)
7825 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
7828 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7829 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7831 The only thing we do with SET_DEST is invalidate entries, so we
7832 can safely process each SET in order. It is slightly less efficient
7833 to do so, but we only want to handle the most common cases.
7835 The gen_move_insn call in cse_set_around_loop may create new pseudos.
7836 These pseudos won't have valid entries in any of the tables indexed
7837 by register number, such as reg_qty. We avoid out-of-range array
7838 accesses by not processing any instructions created after cse started. */
7840 for (insn = NEXT_INSN (loop_start);
7841 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7842 && INSN_UID (insn) < max_insn_uid
7843 && ! (GET_CODE (insn) == NOTE
7844 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7845 insn = NEXT_INSN (insn))
7847 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7848 && (GET_CODE (PATTERN (insn)) == SET
7849 || GET_CODE (PATTERN (insn)) == CLOBBER))
7850 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7851 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7852 && GET_CODE (PATTERN (insn)) == PARALLEL)
7853 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7854 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7855 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7856 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7857 loop_start);
7861 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7862 since they are done elsewhere. This function is called via note_stores. */
7864 static void
7865 invalidate_skipped_set (dest, set)
7866 rtx dest;
7867 rtx set;
7869 enum rtx_code code = GET_CODE (dest);
7871 if (code == MEM
7872 && ! note_mem_written (dest) /* If this is not a stack push ... */
7873 /* There are times when an address can appear varying and be a PLUS
7874 during this scan when it would be a fixed address were we to know
7875 the proper equivalences. So invalidate all memory if there is
7876 a BLKmode or nonscalar memory reference or a reference to a
7877 variable address. */
7878 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
7879 || cse_rtx_varies_p (XEXP (dest, 0))))
7881 invalidate_memory ();
7882 return;
7885 if (GET_CODE (set) == CLOBBER
7886 #ifdef HAVE_cc0
7887 || dest == cc0_rtx
7888 #endif
7889 || dest == pc_rtx)
7890 return;
7892 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
7893 invalidate (XEXP (dest, 0), GET_MODE (dest));
7894 else if (code == REG || code == SUBREG || code == MEM)
7895 invalidate (dest, VOIDmode);
7898 /* Invalidate all insns from START up to the end of the function or the
7899 next label. This is called when we wish to CSE around a block that is
7900 conditionally executed. */
7902 static void
7903 invalidate_skipped_block (start)
7904 rtx start;
7906 rtx insn;
7908 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7909 insn = NEXT_INSN (insn))
7911 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7912 continue;
7914 if (GET_CODE (insn) == CALL_INSN)
7916 if (! CONST_CALL_P (insn))
7917 invalidate_memory ();
7918 invalidate_for_call ();
7921 note_stores (PATTERN (insn), invalidate_skipped_set);
7925 /* Used for communication between the following two routines; contains a
7926 value to be checked for modification. */
7928 static rtx cse_check_loop_start_value;
7930 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7931 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7933 static void
7934 cse_check_loop_start (x, set)
7935 rtx x;
7936 rtx set;
7938 if (cse_check_loop_start_value == 0
7939 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7940 return;
7942 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7943 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7944 cse_check_loop_start_value = 0;
7947 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7948 a loop that starts with the label at LOOP_START.
7950 If X is a SET, we see if its SET_SRC is currently in our hash table.
7951 If so, we see if it has a value equal to some register used only in the
7952 loop exit code (as marked by jump.c).
7954 If those two conditions are true, we search backwards from the start of
7955 the loop to see if that same value was loaded into a register that still
7956 retains its value at the start of the loop.
7958 If so, we insert an insn after the load to copy the destination of that
7959 load into the equivalent register and (try to) replace our SET_SRC with that
7960 register.
7962 In any event, we invalidate whatever this SET or CLOBBER modifies. */
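/* A sketch of the transformation (hypothetical RTL, invented register
   numbers): suppose the loop contains
     (set (reg:SI 80) (mem:SI (reg:SI 64)))
   and the hash table says (mem:SI (reg:SI 64)) is equivalent to
   (reg:SI 70), a cheaper register marked REG_LOOP_TEST_P.  If an insn
   P before the loop does (set (reg:SI 65) (mem:SI (reg:SI 64))) and
   nothing between P and the loop start clobbers the value, we emit
   (set (reg:SI 70) (reg:SI 65)) after P and change the loop insn to
   (set (reg:SI 80) (reg:SI 70)).  */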
7964 static void
7965 cse_set_around_loop (x, insn, loop_start)
7966 rtx x;
7967 rtx insn;
7968 rtx loop_start;
7970 struct table_elt *src_elt;
7972 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7973 are setting PC or CC0 or whose SET_SRC is already a register. */
7974 if (GET_CODE (x) == SET
7975 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7976 && GET_CODE (SET_SRC (x)) != REG)
7978 src_elt = lookup (SET_SRC (x),
7979 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7980 GET_MODE (SET_DEST (x)));
7982 if (src_elt)
7983 for (src_elt = src_elt->first_same_value; src_elt;
7984 src_elt = src_elt->next_same_value)
7985 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7986 && COST (src_elt->exp) < COST (SET_SRC (x)))
7988 rtx p, set;
7990 /* Look for an insn in front of LOOP_START that sets
7991 something in the desired mode to SET_SRC (x) before we hit
7992 a label or CALL_INSN. */
7994 for (p = prev_nonnote_insn (loop_start);
7995 p && GET_CODE (p) != CALL_INSN
7996 && GET_CODE (p) != CODE_LABEL;
7997 p = prev_nonnote_insn (p))
7998 if ((set = single_set (p)) != 0
7999 && GET_CODE (SET_DEST (set)) == REG
8000 && GET_MODE (SET_DEST (set)) == src_elt->mode
8001 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8003 /* We now have to ensure that nothing between P
8004 and LOOP_START modified anything referenced in
8005 SET_SRC (x). We know that nothing within the loop
8006 can modify it, or we would have invalidated it in
8007 the hash table. */
8008 rtx q;
8010 cse_check_loop_start_value = SET_SRC (x);
8011 for (q = p; q != loop_start; q = NEXT_INSN (q))
8012 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8013 note_stores (PATTERN (q), cse_check_loop_start);
8015 /* If nothing was changed and we can replace our
8016 SET_SRC, add an insn after P to copy its destination
8017 to what we will be replacing SET_SRC with. */
8018 if (cse_check_loop_start_value
8019 && validate_change (insn, &SET_SRC (x),
8020 src_elt->exp, 0))
8022 /* If this creates new pseudos, this is unsafe,
8023 because the regno of a new pseudo cannot be used
8024 to index into reg_qty when cse_insn processes
8025 the new insn. Therefore, if a new pseudo was
8026 created, discard this optimization. */
8027 int nregs = max_reg_num ();
8028 rtx move
8029 = gen_move_insn (src_elt->exp, SET_DEST (set));
8030 if (nregs != max_reg_num ())
8032 if (! validate_change (insn, &SET_SRC (x),
8033 SET_SRC (set), 0))
8034 abort ();
8036 else
8037 emit_insn_after (move, p);
8039 break;
8044 /* Now invalidate anything modified by X. */
8045 note_mem_written (SET_DEST (x));
8047 /* See comment on similar code in cse_insn for explanation of these tests. */
8048 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
8049 || GET_CODE (SET_DEST (x)) == MEM)
8050 invalidate (SET_DEST (x), VOIDmode);
8051 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8052 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
8053 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
8056 /* Find the end of INSN's basic block and return its range,
8057 the total number of SETs in all the insns of the block, the last insn of the
8058 block, and the branch path.
8060 The branch path indicates which branches should be followed. If a non-zero
8061 path size is specified, the block should be rescanned and a different set
8062 of branches will be taken. The branch path is only used if
8063 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
8065 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8066 used to describe the block. It is filled in with the information about
8067 the current block. The incoming structure's branch path, if any, is used
8068 to construct the output branch path. */
8070 void
8071 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
8072 rtx insn;
8073 struct cse_basic_block_data *data;
8074 int follow_jumps;
8075 int after_loop;
8076 int skip_blocks;
8078 rtx p = insn, q;
8079 int nsets = 0;
8080 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
8081 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
8082 int path_size = data->path_size;
8083 int path_entry = 0;
8084 int i;
8086 /* Update the previous branch path, if any. If the last branch was
8087 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
8088 shorten the path by one and look at the previous branch. We know that
8089 at least one branch must have been taken if PATH_SIZE is non-zero. */
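/* For example, if the previous scan followed two branches, the path
   goes from TAKEN,TAKEN to TAKEN,NOT_TAKEN; the next rescan shortens
   it to NOT_TAKEN; and once every entry is NOT_TAKEN the path becomes
   empty and no further rescan is requested.  */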
8090 while (path_size > 0)
8092 if (data->path[path_size - 1].status != NOT_TAKEN)
8094 data->path[path_size - 1].status = NOT_TAKEN;
8095 break;
8097 else
8098 path_size--;
8101 /* Scan to end of this basic block. */
8102 while (p && GET_CODE (p) != CODE_LABEL)
8104 /* Don't cse out the end of a loop. This makes a difference
8105 only for the unusual loops that always execute at least once;
8106 all other loops have labels there so we will stop in any case.
8107 Cse'ing out the end of the loop is dangerous because it
8108 might cause an invariant expression inside the loop
8109 to be reused after the end of the loop. This would make it
8110 hard to move the expression out of the loop in loop.c,
8111 especially if it is one of several equivalent expressions
8112 and loop.c would like to eliminate it.
8114 If we are running after loop.c has finished, we can ignore
8115 the NOTE_INSN_LOOP_END. */
8117 if (! after_loop && GET_CODE (p) == NOTE
8118 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8119 break;
8121 /* Don't cse over a call to setjmp; on some machines (e.g., VAX)
8122 the regs restored by the longjmp come from
8123 a later time than the setjmp. */
8124 if (GET_CODE (p) == NOTE
8125 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8126 break;
8128 /* A PARALLEL can have lots of SETs in it,
8129 especially if it is really an ASM_OPERANDS. */
8130 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8131 && GET_CODE (PATTERN (p)) == PARALLEL)
8132 nsets += XVECLEN (PATTERN (p), 0);
8133 else if (GET_CODE (p) != NOTE)
8134 nsets += 1;
8136 /* Ignore insns made by CSE; they cannot affect the boundaries of
8137 the basic block. */
8139 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8140 high_cuid = INSN_CUID (p);
8141 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8142 low_cuid = INSN_CUID (p);
8144 /* See if this insn is in our branch path. If it is and we are to
8145 take it, do so. */
8146 if (path_entry < path_size && data->path[path_entry].branch == p)
8148 if (data->path[path_entry].status != NOT_TAKEN)
8149 p = JUMP_LABEL (p);
8151 /* Point to next entry in path, if any. */
8152 path_entry++;
8155 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8156 was specified, we haven't reached our maximum path length, there are
8157 insns following the target of the jump, this is the only use of the
8158 jump label, and the target label is preceded by a BARRIER.
8160 Alternatively, we can follow the jump if it branches around a
8161 block of code and there are no other branches into the block.
8162 In this case invalidate_skipped_block will be called to invalidate any
8163 registers set in the block when following the jump. */
8165 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8166 && GET_CODE (p) == JUMP_INSN
8167 && GET_CODE (PATTERN (p)) == SET
8168 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8169 && JUMP_LABEL (p) != 0
8170 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8171 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8173 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8174 if ((GET_CODE (q) != NOTE
8175 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8176 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8177 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8178 break;
8180 /* If we ran into a BARRIER, this code is an extension of the
8181 basic block when the branch is taken. */
8182 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8184 /* Don't allow ourselves to keep walking around an
8185 always-executed loop. */
8186 if (next_real_insn (q) == next)
8188 p = NEXT_INSN (p);
8189 continue;
8192 /* Similarly, don't put a branch in our path more than once. */
8193 for (i = 0; i < path_entry; i++)
8194 if (data->path[i].branch == p)
8195 break;
8197 if (i != path_entry)
8198 break;
8200 data->path[path_entry].branch = p;
8201 data->path[path_entry++].status = TAKEN;
8203 /* This branch now ends our path. It was possible that we
8204 didn't see this branch the last time around (when the
8205 insn in front of the target was a JUMP_INSN that was
8206 turned into a no-op). */
8207 path_size = path_entry;
8209 p = JUMP_LABEL (p);
8210 /* Mark block so we won't scan it again later. */
8211 PUT_MODE (NEXT_INSN (p), QImode);
8213 /* Detect a branch around a block of code. */
8214 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8216 register rtx tmp;
8218 if (next_real_insn (q) == next)
8220 p = NEXT_INSN (p);
8221 continue;
8224 for (i = 0; i < path_entry; i++)
8225 if (data->path[i].branch == p)
8226 break;
8228 if (i != path_entry)
8229 break;
8231 /* This is no_labels_between_p (p, q) with an added check for
8232 reaching the end of a function (in case Q precedes P). */
8233 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8234 if (GET_CODE (tmp) == CODE_LABEL)
8235 break;
8237 if (tmp == q)
8239 data->path[path_entry].branch = p;
8240 data->path[path_entry++].status = AROUND;
8242 path_size = path_entry;
8244 p = JUMP_LABEL (p);
8245 /* Mark block so we won't scan it again later. */
8246 PUT_MODE (NEXT_INSN (p), QImode);
8250 p = NEXT_INSN (p);
8253 data->low_cuid = low_cuid;
8254 data->high_cuid = high_cuid;
8255 data->nsets = nsets;
8256 data->last = p;
8258 /* If no jump in the path was taken, set our path length to zero
8259 so a rescan won't be done. */
8260 for (i = path_size - 1; i >= 0; i--)
8261 if (data->path[i].status != NOT_TAKEN)
8262 break;
8264 if (i == -1)
8265 data->path_size = 0;
8266 else
8267 data->path_size = path_size;
8269 /* End the current branch path. */
8270 data->path[path_size].branch = 0;
8273 /* Perform cse on the instructions of a function.
8274 F is the first instruction.
8275 NREGS is one plus the highest pseudo-reg number used in the function.
8277 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8278 (only if -frerun-cse-after-loop).
8280 Returns 1 if jump_optimize should be redone due to simplifications
8281 in conditional jump instructions. */
8283 int
8284 cse_main (f, nregs, after_loop, file)
8285 rtx f;
8286 int nregs;
8287 int after_loop;
8288 FILE *file;
8290 struct cse_basic_block_data val;
8291 register rtx insn = f;
8292 register int i;
8294 cse_jumps_altered = 0;
8295 recorded_label_ref = 0;
8296 constant_pool_entries_cost = 0;
8297 val.path_size = 0;
8299 init_recog ();
8300 init_alias_analysis ();
8302 max_reg = nregs;
8304 max_insn_uid = get_max_uid ();
8306 all_minus_one = (int *) alloca (nregs * sizeof (int));
8307 consec_ints = (int *) alloca (nregs * sizeof (int));
8309 for (i = 0; i < nregs; i++)
8311 all_minus_one[i] = -1;
8312 consec_ints[i] = i;
8315 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8316 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8317 reg_qty = (int *) alloca (nregs * sizeof (int));
8318 reg_in_table = (int *) alloca (nregs * sizeof (int));
8319 reg_tick = (int *) alloca (nregs * sizeof (int));
8321 #ifdef LOAD_EXTEND_OP
8323 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8324 and change the code and mode as appropriate. */
8325 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
8326 #endif
8328 /* Discard all the free elements of the previous function
8329 since they are allocated on the temporary obstack. */
8330 bzero ((char *) table, sizeof table);
8331 free_element_chain = 0;
8332 n_elements_made = 0;
8334 /* Find the largest uid. */
8336 max_uid = get_max_uid ();
8337 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8338 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8340 /* Compute the mapping from uids to cuids.
8341 CUIDs are numbers assigned to insns, like uids,
8342 except that cuids increase monotonically through the code.
8343 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8344 between two insns is not affected by -g. */
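/* For instance, an insn, a following line-number note, and another
   insn receive cuids 1, 1 and 2 respectively, so the cuid distance
   between the two insns is the same with or without -g.  */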
8346 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8348 if (GET_CODE (insn) != NOTE
8349 || NOTE_LINE_NUMBER (insn) < 0)
8350 INSN_CUID (insn) = ++i;
8351 else
8352 /* Give a line number note the same cuid as preceding insn. */
8353 INSN_CUID (insn) = i;
8356 /* Initialize which registers are clobbered by calls. */
8358 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8360 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8361 if ((call_used_regs[i]
8362 /* Used to check !fixed_regs[i] here, but that isn't safe;
8363 fixed regs are still call-clobbered, and sched can get
8364 confused if they can "live across calls".
8366 The frame pointer is always preserved across calls. The arg
8367 pointer is preserved if it is fixed. The stack pointer usually is, unless
8368 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8369 will be present. If we are generating PIC code, the PIC offset
8370 table register is preserved across calls. */
8372 && i != STACK_POINTER_REGNUM
8373 && i != FRAME_POINTER_REGNUM
8374 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8375 && i != HARD_FRAME_POINTER_REGNUM
8376 #endif
8377 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8378 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8379 #endif
8380 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8381 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8382 #endif
8384 || global_regs[i])
8385 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8387 /* Loop over basic blocks.
8388 Compute the maximum number of qty's needed for each basic block
8389 (which is 2 for each SET). */
8390 insn = f;
8391 while (insn)
8393 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8394 flag_cse_skip_blocks);
8396 /* If this basic block was already processed or has no sets, skip it. */
8397 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8399 PUT_MODE (insn, VOIDmode);
8400 insn = (val.last ? NEXT_INSN (val.last) : 0);
8401 val.path_size = 0;
8402 continue;
8405 cse_basic_block_start = val.low_cuid;
8406 cse_basic_block_end = val.high_cuid;
8407 max_qty = val.nsets * 2;
8409 if (file)
8410 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8411 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8412 val.nsets);
8414 /* Make MAX_QTY bigger to give us room to optimize
8415 past the end of this basic block, if that should prove useful. */
8416 if (max_qty < 500)
8417 max_qty = 500;
8419 max_qty += max_reg;
8421 /* If this basic block is being extended by following certain jumps,
8422 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8423 Otherwise, we start after this basic block. */
8424 if (val.path_size > 0)
8425 cse_basic_block (insn, val.last, val.path, 0);
8426 else
8428 int old_cse_jumps_altered = cse_jumps_altered;
8429 rtx temp;
8431 /* When cse changes a conditional jump to an unconditional
8432 jump, we want to reprocess the block, since it will give
8433 us a new branch path to investigate. */
8434 cse_jumps_altered = 0;
8435 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8436 if (cse_jumps_altered == 0
8437 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8438 insn = temp;
8440 cse_jumps_altered |= old_cse_jumps_altered;
8443 #ifdef USE_C_ALLOCA
8444 alloca (0);
8445 #endif
8448 /* Tell refers_to_mem_p that qty_const info is not available. */
8449 qty_const = 0;
8451 if (max_elements_made < n_elements_made)
8452 max_elements_made = n_elements_made;
8454 return cse_jumps_altered || recorded_label_ref;
8457 /* Process a single basic block. FROM and TO are the limits of the basic
8458 block. NEXT_BRANCH points to the branch path when following jumps or
8459 a null path when not following jumps.
8461 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8462 loop. This is true when we are being called for the last time on a
8463 block and this CSE pass is before loop.c. */
8465 static rtx
8466 cse_basic_block (from, to, next_branch, around_loop)
8467 register rtx from, to;
8468 struct branch_path *next_branch;
8469 int around_loop;
8471 register rtx insn;
8472 int to_usage = 0;
8473 int in_libcall_block = 0;
8474 int num_insns = 0;
8476 /* Each of these arrays is undefined before max_reg, so only allocate
8477 the space actually needed and adjust the start below. */
8479 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8480 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8481 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8482 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8483 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8484 qty_comparison_code
8485 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8486 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8487 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8489 qty_first_reg -= max_reg;
8490 qty_last_reg -= max_reg;
8491 qty_mode -= max_reg;
8492 qty_const -= max_reg;
8493 qty_const_insn -= max_reg;
8494 qty_comparison_code -= max_reg;
8495 qty_comparison_qty -= max_reg;
8496 qty_comparison_const -= max_reg;
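/* After the adjustments above, each base pointer may be indexed
   directly by any quantity number q with max_reg <= q < max_qty,
   even though only max_qty - max_reg elements were allocated;
   e.g. qty_mode[max_reg] is the first allocated element.  */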
8498 new_basic_block ();
8500 /* TO might be a label. If so, protect it from being deleted. */
8501 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8502 ++LABEL_NUSES (to);
8504 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8506 register enum rtx_code code;
8507 int i;
8508 struct table_elt *p, *next;
8510 /* If we have processed 1,000 insns, flush the hash table to avoid
8511 extreme quadratic behavior.
8513 ??? This is a real kludge and needs to be done some other way.
8514 Perhaps for 2.9. */
8515 if (num_insns++ > 1000)
8517 for (i = 0; i < NBUCKETS; i++)
8518 for (p = table[i]; p; p = next)
8520 next = p->next_same_hash;
8522 if (GET_CODE (p->exp) == REG)
8523 invalidate (p->exp, p->mode);
8524 else
8525 remove_from_table (p, i);
8528 num_insns = 0;
8531 /* See if this is a branch that is part of the path. If so, and it is
8532 to be taken, do so. */
8533 if (next_branch->branch == insn)
8535 enum taken status = next_branch++->status;
8536 if (status != NOT_TAKEN)
8538 if (status == TAKEN)
8539 record_jump_equiv (insn, 1);
8540 else
8541 invalidate_skipped_block (NEXT_INSN (insn));
8543 /* Set the last insn as the jump insn; it doesn't affect cc0.
8544 Then follow this branch. */
8545 #ifdef HAVE_cc0
8546 prev_insn_cc0 = 0;
8547 #endif
8548 prev_insn = insn;
8549 insn = JUMP_LABEL (insn);
8550 continue;
8554 code = GET_CODE (insn);
8555 if (GET_MODE (insn) == QImode)
8556 PUT_MODE (insn, VOIDmode);
8558 if (GET_RTX_CLASS (code) == 'i')
8560 /* Process notes first so we have all notes in canonical forms when
8561 looking for duplicate operations. */
8563 if (REG_NOTES (insn))
8564 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8566 /* Track when we are inside a LIBCALL block. Inside such a block,
8567 we do not want to record destinations. The last insn of a
8568 LIBCALL block is not considered to be part of the block, since
8569 its destination is the result of the block and hence should be
8570 recorded. */
8572 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8573 in_libcall_block = 1;
8574 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8575 in_libcall_block = 0;
8577 cse_insn (insn, in_libcall_block);
8580 /* If INSN is now an unconditional jump, skip to the end of our
8581 basic block by pretending that we just did the last insn in the
8582 basic block. If we are jumping to the end of our block, show
8583 that we can have one usage of TO. */
8585 if (simplejump_p (insn))
8587 if (to == 0)
8588 return 0;
8590 if (JUMP_LABEL (insn) == to)
8591 to_usage = 1;
8593 /* Maybe TO was deleted because the jump is unconditional.
8594 If so, there is nothing left in this basic block. */
8595 /* ??? Perhaps it would be smarter to set TO
8596 to whatever follows this insn,
8597 and pretend the basic block had always ended here. */
8598 if (INSN_DELETED_P (to))
8599 break;
8601 insn = PREV_INSN (to);
8604 /* See if it is ok to keep on going past the label
8605 which used to end our basic block. Remember that we incremented
8606 the count of that label, so we decrement it here. If we made
8607 a jump unconditional, TO_USAGE will be one; in that case, we don't
8608 want to count the use in that jump. */
8610 if (to != 0 && NEXT_INSN (insn) == to
8611 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8613 struct cse_basic_block_data val;
8614 rtx prev;
8616 insn = NEXT_INSN (to);
8618 if (LABEL_NUSES (to) == 0)
8619 insn = delete_insn (to);
8621 /* If TO was the last insn in the function, we are done. */
8622 if (insn == 0)
8623 return 0;
8625 /* If TO was preceded by a BARRIER we are done with this block
8626 because it has no continuation. */
8627 prev = prev_nonnote_insn (to);
8628 if (prev && GET_CODE (prev) == BARRIER)
8629 return insn;
8631 /* Find the end of the following block. Note that we won't be
8632 following branches in this case. */
8633 to_usage = 0;
8634 val.path_size = 0;
8635 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8637 /* If the tables we allocated have enough space left
8638 to handle all the SETs in the next basic block,
8639 continue through it. Otherwise, return,
8640 and that block will be scanned individually. */
8641 if (val.nsets * 2 + next_qty > max_qty)
8642 break;
8644 cse_basic_block_start = val.low_cuid;
8645 cse_basic_block_end = val.high_cuid;
8646 to = val.last;
8648 /* Prevent TO from being deleted if it is a label. */
8649 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8650 ++LABEL_NUSES (to);
8652 /* Back up so we process the first insn in the extension. */
8653 insn = PREV_INSN (insn);
8657 if (next_qty > max_qty)
8658 abort ();
8660 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8661 the previous insn is the only insn that branches to the head of a loop,
8662 we can cse into the loop. Don't do this if we changed the jump
8663 structure of a loop unless we aren't going to be following jumps. */
8665 if ((cse_jumps_altered == 0
8666 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8667 && around_loop && to != 0
8668 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8669 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8670 && JUMP_LABEL (PREV_INSN (to)) != 0
8671 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8672 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8674 return to ? NEXT_INSN (to) : 0;
8677 /* Count the number of times registers are used (not set) in X.
8678 COUNTS is an array in which we accumulate the count, INCR is how much
8679 we count each register usage.
8681 Don't count a usage of DEST, which is the SET_DEST of a SET which
8682 contains X in its SET_SRC. This is because such a SET does not
8683 modify the liveness of DEST. */
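/* For example (hypothetical insn, invented register numbers): for
   (set (reg:SI 64) (plus:SI (reg:SI 64) (reg:SI 65))) we increment
   counts[65] but not counts[64], since the use of (reg:SI 64) in the
   source disappears if the insn itself is deleted.  */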
8685 static void
8686 count_reg_usage (x, counts, dest, incr)
8687 rtx x;
8688 int *counts;
8689 rtx dest;
8690 int incr;
8692 enum rtx_code code;
8693 char *fmt;
8694 int i, j;
8696 if (x == 0)
8697 return;
8699 switch (code = GET_CODE (x))
8701 case REG:
8702 if (x != dest)
8703 counts[REGNO (x)] += incr;
8704 return;
8706 case PC:
8707 case CC0:
8708 case CONST:
8709 case CONST_INT:
8710 case CONST_DOUBLE:
8711 case SYMBOL_REF:
8712 case LABEL_REF:
8713 case CLOBBER:
8714 return;
8716 case SET:
8717 /* Unless we are setting a REG, count everything in SET_DEST. */
8718 if (GET_CODE (SET_DEST (x)) != REG)
8719 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8721 /* If SRC has side-effects, then we can't delete this insn, so the
8722 usage of SET_DEST inside SRC counts.
8724 ??? Strictly-speaking, we might be preserving this insn
8725 because some other SET has side-effects, but that's hard
8726 to do and can't happen now. */
8727 count_reg_usage (SET_SRC (x), counts,
8728 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8729 incr);
8730 return;
8732 case CALL_INSN:
8733 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8735 /* ... falls through ... */
8736 case INSN:
8737 case JUMP_INSN:
8738 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8740 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8741 use them. */
8743 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8744 return;
8746 case EXPR_LIST:
8747 case INSN_LIST:
8748 if (REG_NOTE_KIND (x) == REG_EQUAL
8749 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
8750 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8751 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8752 return;
8754 default:
8755 break;
8758 fmt = GET_RTX_FORMAT (code);
8759 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8761 if (fmt[i] == 'e')
8762 count_reg_usage (XEXP (x, i), counts, dest, incr);
8763 else if (fmt[i] == 'E')
8764 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8765 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8769 /* Scan all the insns and delete any that are dead; i.e., they store a register
8770 that is never used or they copy a register to itself.
8772 This is used to remove insns made obviously dead by cse, loop or other
8773 optimizations. It improves the heuristics in loop since it won't try to
8774 move dead invariants out of loops or make givs for dead quantities. The
8775 remaining passes of the compilation are also sped up. */
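/* For example (hypothetical insns, invented register numbers): if
     (set (reg:SI 80) (plus:SI (reg:SI 65) (const_int 4)))
   has no side effects and counts[80] is zero, the insn is deleted and
   counts[65] is decremented, which may in turn allow the insn that set
   (reg:SI 65) to be deleted later in the backward scan.  */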
8777 void
8778 delete_trivially_dead_insns (insns, nreg)
8779 rtx insns;
8780 int nreg;
8782 int *counts = (int *) alloca (nreg * sizeof (int));
8783 rtx insn, prev;
8784 #ifdef HAVE_cc0
8785 rtx tem;
8786 #endif
8787 int i;
8788 int in_libcall = 0, dead_libcall = 0;
8790 /* First count the number of times each register is used. */
8791 bzero ((char *) counts, sizeof (int) * nreg);
8792 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8793 count_reg_usage (insn, counts, NULL_RTX, 1);
8795 /* Go from the last insn to the first and delete insns that only set unused
8796 registers or copy a register to itself. As we delete an insn, remove
8797 usage counts for registers it uses. */
8798 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8800 int live_insn = 0;
8801 rtx note;
8803 prev = prev_real_insn (insn);
8805 /* Don't delete any insns that are part of a libcall block unless
8806 we can delete the whole libcall block.
8808 Flow or loop might get confused if we did that. Remember
8809 that we are scanning backwards. */
8810 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8812 in_libcall = 1;
8813 live_insn = 1;
8814 dead_libcall = 0;
8816 /* See if there's a REG_EQUAL note on this insn and try to
8817 replace the source with the REG_EQUAL expression.
8819 We assume that insns with REG_RETVALs can only be reg->reg
8820 copies at this point. */
8821 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
8822 if (note)
8824 rtx set = single_set (insn);
8825 if (set
8826 && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
8828 remove_note (insn,
8829 find_reg_note (insn, REG_RETVAL, NULL_RTX));
8830 dead_libcall = 1;
8834 else if (in_libcall)
8835 live_insn = ! dead_libcall;
8836 else if (GET_CODE (PATTERN (insn)) == SET)
8838 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8839 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8842 #ifdef HAVE_cc0
8843 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8844 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8845 && ((tem = next_nonnote_insn (insn)) == 0
8846 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8847 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8849 #endif
8850 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8851 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8852 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8853 || side_effects_p (SET_SRC (PATTERN (insn))))
8854 live_insn = 1;
8856 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8857 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8859 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8861 if (GET_CODE (elt) == SET)
8863 if (GET_CODE (SET_DEST (elt)) == REG
8864 && SET_DEST (elt) == SET_SRC (elt))
8867 #ifdef HAVE_cc0
8868 else if (GET_CODE (SET_DEST (elt)) == CC0
8869 && ! side_effects_p (SET_SRC (elt))
8870 && ((tem = next_nonnote_insn (insn)) == 0
8871 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8872 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8874 #endif
8875 else if (GET_CODE (SET_DEST (elt)) != REG
8876 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8877 || counts[REGNO (SET_DEST (elt))] != 0
8878 || side_effects_p (SET_SRC (elt)))
8879 live_insn = 1;
8881 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8882 live_insn = 1;
8884 else
8885 live_insn = 1;
8887 /* If this is a dead insn, delete it and remove the usage counts
8888 for the registers it uses. */
8890 if (! live_insn)
8892 count_reg_usage (insn, counts, NULL_RTX, -1);
8893 delete_insn (insn);
8896 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8898 in_libcall = 0;
8899 dead_libcall = 0;