/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-7, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "hashtab.h"
#include "ggc.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   that quantity's `qty_mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match makes the entries be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
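
/* A minimal sketch of how the quantity chains are used, in terms of the
   variables declared below: to visit every register currently equivalent
   to REG (assuming REG already has a valid quantity), one would write

	int q = REG_QTY (reg);
	int r;

	for (r = qty_first_reg[q]; r != -1; r = reg_next_eqv[r])
	  visit (r);

   where `visit' is a placeholder, not a real function.  The head of the
   chain, qty_first_reg[q], is the preferred replacement for any other
   register on the chain.  */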
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;
#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;
struct cse_reg_info {
  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* Search key.  */
  int regno;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;
/* A mapping from registers to cse_reg_info data structures.  */
static hash_table_t cse_reg_info_tree;

/* The last lookup we did into the cse_reg_info_tree.  This allows us
   to cache repeated lookups.  */
static int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;
/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */
struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)						\
  (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) % NBUCKETS \
   : canon_hash (X, M) % NBUCKETS)
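
/* A typical use of HASH, mirroring calls made later in this file
   (e.g. in `invalidate' and `merge_equiv_classes'):

	unsigned hash = HASH (x, GET_MODE (x));
	struct table_elt *elt = lookup (x, hash, GET_MODE (x));

   Callers must watch `do_not_record', which canon_hash may set while
   computing the hash code.  */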
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N])	  \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif
/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER)	\
   || CHEAP_REGNO (REGNO (N)))

#define COST(X)						\
  (GET_CODE (X) == REG					\
   ? (CHEAP_REG (X) ? 0					\
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1		\
      : 2)						\
   : notreg_cost (X))
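
/* So, for example, COST (frame_pointer_rtx) is 0 because the frame
   pointer is a cheap fixed register, an ordinary pseudo costs 1, a
   non-fixed hard register costs 2, and anything more complex falls
   through to notreg_cost and ultimately rtx_cost below.  */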
/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) \
  (((N) == cached_regno && cached_cse_reg_info) \
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))
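
/* A sketch of how these macros cooperate (see also the overview comment
   at the top of this file): a store into register I is recorded as

	REG_TICK (I)++;

   and a table entry mentioning register I is valid only while
   REG_IN_TABLE (I) == REG_TICK (I); when they differ, mention_regs
   removes the stale references before any new ones are entered.  */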
#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX) \
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
   ? -1 : ADDRESS_COST (RTX))
#endif
static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)
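
/* As an illustration, FIXED_BASE_PLUS_P accepts an rtx such as

	(plus:SI (reg/f:SI FRAME_POINTER_REGNUM) (const_int 8))

   as well as the bare frame pointer itself, but rejects a PLUS whose
   second operand is not a CONST_INT.  */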
/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)
static int notreg_cost PROTO((rtx));
static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate PROTO((rtx, enum machine_mode));
static int cse_rtx_varies_p PROTO((rtx));
static void remove_invalid_refs PROTO((int));
static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((void));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
				 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
				      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
				    rtx, rtx, int));
static void cse_insn PROTO((rtx, rtx));
static int note_mem_written PROTO((rtx));
static void invalidate_from_clobbers PROTO((rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx, void *));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx, void *));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));
extern void dump_class PROTO((struct table_elt *));
static void check_fold_consts PROTO((PTR));
static struct cse_reg_info *get_cse_reg_info PROTO((int));
static unsigned int hash_cse_reg_info PROTO((hash_table_entry_t));
static int cse_reg_info_equal_p PROTO((hash_table_entry_t,
				       hash_table_entry_t));

static void flush_hash_table PROTO((void));
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
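
/* Because dump_class is extern, it is convenient to call by hand from a
   debugger; for example, under gdb something like

	(gdb) call dump_class (elt)

   prints the entire equivalence class of ELT to stderr.  */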
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
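
/* Under this definition COSTS_N_INSNS (1) == 2, the default cost of a
   simple operation, while the multiply and divide defaults below come
   out as COSTS_N_INSNS (5) == 18 and COSTS_N_INSNS (7) == 26.  */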
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
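
/* An illustrative call, following the pattern used by the COST macro and
   notreg_cost above (`total_cost' is a placeholder variable):

	total_cost = rtx_cost (x, SET) * 2;

   The outer code is passed along so that the RTX_COSTS and CONST_COSTS
   macros can take the context of the expression into account.  */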
static struct cse_reg_info *
get_cse_reg_info (regno)
     int regno;
{
  struct cse_reg_info *cri;
  struct cse_reg_info **entry;
  struct cse_reg_info temp;

  /* See if we already have this entry.  */
  temp.regno = regno;
  entry = (struct cse_reg_info **) find_hash_table_entry (cse_reg_info_tree,
							  &temp, TRUE);

  if (*entry)
    cri = *entry;
  else
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  cri = cse_reg_info_free_list;
	  cse_reg_info_free_list = cri->next;
	}
      else
	cri = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Initialize it.  */
      cri->reg_tick = 0;
      cri->reg_in_table = -1;
      cri->reg_qty = regno;
      cri->regno = regno;
      cri->next = cse_reg_info_used_list;
      cse_reg_info_used_list = cri;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = cri;

      *entry = cri;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = cri;

  return cri;
}
static unsigned int
hash_cse_reg_info (el_ptr)
     hash_table_entry_t el_ptr;
{
  return ((const struct cse_reg_info *) el_ptr)->regno;
}

static int
cse_reg_info_equal_p (el_ptr1, el_ptr2)
     hash_table_entry_t el_ptr1;
     hash_table_entry_t el_ptr2;
{
  return (((const struct cse_reg_info *) el_ptr1)->regno
	  == ((const struct cse_reg_info *) el_ptr2)->regno);
}
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  if (cse_reg_info_tree)
    {
      delete_hash_table (cse_reg_info_tree);
      if (cse_reg_info_used_list)
	{
	  cse_reg_info_used_list_end->next = cse_reg_info_free_list;
	  cse_reg_info_free_list = cse_reg_info_used_list;
	  cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
	}
      cached_cse_reg_info = 0;
    }

  cse_reg_info_tree = create_hash_table (0, hash_cse_reg_info,
					 cse_reg_info_equal_p);

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = REG_QTY (old);

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = REG_QTY (reg);
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  REG_QTY (reg) = reg;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register const char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[REG_QTY (regno)] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[REG_QTY (regno)] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}
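
/* A typical lookup, as performed throughout this file:

	hash = HASH (exp, mode);
	elt = lookup (exp, hash, mode);

   A null result simply means that no equivalent expression has been
   seen so far in this extended basic block.  */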
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[REG_QTY (REGNO (classp->exp))]
	= gen_lowpart_if_possible (qty_mode[REG_QTY (REGNO (classp->exp))], x);
      qty_const_insn[REG_QTY (REGNO (classp->exp))] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[REG_QTY (REGNO (x))]
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      qty_const[REG_QTY (REGNO (x))]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[REG_QTY (REGNO (x))]
	   && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))])
    qty_const_insn[REG_QTY (REGNO (x))] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
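
/* The canonical insertion protocol, then, is the one followed for
   example by merge_equiv_classes below: call insert_regs first, rehash
   if it reports that quantity numbers changed, and only then insert:

	if (insert_regs (exp, classp, 0))
	  {
	    rehash_using_reg (exp);
	    hash = HASH (exp, mode);
	  }
	new = insert (exp, classp, hash, mode);
 */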
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
/* Flush the entire hash table.  */

static void
flush_hash_table ()
{
  int i;
  struct table_elt *p;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (GET_CODE (p->exp) == REG)
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}
1723 /* Remove from the hash table, or mark as invalid,
1724 all expressions whose values could be altered by storing in X.
1725 X is a register, a subreg, or a memory reference with nonvarying address
1726 (because, when a memory reference with a varying address is stored in,
1727 all memory references are removed by invalidate_memory
1728 so specific invalidation is superfluous).
1729 FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1730 instead of just the amount indicated by the mode of X. This is only used
1731 for bitfield stores into memory.
1733 A nonvarying address may be just a register or just
1734 a symbol reference, or it may be either of those plus
1735 a numeric offset. */
1737 static void
1738 invalidate (x, full_mode)
1739 rtx x;
1740 enum machine_mode full_mode;
1742 register int i;
1743 register struct table_elt *p;
1745 /* If X is a register, dependencies on its contents
1746 are recorded through the qty number mechanism.
1747 Just change the qty number of the register,
1748 mark it as invalid for expressions that refer to it,
1749 and remove it itself. */
1751 if (GET_CODE (x) == REG)
1753 register int regno = REGNO (x);
1754 register unsigned hash = HASH (x, GET_MODE (x));
1756 /* Remove REGNO from any quantity list it might be on and indicate
1757 that its value might have changed. If it is a pseudo, remove its
1758 entry from the hash table.
1760 For a hard register, we do the first two actions above for any
1761 additional hard registers corresponding to X. Then, if any of these
1762 registers are in the table, we must remove any REG entries that
1763 overlap these registers. */
1765 delete_reg_equiv (regno);
1766 REG_TICK (regno)++;
1768 if (regno >= FIRST_PSEUDO_REGISTER)
1770 /* Because a register can be referenced in more than one mode,
1771 we might have to remove more than one table entry. */
1773 struct table_elt *elt;
1775 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1776 remove_from_table (elt, hash);
1778 else
1780 HOST_WIDE_INT in_table
1781 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1782 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1783 int tregno, tendregno;
1784 register struct table_elt *p, *next;
1786 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1788 for (i = regno + 1; i < endregno; i++)
1790 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1791 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1792 delete_reg_equiv (i);
1793 REG_TICK (i)++;
1796 if (in_table)
1797 for (hash = 0; hash < NBUCKETS; hash++)
1798 for (p = table[hash]; p; p = next)
1800 next = p->next_same_hash;
1802 if (GET_CODE (p->exp) != REG
1803 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1804 continue;
1806 tregno = REGNO (p->exp);
1807 tendregno
1808 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1809 if (tendregno > regno && tregno < endregno)
1810 remove_from_table (p, hash);
1814 return;
1817 if (GET_CODE (x) == SUBREG)
1819 if (GET_CODE (SUBREG_REG (x)) != REG)
1820 abort ();
1821 invalidate (SUBREG_REG (x), VOIDmode);
1822 return;
1825 /* If X is a parallel, invalidate all of its elements. */
1827 if (GET_CODE (x) == PARALLEL)
1829 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1830 invalidate (XVECEXP (x, 0, i), VOIDmode);
1831 return;
1834 /* If X is an expr_list, this is part of a disjoint return value;
1835 extract the location in question ignoring the offset. */
1837 if (GET_CODE (x) == EXPR_LIST)
1839 invalidate (XEXP (x, 0), VOIDmode);
1840 return;
1843 /* X is not a register; it must be a memory reference with
1844 a nonvarying address. Remove all hash table elements
1845 that refer to overlapping pieces of memory. */
1847 if (GET_CODE (x) != MEM)
1848 abort ();
1850 if (full_mode == VOIDmode)
1851 full_mode = GET_MODE (x);
1853 for (i = 0; i < NBUCKETS; i++)
1855 register struct table_elt *next;
1856 for (p = table[i]; p; p = next)
1858 next = p->next_same_hash;
1859 /* Invalidate ASM_OPERANDS which reference memory (this is easier
1860 than checking all the aliases). */
1861 if (p->in_memory
1862 && (GET_CODE (p->exp) != MEM
1863 || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
1864 remove_from_table (p, i);
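/* The test `tendregno > regno && tregno < endregno' used earlier in
   this function is the usual half-open interval overlap check:
   [tregno, tendregno) overlaps [regno, endregno) exactly when each
   range starts before the other one ends.  For example, a two-word
   register at 4, i.e. [4,6), overlaps an invalidated pair at 5,
   i.e. [5,7), but not one at 6, i.e. [6,8).  A standalone sketch,
   hypothetical and not code used by this file:  */
#if 0
static int
ranges_overlap (start1, end1, start2, end2)
     int start1, end1, start2, end2;
{
  /* Half-open intervals [START1, END1) and [START2, END2).  */
  return end1 > start2 && start1 < end2;
}
#endif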
1869 /* Remove all expressions that refer to register REGNO,
1870 since they are already invalid, and we are about to
1871 mark that register valid again and don't want the old
1872 expressions to reappear as valid. */
1874 static void
1875 remove_invalid_refs (regno)
1876 int regno;
1878 register int i;
1879 register struct table_elt *p, *next;
1881 for (i = 0; i < NBUCKETS; i++)
1882 for (p = table[i]; p; p = next)
1884 next = p->next_same_hash;
1885 if (GET_CODE (p->exp) != REG
1886 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1887 remove_from_table (p, i);
1891 /* Likewise for a subreg of register REGNO, with subreg_word WORD and mode MODE. */
1892 static void
1893 remove_invalid_subreg_refs (regno, word, mode)
1894 int regno;
1895 int word;
1896 enum machine_mode mode;
1898 register int i;
1899 register struct table_elt *p, *next;
1900 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1902 for (i = 0; i < NBUCKETS; i++)
1903 for (p = table[i]; p; p = next)
1905 rtx exp;
1906 next = p->next_same_hash;
1908 exp = p->exp;
1909 if (GET_CODE (p->exp) != REG
1910 && (GET_CODE (exp) != SUBREG
1911 || GET_CODE (SUBREG_REG (exp)) != REG
1912 || REGNO (SUBREG_REG (exp)) != regno
1913 || (((SUBREG_WORD (exp)
1914 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1915 >= word)
1916 && SUBREG_WORD (exp) <= end))
1917 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1918 remove_from_table (p, i);
1922 /* Recompute the hash codes of any valid entries in the hash table that
1923 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1925 This is called when we make a jump equivalence. */
1927 static void
1928 rehash_using_reg (x)
1929 rtx x;
1931 unsigned int i;
1932 struct table_elt *p, *next;
1933 unsigned hash;
1935 if (GET_CODE (x) == SUBREG)
1936 x = SUBREG_REG (x);
1938 /* If X is not a register or if the register is known not to be in any
1939 valid entries in the table, we have no work to do. */
1941 if (GET_CODE (x) != REG
1942 || REG_IN_TABLE (REGNO (x)) < 0
1943 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1944 return;
1946 /* Scan all hash chains looking for valid entries that mention X.
1947 If we find one and it is in the wrong hash chain, move it. We can skip
1948 objects that are registers, since they are handled specially. */
1950 for (i = 0; i < NBUCKETS; i++)
1951 for (p = table[i]; p; p = next)
1953 next = p->next_same_hash;
1954 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1955 && exp_equiv_p (p->exp, p->exp, 1, 0)
1956 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1958 if (p->next_same_hash)
1959 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1961 if (p->prev_same_hash)
1962 p->prev_same_hash->next_same_hash = p->next_same_hash;
1963 else
1964 table[i] = p->next_same_hash;
1966 p->next_same_hash = table[hash];
1967 p->prev_same_hash = 0;
1968 if (table[hash])
1969 table[hash]->prev_same_hash = p;
1970 table[hash] = p;
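/* Moving P between hash chains above is a standard doubly linked list
   unlink-and-push: P's neighbors are patched together, then P is pushed
   onto the front of the destination chain.  A minimal standalone
   analogue follows; it is a hypothetical sketch, not code used by this
   file.  */
#if 0
struct chain_node { struct chain_node *next, *prev; };

static void
move_to_chain (p, old_head, new_head)
     struct chain_node *p;
     struct chain_node **old_head, **new_head;
{
  /* Unlink P from its current chain.  */
  if (p->next)
    p->next->prev = p->prev;
  if (p->prev)
    p->prev->next = p->next;
  else
    *old_head = p->next;

  /* Push P onto the front of the new chain.  */
  p->next = *new_head;
  p->prev = 0;
  if (*new_head)
    (*new_head)->prev = p;
  *new_head = p;
}
#endif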
1975 /* Remove from the hash table any expression that is a call-clobbered
1976 register. Also update their TICK values. */
1978 static void
1979 invalidate_for_call ()
1981 int regno, endregno;
1982 int i;
1983 unsigned hash;
1984 struct table_elt *p, *next;
1985 int in_table = 0;
1987 /* Go through all the hard registers. For each that is clobbered in
1988 a CALL_INSN, remove the register from quantity chains and update
1989 reg_tick if defined. Also see if any of these registers is currently
1990 in the table. */
1992 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1993 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1995 delete_reg_equiv (regno);
1996 if (REG_TICK (regno) >= 0)
1997 REG_TICK (regno)++;
1999 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2002 /* In the case where we have no call-clobbered hard registers in the
2003 table, we are done. Otherwise, scan the table and remove any
2004 entry that overlaps a call-clobbered register. */
2006 if (in_table)
2007 for (hash = 0; hash < NBUCKETS; hash++)
2008 for (p = table[hash]; p; p = next)
2010 next = p->next_same_hash;
2012 if (GET_CODE (p->exp) != REG
2013 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2014 continue;
2016 regno = REGNO (p->exp);
2017 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2019 for (i = regno; i < endregno; i++)
2020 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2022 remove_from_table (p, hash);
2023 break;
2028 /* Given an expression X of type CONST,
2029 and ELT which is its table entry (or 0 if it
2030 is not in the hash table),
2031 return an alternate expression for X as a register plus integer.
2032 If none can be found, return 0. */
2034 static rtx
2035 use_related_value (x, elt)
2036 rtx x;
2037 struct table_elt *elt;
2039 register struct table_elt *relt = 0;
2040 register struct table_elt *p, *q;
2041 HOST_WIDE_INT offset;
2043 /* First, is there anything related known?
2044 If we have a table element, we can tell from that.
2045 Otherwise, must look it up. */
2047 if (elt != 0 && elt->related_value != 0)
2048 relt = elt;
2049 else if (elt == 0 && GET_CODE (x) == CONST)
2051 rtx subexp = get_related_value (x);
2052 if (subexp != 0)
2053 relt = lookup (subexp,
2054 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
2055 GET_MODE (subexp));
2058 if (relt == 0)
2059 return 0;
2061 /* Search all related table entries for one that has an
2062 equivalent register. */
2064 p = relt;
2065 while (1)
2067 /* This loop is strange in that it is executed in two different cases.
2068 The first is when X is already in the table. Then it is searching
2069 the RELATED_VALUE list of X's class (RELT). The second case is when
2070 X is not in the table. Then RELT points to a class for the related
2071 value.
2073 Ensure that, whatever case we are in, we ignore classes that have
2074 the same value as X. */
2076 if (rtx_equal_p (x, p->exp))
2077 q = 0;
2078 else
2079 for (q = p->first_same_value; q; q = q->next_same_value)
2080 if (GET_CODE (q->exp) == REG)
2081 break;
2083 if (q)
2084 break;
2086 p = p->related_value;
2088 /* We went all the way around, so there is nothing to be found.
2089 Alternatively, perhaps RELT was in the table for some other reason
2090 and it has no related values recorded. */
2091 if (p == relt || p == 0)
2092 break;
2095 if (q == 0)
2096 return 0;
2098 offset = (get_integer_term (x) - get_integer_term (p->exp));
2099 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2100 return plus_constant (q->exp, offset);
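/* A worked example of the rewrite above (the register numbers are made
   up for illustration): suppose X is

       (const (plus (symbol_ref "tbl") (const_int 12)))

   and the related-value ring reachable from (symbol_ref "tbl") contains

       (const (plus (symbol_ref "tbl") (const_int 4)))

   whose class also holds (reg 65).  Then

       offset = get_integer_term (x) - get_integer_term (p->exp)
	      = 12 - 4 = 8

   and the result is (plus (reg 65) (const_int 8)): X expressed as a
   register plus a small integer.  */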
2103 /* Hash an rtx. We are careful to make sure the value is never negative.
2104 Equivalent registers hash identically.
2105 MODE is used in hashing for CONST_INTs only;
2106 otherwise the mode of X is used.
2108 Store 1 in do_not_record if any subexpression is volatile.
2110 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2111 which does not have the RTX_UNCHANGING_P bit set.
2112 In this case, also store 1 in hash_arg_in_struct
2113 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
2115 Note that cse_insn knows that the hash code of a MEM expression
2116 is just (int) MEM plus the hash code of the address. */
2118 static unsigned
2119 canon_hash (x, mode)
2120 rtx x;
2121 enum machine_mode mode;
2123 register int i, j;
2124 register unsigned hash = 0;
2125 register enum rtx_code code;
2126 register const char *fmt;
2128 /* repeat is used to turn tail-recursion into iteration. */
2129 repeat:
2130 if (x == 0)
2131 return hash;
2133 code = GET_CODE (x);
2134 switch (code)
2136 case REG:
2138 register int regno = REGNO (x);
2140 /* On some machines, we can't record any non-fixed hard register,
2141 because extending its life will cause reload problems. We
2142 consider ap, fp, and sp to be fixed for this purpose.
2144 We also consider CCmode registers to be fixed for this purpose;
2145 failure to do so leads to failure to simplify 0<100 type of
2146 conditionals.
2148 On all machines, we can't record any global registers. */
2150 if (regno < FIRST_PSEUDO_REGISTER
2151 && (global_regs[regno]
2152 || (SMALL_REGISTER_CLASSES
2153 && ! fixed_regs[regno]
2154 && regno != FRAME_POINTER_REGNUM
2155 && regno != HARD_FRAME_POINTER_REGNUM
2156 && regno != ARG_POINTER_REGNUM
2157 && regno != STACK_POINTER_REGNUM
2158 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2160 do_not_record = 1;
2161 return 0;
2163 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2164 return hash;
2167 /* We handle SUBREG of a REG specially because the underlying
2168 reg changes its hash value with every value change; we don't
2169 want to have to forget unrelated subregs when one subreg changes. */
2170 case SUBREG:
2172 if (GET_CODE (SUBREG_REG (x)) == REG)
2174 hash += (((unsigned) SUBREG << 7)
2175 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2176 return hash;
2178 break;
2181 case CONST_INT:
2183 unsigned HOST_WIDE_INT tem = INTVAL (x);
2184 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2185 return hash;
2188 case CONST_DOUBLE:
2189 /* This is like the general case, except that it only counts
2190 the integers representing the constant. */
2191 hash += (unsigned) code + (unsigned) GET_MODE (x);
2192 if (GET_MODE (x) != VOIDmode)
2193 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2195 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2196 hash += tem;
2198 else
2199 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2200 + (unsigned) CONST_DOUBLE_HIGH (x));
2201 return hash;
2203 /* Assume there is only one rtx object for any given label. */
2204 case LABEL_REF:
2205 hash
2206 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2207 return hash;
2209 case SYMBOL_REF:
2210 hash
2211 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2212 return hash;
2214 case MEM:
2215 if (MEM_VOLATILE_P (x))
2217 do_not_record = 1;
2218 return 0;
2220 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2222 hash_arg_in_memory = 1;
2223 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
2225 /* Now that we have already found this special case,
2226 might as well speed it up as much as possible. */
2227 hash += (unsigned) MEM;
2228 x = XEXP (x, 0);
2229 goto repeat;
2231 case PRE_DEC:
2232 case PRE_INC:
2233 case POST_DEC:
2234 case POST_INC:
2235 case PC:
2236 case CC0:
2237 case CALL:
2238 case UNSPEC_VOLATILE:
2239 do_not_record = 1;
2240 return 0;
2242 case ASM_OPERANDS:
2243 if (MEM_VOLATILE_P (x))
2245 do_not_record = 1;
2246 return 0;
2248 break;
2250 default:
2251 break;
2254 i = GET_RTX_LENGTH (code) - 1;
2255 hash += (unsigned) code + (unsigned) GET_MODE (x);
2256 fmt = GET_RTX_FORMAT (code);
2257 for (; i >= 0; i--)
2259 if (fmt[i] == 'e')
2261 rtx tem = XEXP (x, i);
2263 /* If we are about to do the last recursive call
2264 needed at this level, change it into iteration.
2265 This function is called enough to be worth it. */
2266 if (i == 0)
2268 x = tem;
2269 goto repeat;
2271 hash += canon_hash (tem, 0);
2273 else if (fmt[i] == 'E')
2274 for (j = 0; j < XVECLEN (x, i); j++)
2275 hash += canon_hash (XVECEXP (x, i, j), 0);
2276 else if (fmt[i] == 's')
2278 register unsigned char *p = (unsigned char *) XSTR (x, i);
2279 if (p)
2280 while (*p)
2281 hash += *p++;
2283 else if (fmt[i] == 'i')
2285 register unsigned tem = XINT (x, i);
2286 hash += tem;
2288 else if (fmt[i] == '0' || fmt[i] == 't')
2289 /* unused */;
2290 else
2291 abort ();
2293 return hash;
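/* canon_hash turns the last recursive call at each level into a `goto
   repeat', so the C stack depth is bounded by the widest operand list
   rather than by the size of the whole expression.  A self-contained
   sketch of the same technique on a toy expression tree follows; it is
   hypothetical, not code used by this file.  */
#if 0
struct expr { int code; int n_ops; struct expr **op; };

static unsigned
hash_expr (x)
     struct expr *x;
{
  unsigned hash = 0;
  int i;

repeat:
  if (x == 0)
    return hash;

  hash += (unsigned) x->code;

  /* Recurse on all operands but the first.  */
  for (i = x->n_ops - 1; i > 0; i--)
    hash += hash_expr (x->op[i]);

  /* Handle the remaining operand iteratively instead of recursing.  */
  if (x->n_ops > 0)
    {
      x = x->op[0];
      goto repeat;
    }
  return hash;
}
#endif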
2296 /* Like canon_hash but with no side effects. */
2298 static unsigned
2299 safe_hash (x, mode)
2300 rtx x;
2301 enum machine_mode mode;
2303 int save_do_not_record = do_not_record;
2304 int save_hash_arg_in_memory = hash_arg_in_memory;
2305 int save_hash_arg_in_struct = hash_arg_in_struct;
2306 unsigned hash = canon_hash (x, mode);
2307 hash_arg_in_memory = save_hash_arg_in_memory;
2308 hash_arg_in_struct = save_hash_arg_in_struct;
2309 do_not_record = save_do_not_record;
2310 return hash;
2313 /* Return 1 iff X and Y would canonicalize into the same thing,
2314 without actually constructing the canonicalization of either one.
2315 If VALIDATE is nonzero,
2316 we assume X is an expression being processed from the rtl
2317 and Y was found in the hash table. We check register refs
2318 in Y for being marked as valid.
2320 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2321 that is known to be in the register. Ordinarily, we don't allow them
2322 to match, because letting them match would cause unpredictable results
2323 in all the places that search a hash table chain for an equivalent
2324 for a given value. A possible equivalent that has different structure
2325 has its hash code computed from different data. Whether the hash code
2326 is the same as that of the given value is pure luck. */
2328 static int
2329 exp_equiv_p (x, y, validate, equal_values)
2330 rtx x, y;
2331 int validate;
2332 int equal_values;
2334 register int i, j;
2335 register enum rtx_code code;
2336 register const char *fmt;
2338 /* Note: it is incorrect to assume an expression is equivalent to itself
2339 if VALIDATE is nonzero. */
2340 if (x == y && !validate)
2341 return 1;
2342 if (x == 0 || y == 0)
2343 return x == y;
2345 code = GET_CODE (x);
2346 if (code != GET_CODE (y))
2348 if (!equal_values)
2349 return 0;
2351 /* If X is a constant and Y is a register or vice versa, they may be
2352 equivalent. We only have to validate if Y is a register. */
2353 if (CONSTANT_P (x) && GET_CODE (y) == REG
2354 && REGNO_QTY_VALID_P (REGNO (y))
2355 && GET_MODE (y) == qty_mode[REG_QTY (REGNO (y))]
2356 && rtx_equal_p (x, qty_const[REG_QTY (REGNO (y))])
2357 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2358 return 1;
2360 if (CONSTANT_P (y) && code == REG
2361 && REGNO_QTY_VALID_P (REGNO (x))
2362 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2363 && rtx_equal_p (y, qty_const[REG_QTY (REGNO (x))]))
2364 return 1;
2366 return 0;
2369 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2370 if (GET_MODE (x) != GET_MODE (y))
2371 return 0;
2373 switch (code)
2375 case PC:
2376 case CC0:
2377 return x == y;
2379 case CONST_INT:
2380 return INTVAL (x) == INTVAL (y);
2382 case LABEL_REF:
2383 return XEXP (x, 0) == XEXP (y, 0);
2385 case SYMBOL_REF:
2386 return XSTR (x, 0) == XSTR (y, 0);
2388 case REG:
2390 int regno = REGNO (y);
2391 int endregno
2392 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2393 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2394 int i;
2396 /* If the quantities are not the same, the expressions are not
2397 equivalent. If they are and we are not to validate, they
2398 are equivalent. Otherwise, ensure all regs are up-to-date. */
2400 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2401 return 0;
2403 if (! validate)
2404 return 1;
2406 for (i = regno; i < endregno; i++)
2407 if (REG_IN_TABLE (i) != REG_TICK (i))
2408 return 0;
2410 return 1;
2413 /* For commutative operations, check both orders. */
2414 case PLUS:
2415 case MULT:
2416 case AND:
2417 case IOR:
2418 case XOR:
2419 case NE:
2420 case EQ:
2421 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2422 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2423 validate, equal_values))
2424 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2425 validate, equal_values)
2426 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2427 validate, equal_values)));
2429 default:
2430 break;
2433 /* Compare the elements. If any pair of corresponding elements
2434 fail to match, return 0 for the whole thing. */
2436 fmt = GET_RTX_FORMAT (code);
2437 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2439 switch (fmt[i])
2441 case 'e':
2442 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2443 return 0;
2444 break;
2446 case 'E':
2447 if (XVECLEN (x, i) != XVECLEN (y, i))
2448 return 0;
2449 for (j = 0; j < XVECLEN (x, i); j++)
2450 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2451 validate, equal_values))
2452 return 0;
2453 break;
2455 case 's':
2456 if (strcmp (XSTR (x, i), XSTR (y, i)))
2457 return 0;
2458 break;
2460 case 'i':
2461 if (XINT (x, i) != XINT (y, i))
2462 return 0;
2463 break;
2465 case 'w':
2466 if (XWINT (x, i) != XWINT (y, i))
2467 return 0;
2468 break;
2470 case '0':
2471 case 't':
2472 break;
2474 default:
2475 abort ();
2479 return 1;
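/* For the commutative codes above, both operand orders must be tried,
   since (plus a b) and (plus b a) denote the same value yet fail an
   element-by-element comparison.  The shape of that test in isolation,
   as a hypothetical sketch not used by this file:  */
#if 0
static int
commutative_pair_equiv_p (x0, x1, y0, y1, eq)
     rtx x0, x1, y0, y1;
     int (*eq) PARAMS ((rtx, rtx));
{
  return ((eq (x0, y0) && eq (x1, y1))
	  || (eq (x0, y1) && eq (x1, y0)));
}
#endif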
2482 /* Return 1 iff any subexpression of X matches Y.
2483 Here we do not require that X or Y be valid (for registers referred to)
2484 for being in the hash table. */
2486 static int
2487 refers_to_p (x, y)
2488 rtx x, y;
2490 register int i;
2491 register enum rtx_code code;
2492 register const char *fmt;
2494 repeat:
2495 if (x == y)
2496 return 1;
2497 if (x == 0 || y == 0)
2498 return 0;
2500 code = GET_CODE (x);
2501 /* If X as a whole has the same code as Y, they may match.
2502 If so, return 1. */
2503 if (code == GET_CODE (y))
2505 if (exp_equiv_p (x, y, 0, 1))
2506 return 1;
2509 /* X does not match, so try its subexpressions. */
2511 fmt = GET_RTX_FORMAT (code);
2512 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2513 if (fmt[i] == 'e')
2515 if (i == 0)
2517 x = XEXP (x, 0);
2518 goto repeat;
2520 else
2521 if (refers_to_p (XEXP (x, i), y))
2522 return 1;
2524 else if (fmt[i] == 'E')
2526 int j;
2527 for (j = 0; j < XVECLEN (x, i); j++)
2528 if (refers_to_p (XVECEXP (x, i, j), y))
2529 return 1;
2532 return 0;
2535 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2536 set PBASE, PSTART, and PEND which correspond to the base of the address,
2537 the starting offset, and ending offset respectively.
2539 ADDR is known to be a nonvarying address. */
2541 /* ??? Despite what the comments say, this function is in fact frequently
2542 passed varying addresses. This does not appear to cause any problems. */
2544 static void
2545 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2546 rtx addr;
2547 int size;
2548 rtx *pbase;
2549 HOST_WIDE_INT *pstart, *pend;
2551 rtx base;
2552 HOST_WIDE_INT start, end;
2554 base = addr;
2555 start = 0;
2556 end = 0;
2558 if (flag_pic && GET_CODE (base) == PLUS
2559 && XEXP (base, 0) == pic_offset_table_rtx)
2560 base = XEXP (base, 1);
2562 /* Registers with nonvarying addresses usually have constant equivalents;
2563 but the frame pointer register is also possible. */
2564 if (GET_CODE (base) == REG
2565 && qty_const != 0
2566 && REGNO_QTY_VALID_P (REGNO (base))
2567 && qty_mode[REG_QTY (REGNO (base))] == GET_MODE (base)
2568 && qty_const[REG_QTY (REGNO (base))] != 0)
2569 base = qty_const[REG_QTY (REGNO (base))];
2570 else if (GET_CODE (base) == PLUS
2571 && GET_CODE (XEXP (base, 1)) == CONST_INT
2572 && GET_CODE (XEXP (base, 0)) == REG
2573 && qty_const != 0
2574 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2575 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
2576 == GET_MODE (XEXP (base, 0)))
2577 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))])
2579 start = INTVAL (XEXP (base, 1));
2580 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
2582 /* This can happen as the result of virtual register instantiation,
2583 if the initial offset is too large to be a valid address. */
2584 else if (GET_CODE (base) == PLUS
2585 && GET_CODE (XEXP (base, 0)) == REG
2586 && GET_CODE (XEXP (base, 1)) == REG
2587 && qty_const != 0
2588 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2589 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
2590 == GET_MODE (XEXP (base, 0)))
2591 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))]
2592 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2593 && (qty_mode[REG_QTY (REGNO (XEXP (base, 1)))]
2594 == GET_MODE (XEXP (base, 1)))
2595 && qty_const[REG_QTY (REGNO (XEXP (base, 1)))])
2597 rtx tem = qty_const[REG_QTY (REGNO (XEXP (base, 1)))];
2598 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
2600 /* One of the two values must be a constant. */
2601 if (GET_CODE (base) != CONST_INT)
2603 if (GET_CODE (tem) != CONST_INT)
2604 abort ();
2605 start = INTVAL (tem);
2607 else
2609 start = INTVAL (base);
2610 base = tem;
2614 /* Handle everything that we can find inside an address that has been
2615 viewed as constant. */
2617 while (1)
2619 /* If no part of this switch does a "continue", the code outside
2620 will exit this loop. */
2622 switch (GET_CODE (base))
2624 case LO_SUM:
2625 /* By definition, operand1 of a LO_SUM is the associated constant
2626 address. Use the associated constant address as the base
2627 instead. */
2628 base = XEXP (base, 1);
2629 continue;
2631 case CONST:
2632 /* Strip off CONST. */
2633 base = XEXP (base, 0);
2634 continue;
2636 case PLUS:
2637 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2639 start += INTVAL (XEXP (base, 1));
2640 base = XEXP (base, 0);
2641 continue;
2643 break;
2645 case AND:
2646 /* Handle the case of an AND which is the negative of a power of
2647 two. This is used to represent unaligned memory operations. */
2648 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2649 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2651 set_nonvarying_address_components (XEXP (base, 0), size,
2652 pbase, pstart, pend);
2654 /* Assume the worst misalignment. START is affected, but not
2655 END, so compensate by adjusting SIZE. Don't lose any
2656 constant we already had. */
2658 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2659 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2660 end += *pend;
2661 base = *pbase;
2663 break;
2665 default:
2666 break;
2669 break;
2672 if (GET_CODE (base) == CONST_INT)
2674 start += INTVAL (base);
2675 base = const0_rtx;
2678 end = start + size;
2680 /* Set the return values. */
2681 *pbase = base;
2682 *pstart = start;
2683 *pend = end;
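/* A worked example for the decomposition above (the offsets are made up
   for illustration): with SIZE 4 and

       ADDR = (plus (plus (reg fp) (const_int 8)) (const_int 4))

   where (reg fp) has no constant equivalent, the PLUS case peels off
   each integer term in turn:

       start = 0 + 4 + 8 = 12,  base = (reg fp)

   so the results are *PBASE = (reg fp), *PSTART = 12 and *PEND
   = 12 + 4 = 16.  Two references sharing a base can then be tested for
   overlap by comparing their [start, end) ranges.  */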
2686 /* Return 1 if X has a value that can vary even between two
2687 executions of the program. 0 means X can be compared reliably
2688 against certain constants or near-constants. */
2690 static int
2691 cse_rtx_varies_p (x)
2692 register rtx x;
2694 /* We need not check for X and the equivalence class being of the same
2695 mode because if X is equivalent to a constant in some mode, it
2696 doesn't vary in any mode. */
2698 if (GET_CODE (x) == REG
2699 && REGNO_QTY_VALID_P (REGNO (x))
2700 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2701 && qty_const[REG_QTY (REGNO (x))] != 0)
2702 return 0;
2704 if (GET_CODE (x) == PLUS
2705 && GET_CODE (XEXP (x, 1)) == CONST_INT
2706 && GET_CODE (XEXP (x, 0)) == REG
2707 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2708 && (GET_MODE (XEXP (x, 0))
2709 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2710 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))])
2711 return 0;
2713 /* This can happen as the result of virtual register instantiation, if
2714 the initial constant is too large to be a valid address. This gives
2715 us a three instruction sequence, load large offset into a register,
2716 load fp minus a constant into a register, then a MEM which is the
2717 sum of the two `constant' registers. */
2718 if (GET_CODE (x) == PLUS
2719 && GET_CODE (XEXP (x, 0)) == REG
2720 && GET_CODE (XEXP (x, 1)) == REG
2721 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2722 && (GET_MODE (XEXP (x, 0))
2723 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2724 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))]
2725 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2726 && (GET_MODE (XEXP (x, 1))
2727 == qty_mode[REG_QTY (REGNO (XEXP (x, 1)))])
2728 && qty_const[REG_QTY (REGNO (XEXP (x, 1)))])
2729 return 0;
2731 return rtx_varies_p (x);
2734 /* Canonicalize an expression:
2735 replace each register reference inside it
2736 with the "oldest" equivalent register.
2738 If INSN is non-zero and we are replacing a pseudo with a hard register
2739 or vice versa, validate_change is used to ensure that INSN remains valid
2740 after we make our substitution. The calls are made with IN_GROUP non-zero
2741 so apply_change_group must be called upon the outermost return from this
2742 function (unless INSN is zero). The result of apply_change_group can
2743 generally be discarded since the changes we are making are optional. */
2745 static rtx
2746 canon_reg (x, insn)
2747 rtx x;
2748 rtx insn;
2750 register int i;
2751 register enum rtx_code code;
2752 register const char *fmt;
2754 if (x == 0)
2755 return x;
2757 code = GET_CODE (x);
2758 switch (code)
2760 case PC:
2761 case CC0:
2762 case CONST:
2763 case CONST_INT:
2764 case CONST_DOUBLE:
2765 case SYMBOL_REF:
2766 case LABEL_REF:
2767 case ADDR_VEC:
2768 case ADDR_DIFF_VEC:
2769 return x;
2771 case REG:
2773 register int first;
2775 /* Never replace a hard reg, because hard regs can appear
2776 in more than one machine mode, and we must preserve the mode
2777 of each occurrence. Also, some hard regs appear in
2778 MEMs that are shared and mustn't be altered. Don't try to
2779 replace any reg that maps to a reg of class NO_REGS. */
2780 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2781 || ! REGNO_QTY_VALID_P (REGNO (x)))
2782 return x;
2784 first = qty_first_reg[REG_QTY (REGNO (x))];
2785 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2786 : REGNO_REG_CLASS (first) == NO_REGS ? x
2787 : gen_rtx_REG (qty_mode[REG_QTY (REGNO (x))], first));
2790 default:
2791 break;
2794 fmt = GET_RTX_FORMAT (code);
2795 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2797 register int j;
2799 if (fmt[i] == 'e')
2801 rtx new = canon_reg (XEXP (x, i), insn);
2802 int insn_code;
2804 /* If replacing pseudo with hard reg or vice versa, ensure the
2805 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2806 if (insn != 0 && new != 0
2807 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2808 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2809 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2810 || (insn_code = recog_memoized (insn)) < 0
2811 || insn_data[insn_code].n_dups > 0))
2812 validate_change (insn, &XEXP (x, i), new, 1);
2813 else
2814 XEXP (x, i) = new;
2816 else if (fmt[i] == 'E')
2817 for (j = 0; j < XVECLEN (x, i); j++)
2818 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2821 return x;
2824 /* LOC is a location within INSN that is an operand address (the contents of
2825 a MEM). Find the best equivalent address to use that is valid for this
2826 insn.
2828 On most CISC machines, complicated address modes are costly, and rtx_cost
2829 is a good approximation for that cost. However, most RISC machines have
2830 only a few (usually only one) memory reference formats. If an address is
2831 valid at all, it is often just as cheap as any other address. Hence, for
2832 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2833 costs of various addresses. For two addresses of equal cost, choose the one
2834 with the highest `rtx_cost' value as that has the potential of eliminating
2835 the most insns. For equal costs, we choose the first in the equivalence
2836 class. Note that we ignore the fact that pseudo registers are cheaper
2837 than hard registers here because we would also prefer the pseudo registers.
2838 */
2840 static void
2841 find_best_addr (insn, loc)
2842 rtx insn;
2843 rtx *loc;
2845 struct table_elt *elt;
2846 rtx addr = *loc;
2847 #ifdef ADDRESS_COST
2848 struct table_elt *p;
2849 int found_better = 1;
2850 #endif
2851 int save_do_not_record = do_not_record;
2852 int save_hash_arg_in_memory = hash_arg_in_memory;
2853 int save_hash_arg_in_struct = hash_arg_in_struct;
2854 int addr_volatile;
2855 int regno;
2856 unsigned hash;
2858 /* Do not try to replace constant addresses or addresses of local and
2859 argument slots. These MEM expressions are made only once and inserted
2860 in many instructions, as well as being used to control symbol table
2861 output. It is not safe to clobber them.
2863 There are some uncommon cases where the address is already in a register
2864 for some reason, but we cannot take advantage of that because we have
2865 no easy way to unshare the MEM. In addition, looking up all stack
2866 addresses is costly. */
2867 if ((GET_CODE (addr) == PLUS
2868 && GET_CODE (XEXP (addr, 0)) == REG
2869 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2870 && (regno = REGNO (XEXP (addr, 0)),
2871 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2872 || regno == ARG_POINTER_REGNUM))
2873 || (GET_CODE (addr) == REG
2874 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2875 || regno == HARD_FRAME_POINTER_REGNUM
2876 || regno == ARG_POINTER_REGNUM))
2877 || GET_CODE (addr) == ADDRESSOF
2878 || CONSTANT_ADDRESS_P (addr))
2879 return;
2881 /* If this address is not simply a register, try to fold it. This will
2882 sometimes simplify the expression. Many simplifications
2883 will not be valid, but some, usually applying the associative rule, will
2884 be valid and produce better code. */
2885 if (GET_CODE (addr) != REG)
2887 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2889 if (1
2890 #ifdef ADDRESS_COST
2891 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2892 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2893 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2894 #else
2895 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2896 #endif
2897 && validate_change (insn, loc, folded, 0))
2898 addr = folded;
2901 /* If this address is not in the hash table, we can't look for equivalences
2902 of the whole address. Also, ignore if volatile. */
2904 do_not_record = 0;
2905 hash = HASH (addr, Pmode);
2906 addr_volatile = do_not_record;
2907 do_not_record = save_do_not_record;
2908 hash_arg_in_memory = save_hash_arg_in_memory;
2909 hash_arg_in_struct = save_hash_arg_in_struct;
2911 if (addr_volatile)
2912 return;
2914 elt = lookup (addr, hash, Pmode);
2916 #ifndef ADDRESS_COST
2917 if (elt)
2919 int our_cost = elt->cost;
2921 /* Find the lowest cost below ours that works. */
2922 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2923 if (elt->cost < our_cost
2924 && (GET_CODE (elt->exp) == REG
2925 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2926 && validate_change (insn, loc,
2927 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2928 return;
2930 #else
2932 if (elt)
2934 /* We need to find the best (under the criteria documented above) entry
2935 in the class that is valid. We use the `flag' field to indicate
2936 choices that were invalid and iterate until we can't find a better
2937 one that hasn't already been tried. */
2939 for (p = elt->first_same_value; p; p = p->next_same_value)
2940 p->flag = 0;
2942 while (found_better)
2944 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2945 int best_rtx_cost = (elt->cost + 1) >> 1;
2946 struct table_elt *best_elt = elt;
2948 found_better = 0;
2949 for (p = elt->first_same_value; p; p = p->next_same_value)
2950 if (! p->flag)
2952 if ((GET_CODE (p->exp) == REG
2953 || exp_equiv_p (p->exp, p->exp, 1, 0))
2954 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2955 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2956 && (p->cost + 1) >> 1 > best_rtx_cost)))
2958 found_better = 1;
2959 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2960 best_rtx_cost = (p->cost + 1) >> 1;
2961 best_elt = p;
2965 if (found_better)
2967 if (validate_change (insn, loc,
2968 canon_reg (copy_rtx (best_elt->exp),
2969 NULL_RTX), 0))
2970 return;
2971 else
2972 best_elt->flag = 1;
2977 /* If the address is a binary operation with the first operand a register
2978 and the second a constant, do the same as above, but looking for
2979 equivalences of the register. Then try to simplify before checking for
2980 the best address to use. This catches a few cases: First is when we
2981 have REG+const and the register is another REG+const. We can often merge
2982 the constants and eliminate one insn and one register. It may also be
2983 that a machine has a cheap REG+REG+const. Finally, this improves the
2984 code on the Alpha for unaligned byte stores. */
2986 if (flag_expensive_optimizations
2987 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2988 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2989 && GET_CODE (XEXP (*loc, 0)) == REG
2990 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2992 rtx c = XEXP (*loc, 1);
2994 do_not_record = 0;
2995 hash = HASH (XEXP (*loc, 0), Pmode);
2996 do_not_record = save_do_not_record;
2997 hash_arg_in_memory = save_hash_arg_in_memory;
2998 hash_arg_in_struct = save_hash_arg_in_struct;
3000 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3001 if (elt == 0)
3002 return;
3004 /* We need to find the best (under the criteria documented above) entry
3005 in the class that is valid. We use the `flag' field to indicate
3006 choices that were invalid and iterate until we can't find a better
3007 one that hasn't already been tried. */
3009 for (p = elt->first_same_value; p; p = p->next_same_value)
3010 p->flag = 0;
3012 while (found_better)
3014 int best_addr_cost = CSE_ADDRESS_COST (*loc);
3015 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3016 struct table_elt *best_elt = elt;
3017 rtx best_rtx = *loc;
3018 int count;
3020 /* This is at worst case an O(n^2) algorithm, so limit our search
3021 to the first 32 elements on the list. This avoids trouble
3022 compiling code with very long basic blocks that can easily
3023 call cse_gen_binary so many times that we run out of memory. */
3025 found_better = 0;
3026 for (p = elt->first_same_value, count = 0;
3027 p && count < 32;
3028 p = p->next_same_value, count++)
3029 if (! p->flag
3030 && (GET_CODE (p->exp) == REG
3031 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3033 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
3035 if ((CSE_ADDRESS_COST (new) < best_addr_cost
3036 || (CSE_ADDRESS_COST (new) == best_addr_cost
3037 && (COST (new) + 1) >> 1 > best_rtx_cost)))
3039 found_better = 1;
3040 best_addr_cost = CSE_ADDRESS_COST (new);
3041 best_rtx_cost = (COST (new) + 1) >> 1;
3042 best_elt = p;
3043 best_rtx = new;
3047 if (found_better)
3049 if (validate_change (insn, loc,
3050 canon_reg (copy_rtx (best_rtx),
3051 NULL_RTX), 0))
3052 return;
3053 else
3054 best_elt->flag = 1;
3058 #endif
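/* A worked example of the REG+const case above (the register numbers
   are made up for illustration): for the address (plus (reg 70)
   (const_int 4)), the class of (reg 70) is scanned; if it contains
   (plus (reg 66) (const_int 8)), cse_gen_binary folds

       (plus (plus (reg 66) (const_int 8)) (const_int 4))
	 => (plus (reg 66) (const_int 12))

   which needs one register and no separate addition, so it usually wins
   on both CSE_ADDRESS_COST and rtx_cost.  */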
3061 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3062 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3063 see what values are being compared.
3065 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3066 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3067 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3068 compared to produce cc0.
3070 The return value is the comparison operator and is either the code of
3071 A or the code corresponding to the inverse of the comparison. */
3073 static enum rtx_code
3074 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3075 enum rtx_code code;
3076 rtx *parg1, *parg2;
3077 enum machine_mode *pmode1, *pmode2;
3079 rtx arg1, arg2;
3081 arg1 = *parg1, arg2 = *parg2;
3083 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3085 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3087 /* Set non-zero when we find something of interest. */
3088 rtx x = 0;
3089 int reverse_code = 0;
3090 struct table_elt *p = 0;
3092 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3093 On machines with CC0, this is the only case that can occur, since
3094 fold_rtx will return the COMPARE or item being compared with zero
3095 when given CC0. */
3097 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3098 x = arg1;
3100 /* If ARG1 is a comparison operator and CODE is testing for
3101 STORE_FLAG_VALUE, get the inner arguments. */
3103 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3105 if (code == NE
3106 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3107 && code == LT && STORE_FLAG_VALUE == -1)
3108 #ifdef FLOAT_STORE_FLAG_VALUE
3109 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3110 && FLOAT_STORE_FLAG_VALUE < 0)
3111 #endif
3112 )
3113 x = arg1;
3114 else if (code == EQ
3115 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3116 && code == GE && STORE_FLAG_VALUE == -1)
3117 #ifdef FLOAT_STORE_FLAG_VALUE
3118 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3119 && FLOAT_STORE_FLAG_VALUE < 0)
3120 #endif
3121 )
3122 x = arg1, reverse_code = 1;
3125 /* ??? We could also check for
3127 (ne (and (eq (...) (const_int 1))) (const_int 0))
3129 and related forms, but let's wait until we see them occurring. */
3131 if (x == 0)
3132 /* Look up ARG1 in the hash table and see if it has an equivalence
3133 that lets us see what is being compared. */
3134 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
3135 GET_MODE (arg1));
3136 if (p) p = p->first_same_value;
3138 for (; p; p = p->next_same_value)
3140 enum machine_mode inner_mode = GET_MODE (p->exp);
3142 /* If the entry isn't valid, skip it. */
3143 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3144 continue;
3146 if (GET_CODE (p->exp) == COMPARE
3147 /* Another possibility is that this machine has a compare insn
3148 that includes the comparison code. In that case, ARG1 would
3149 be equivalent to a comparison operation that would set ARG1 to
3150 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3151 ORIG_CODE is the actual comparison being done; if it is an EQ,
3152 we must reverse ORIG_CODE. On machines with a negative value
3153 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3154 || ((code == NE
3155 || (code == LT
3156 && GET_MODE_CLASS (inner_mode) == MODE_INT
3157 && (GET_MODE_BITSIZE (inner_mode)
3158 <= HOST_BITS_PER_WIDE_INT)
3159 && (STORE_FLAG_VALUE
3160 & ((HOST_WIDE_INT) 1
3161 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3162 #ifdef FLOAT_STORE_FLAG_VALUE
3163 || (code == LT
3164 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3165 && FLOAT_STORE_FLAG_VALUE < 0)
3166 #endif
3167 )
3168 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3170 x = p->exp;
3171 break;
3173 else if ((code == EQ
3174 || (code == GE
3175 && GET_MODE_CLASS (inner_mode) == MODE_INT
3176 && (GET_MODE_BITSIZE (inner_mode)
3177 <= HOST_BITS_PER_WIDE_INT)
3178 && (STORE_FLAG_VALUE
3179 & ((HOST_WIDE_INT) 1
3180 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3181 #ifdef FLOAT_STORE_FLAG_VALUE
3182 || (code == GE
3183 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3184 && FLOAT_STORE_FLAG_VALUE < 0)
3185 #endif
3186 )
3187 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3189 reverse_code = 1;
3190 x = p->exp;
3191 break;
3194 /* If this is fp + constant, the equivalent is a better operand since
3195 it may let us predict the value of the comparison. */
3196 else if (NONZERO_BASE_PLUS_P (p->exp))
3198 arg1 = p->exp;
3199 continue;
3203 /* If we didn't find a useful equivalence for ARG1, we are done.
3204 Otherwise, set up for the next iteration. */
3205 if (x == 0)
3206 break;
3208 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3209 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3210 code = GET_CODE (x);
3212 if (reverse_code)
3213 code = reverse_condition (code);
3216 /* Return our results. Return the modes from before fold_rtx
3217 because fold_rtx might produce const_int, and then it's too late. */
3218 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3219 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3221 return code;
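/* A worked example (the register numbers are made up for illustration):
   given CODE = NE, *PARG1 = (reg 70) and *PARG2 = (const_int 0), where
   the hash table records (reg 70) as equivalent to
   (lt (reg 65) (reg 66)) and STORE_FLAG_VALUE is 1, the loop above
   finds that comparison, so the function returns LT with
   *PARG1 = (reg 65) and *PARG2 = (reg 66).  Had CODE been EQ,
   REVERSE_CODE would flip the result to GE.  */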
3224 /* Try to simplify a unary operation CODE whose output mode is to be
3225 MODE with input operand OP whose mode was originally OP_MODE.
3226 Return zero if no simplification can be made. */
3228 rtx
3229 simplify_unary_operation (code, mode, op, op_mode)
3230 enum rtx_code code;
3231 enum machine_mode mode;
3232 rtx op;
3233 enum machine_mode op_mode;
3235 register int width = GET_MODE_BITSIZE (mode);
3237 /* The order of these tests is critical so that, for example, we don't
3238 check the wrong mode (input vs. output) for a conversion operation,
3239 such as FIX. At some point, this should be simplified. */
3241 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
3243 if (code == FLOAT && GET_MODE (op) == VOIDmode
3244 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3246 HOST_WIDE_INT hv, lv;
3247 REAL_VALUE_TYPE d;
3249 if (GET_CODE (op) == CONST_INT)
3250 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3251 else
3252 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3254 #ifdef REAL_ARITHMETIC
3255 REAL_VALUE_FROM_INT (d, lv, hv, mode);
3256 #else
3257 if (hv < 0)
3259 d = (double) (~ hv);
3260 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3261 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3262 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
3263 d = (- d - 1.0);
3265 else
3267 d = (double) hv;
3268 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3269 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3270 d += (double) (unsigned HOST_WIDE_INT) lv;
3272 #endif /* REAL_ARITHMETIC */
3273 d = real_value_truncate (mode, d);
3274 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3276 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3277 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3279 HOST_WIDE_INT hv, lv;
3280 REAL_VALUE_TYPE d;
3282 if (GET_CODE (op) == CONST_INT)
3283 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3284 else
3285 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3287 if (op_mode == VOIDmode)
3289 /* We don't know how to interpret negative-looking numbers in
3290 this case, so don't try to fold those. */
3291 if (hv < 0)
3292 return 0;
3294 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
3295 ;
3296 else
3297 hv = 0, lv &= GET_MODE_MASK (op_mode);
3299 #ifdef REAL_ARITHMETIC
3300 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
3301 #else
3303 d = (double) (unsigned HOST_WIDE_INT) hv;
3304 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3305 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3306 d += (double) (unsigned HOST_WIDE_INT) lv;
3307 #endif /* REAL_ARITHMETIC */
3308 d = real_value_truncate (mode, d);
3309 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3311 #endif
3313 if (GET_CODE (op) == CONST_INT
3314 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3316 register HOST_WIDE_INT arg0 = INTVAL (op);
3317 register HOST_WIDE_INT val;
3319 switch (code)
3321 case NOT:
3322 val = ~ arg0;
3323 break;
3325 case NEG:
3326 val = - arg0;
3327 break;
3329 case ABS:
3330 val = (arg0 >= 0 ? arg0 : - arg0);
3331 break;
3333 case FFS:
3334 /* Don't use ffs here. Instead, get low order bit and then its
3335 number. If arg0 is zero, this will return 0, as desired. */
3336 arg0 &= GET_MODE_MASK (mode);
3337 val = exact_log2 (arg0 & (- arg0)) + 1;
3338 break;
3340 case TRUNCATE:
3341 val = arg0;
3342 break;
3344 case ZERO_EXTEND:
3345 if (op_mode == VOIDmode)
3346 op_mode = mode;
3347 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3349 /* If we were really extending the mode,
3350 we would have to distinguish between zero-extension
3351 and sign-extension. */
3352 if (width != GET_MODE_BITSIZE (op_mode))
3353 abort ();
3354 val = arg0;
3356 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3357 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3358 else
3359 return 0;
3360 break;
3362 case SIGN_EXTEND:
3363 if (op_mode == VOIDmode)
3364 op_mode = mode;
3365 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3367 /* If we were really extending the mode,
3368 we would have to distinguish between zero-extension
3369 and sign-extension. */
3370 if (width != GET_MODE_BITSIZE (op_mode))
3371 abort ();
3372 val = arg0;
3374 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3376 val
3377 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3378 if (val
3379 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3380 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3382 else
3383 return 0;
3384 break;
3386 case SQRT:
3387 return 0;
3389 default:
3390 abort ();
3393 val = trunc_int_for_mode (val, mode);
3395 return GEN_INT (val);
3398 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3399 for a DImode operation on a CONST_INT. */
3400 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3401 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3403 HOST_WIDE_INT l1, h1, lv, hv;
3405 if (GET_CODE (op) == CONST_DOUBLE)
3406 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3407 else
3408 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3410 switch (code)
3412 case NOT:
3413 lv = ~ l1;
3414 hv = ~ h1;
3415 break;
3417 case NEG:
3418 neg_double (l1, h1, &lv, &hv);
3419 break;
3421 case ABS:
3422 if (h1 < 0)
3423 neg_double (l1, h1, &lv, &hv);
3424 else
3425 lv = l1, hv = h1;
3426 break;
3428 case FFS:
3429 hv = 0;
3430 if (l1 == 0)
3431 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3432 else
3433 lv = exact_log2 (l1 & (-l1)) + 1;
3434 break;
3436 case TRUNCATE:
3437 /* This is just a change-of-mode, so do nothing. */
3438 lv = l1, hv = h1;
3439 break;
3441 case ZERO_EXTEND:
3442 if (op_mode == VOIDmode
3443 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3444 return 0;
3446 hv = 0;
3447 lv = l1 & GET_MODE_MASK (op_mode);
3448 break;
3450 case SIGN_EXTEND:
3451 if (op_mode == VOIDmode
3452 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3453 return 0;
3454 else
3456 lv = l1 & GET_MODE_MASK (op_mode);
3457 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3458 && (lv & ((HOST_WIDE_INT) 1
3459 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3460 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3462 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3464 break;
3466 case SQRT:
3467 return 0;
3469 default:
3470 return 0;
3473 return immed_double_const (lv, hv, mode);
3476 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3477 else if (GET_CODE (op) == CONST_DOUBLE
3478 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3480 REAL_VALUE_TYPE d;
3481 jmp_buf handler;
3482 rtx x;
3484 if (setjmp (handler))
3485 /* There used to be a warning here, but that is inadvisable.
3486 People may want to cause traps, and the natural way
3487 to do it should not get a warning. */
3488 return 0;
3490 set_float_handler (handler);
3492 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3494 switch (code)
3496 case NEG:
3497 d = REAL_VALUE_NEGATE (d);
3498 break;
3500 case ABS:
3501 if (REAL_VALUE_NEGATIVE (d))
3502 d = REAL_VALUE_NEGATE (d);
3503 break;
3505 case FLOAT_TRUNCATE:
3506 d = real_value_truncate (mode, d);
3507 break;
3509 case FLOAT_EXTEND:
3510 /* All this does is change the mode. */
3511 break;
3513 case FIX:
3514 d = REAL_VALUE_RNDZINT (d);
3515 break;
3517 case UNSIGNED_FIX:
3518 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3519 break;
3521 case SQRT:
3522 return 0;
3524 default:
3525 abort ();
3528 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3529 set_float_handler (NULL_PTR);
3530 return x;
3533 else if (GET_CODE (op) == CONST_DOUBLE
3534 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3535 && GET_MODE_CLASS (mode) == MODE_INT
3536 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3538 REAL_VALUE_TYPE d;
3539 jmp_buf handler;
3540 HOST_WIDE_INT val;
3542 if (setjmp (handler))
3543 return 0;
3545 set_float_handler (handler);
3547 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3549 switch (code)
3551 case FIX:
3552 val = REAL_VALUE_FIX (d);
3553 break;
3555 case UNSIGNED_FIX:
3556 val = REAL_VALUE_UNSIGNED_FIX (d);
3557 break;
3559 default:
3560 abort ();
3563 set_float_handler (NULL_PTR);
3565 val = trunc_int_for_mode (val, mode);
3567 return GEN_INT (val);
3569 #endif
3570 /* This was formerly used only for non-IEEE float.
3571 eggert@twinsun.com says it is safe for IEEE also. */
3572 else
3574 /* There are some simplifications we can do even if the operands
3575 aren't constant. */
3576 switch (code)
3578 case NEG:
3579 case NOT:
3580 /* (not (not X)) == X, similarly for NEG. */
3581 if (GET_CODE (op) == code)
3582 return XEXP (op, 0);
3583 break;
3585 case SIGN_EXTEND:
3586 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3587 becomes just the MINUS if its mode is MODE. This allows
3588 folding switch statements on machines using casesi (such as
3589 the Vax). */
3590 if (GET_CODE (op) == TRUNCATE
3591 && GET_MODE (XEXP (op, 0)) == mode
3592 && GET_CODE (XEXP (op, 0)) == MINUS
3593 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3594 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3595 return XEXP (op, 0);
3597 #ifdef POINTERS_EXTEND_UNSIGNED
3598 if (! POINTERS_EXTEND_UNSIGNED
3599 && mode == Pmode && GET_MODE (op) == ptr_mode
3600 && CONSTANT_P (op))
3601 return convert_memory_address (Pmode, op);
3602 #endif
3603 break;
3605 #ifdef POINTERS_EXTEND_UNSIGNED
3606 case ZERO_EXTEND:
3607 if (POINTERS_EXTEND_UNSIGNED
3608 && mode == Pmode && GET_MODE (op) == ptr_mode
3609 && CONSTANT_P (op))
3610 return convert_memory_address (Pmode, op);
3611 break;
3612 #endif
3614 default:
3615 break;
3618 return 0;
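/* The SIGN_EXTEND folding above uses a mask-then-subtract trick: keep
   the low GET_MODE_BITSIZE (OP_MODE) bits of the argument, then, if the
   sign bit of that narrow field is set, subtract 1 << bitsize so the
   field is re-read as negative.  For example, sign-extending the 8-bit
   value 0xFF gives 0xFF - 0x100 = -1, while 0x7F stays 0x7F.  A
   standalone sketch, valid for BITS smaller than the width of `long';
   it is hypothetical, not code used by this file:  */
#if 0
static long
sign_extend_field (arg, bits)
     long arg;
     int bits;
{
  long val = arg & ~((long) (-1) << bits);	/* Keep the low BITS bits.  */

  if (val & ((long) 1 << (bits - 1)))		/* Narrow sign bit set?  */
    val -= (long) 1 << bits;
  return val;
}
#endif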
3622 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3623 and OP1. Return 0 if no simplification is possible.
3625 Don't use this for relational operations such as EQ or LT.
3626 Use simplify_relational_operation instead. */
3628 rtx
3629 simplify_binary_operation (code, mode, op0, op1)
3630 enum rtx_code code;
3631 enum machine_mode mode;
3632 rtx op0, op1;
3634 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3635 HOST_WIDE_INT val;
3636 int width = GET_MODE_BITSIZE (mode);
3637 rtx tem;
3639 /* Relational operations don't work here. We must know the mode
3640 of the operands in order to do the comparison correctly.
3641 Assuming a full word can give incorrect results.
3642 Consider comparing 128 with -128 in QImode. */
3644 if (GET_RTX_CLASS (code) == '<')
3645 abort ();
3647 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3648 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3649 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3650 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3652 REAL_VALUE_TYPE f0, f1, value;
3653 jmp_buf handler;
3655 if (setjmp (handler))
3656 return 0;
3658 set_float_handler (handler);
3660 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3661 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3662 f0 = real_value_truncate (mode, f0);
3663 f1 = real_value_truncate (mode, f1);
3665 #ifdef REAL_ARITHMETIC
3666 #ifndef REAL_INFINITY
3667 if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3668 return 0;
3669 #endif
3670 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3671 #else
3672 switch (code)
3674 case PLUS:
3675 value = f0 + f1;
3676 break;
3677 case MINUS:
3678 value = f0 - f1;
3679 break;
3680 case MULT:
3681 value = f0 * f1;
3682 break;
3683 case DIV:
3684 #ifndef REAL_INFINITY
3685 if (f1 == 0)
3686 return 0;
3687 #endif
3688 value = f0 / f1;
3689 break;
3690 case SMIN:
3691 value = MIN (f0, f1);
3692 break;
3693 case SMAX:
3694 value = MAX (f0, f1);
3695 break;
3696 default:
3697 abort ();
3699 #endif
3701 value = real_value_truncate (mode, value);
3702 set_float_handler (NULL_PTR);
3703 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3705 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3707 /* We can fold some multi-word operations. */
3708 if (GET_MODE_CLASS (mode) == MODE_INT
3709 && width == HOST_BITS_PER_WIDE_INT * 2
3710 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3711 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3713 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3715 if (GET_CODE (op0) == CONST_DOUBLE)
3716 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3717 else
3718 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3720 if (GET_CODE (op1) == CONST_DOUBLE)
3721 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3722 else
3723 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3725 switch (code)
3727 case MINUS:
3728 /* A - B == A + (-B). */
3729 neg_double (l2, h2, &lv, &hv);
3730 l2 = lv, h2 = hv;
3732 /* ... fall through ... */
3734 case PLUS:
3735 add_double (l1, h1, l2, h2, &lv, &hv);
3736 break;
3738 case MULT:
3739 mul_double (l1, h1, l2, h2, &lv, &hv);
3740 break;
3742 case DIV: case MOD: case UDIV: case UMOD:
3743 /* We'd need to include tree.h to do this and it doesn't seem worth
3744 it. */
3745 return 0;
3747 case AND:
3748 lv = l1 & l2, hv = h1 & h2;
3749 break;
3751 case IOR:
3752 lv = l1 | l2, hv = h1 | h2;
3753 break;
3755 case XOR:
3756 lv = l1 ^ l2, hv = h1 ^ h2;
3757 break;
3759 case SMIN:
3760 if (h1 < h2
3761 || (h1 == h2
3762 && ((unsigned HOST_WIDE_INT) l1
3763 < (unsigned HOST_WIDE_INT) l2)))
3764 lv = l1, hv = h1;
3765 else
3766 lv = l2, hv = h2;
3767 break;
3769 case SMAX:
3770 if (h1 > h2
3771 || (h1 == h2
3772 && ((unsigned HOST_WIDE_INT) l1
3773 > (unsigned HOST_WIDE_INT) l2)))
3774 lv = l1, hv = h1;
3775 else
3776 lv = l2, hv = h2;
3777 break;
3779 case UMIN:
3780 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3781 || (h1 == h2
3782 && ((unsigned HOST_WIDE_INT) l1
3783 < (unsigned HOST_WIDE_INT) l2)))
3784 lv = l1, hv = h1;
3785 else
3786 lv = l2, hv = h2;
3787 break;
3789 case UMAX:
3790 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3791 || (h1 == h2
3792 && ((unsigned HOST_WIDE_INT) l1
3793 > (unsigned HOST_WIDE_INT) l2)))
3794 lv = l1, hv = h1;
3795 else
3796 lv = l2, hv = h2;
3797 break;
3799 case LSHIFTRT: case ASHIFTRT:
3800 case ASHIFT:
3801 case ROTATE: case ROTATERT:
3802 #ifdef SHIFT_COUNT_TRUNCATED
3803 if (SHIFT_COUNT_TRUNCATED)
3804 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3805 #endif
3807 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3808 return 0;
3810 if (code == LSHIFTRT || code == ASHIFTRT)
3811 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3812 code == ASHIFTRT);
3813 else if (code == ASHIFT)
3814 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3815 else if (code == ROTATE)
3816 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3817 else /* code == ROTATERT */
3818 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3819 break;
3821 default:
3822 return 0;
3825 return immed_double_const (lv, hv, mode);
3828 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3829 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3831 /* Even if we can't compute a constant result,
3832 there are some cases worth simplifying. */
3834 switch (code)
3836 case PLUS:
3837 /* In IEEE floating point, x+0 is not the same as x. Similarly
3838 for the other optimizations below. */
3839 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3840 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3841 break;
3843 if (op1 == CONST0_RTX (mode))
3844 return op0;
3846 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3847 if (GET_CODE (op0) == NEG)
3848 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3849 else if (GET_CODE (op1) == NEG)
3850 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3852 /* Handle both-operands-constant cases. We can only add
3853 CONST_INTs to constants since the sum of relocatable symbols
3854 can't be handled by most assemblers. Don't add CONST_INT
3855 to CONST_INT since overflow won't be computed properly if the
3856 mode is wider than HOST_BITS_PER_WIDE_INT. */
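/* E.g. (plus (symbol_ref X) (const_int 4)) can be folded by
plus_constant into (const (plus (symbol_ref X) (const_int 4))), which
assemblers can relocate; a sum of two SYMBOL_REFs is left alone. */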
3858 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3859 && GET_CODE (op1) == CONST_INT)
3860 return plus_constant (op0, INTVAL (op1));
3861 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3862 && GET_CODE (op0) == CONST_INT)
3863 return plus_constant (op1, INTVAL (op0));
3865 /* See if this is something like X * C + X or vice versa or
3866 if the multiplication is written as a shift. If so, we can
3867 distribute and make a new multiply or shift, or maybe just
3868 have X. But don't make a real multiply if we didn't have
3869 one before. */
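/* For instance, (plus (mult x 2) x) distributes to (mult x 3); a
multiply was already present, so HAD_MULT allows the new one. And
(plus (ashift x 1) (neg x)), i.e. x*2 - x, collapses to just x, with
no multiply introduced at all. */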
3871 if (! FLOAT_MODE_P (mode))
3873 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3874 rtx lhs = op0, rhs = op1;
3875 int had_mult = 0;
3877 if (GET_CODE (lhs) == NEG)
3878 coeff0 = -1, lhs = XEXP (lhs, 0);
3879 else if (GET_CODE (lhs) == MULT
3880 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3882 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3883 had_mult = 1;
3885 else if (GET_CODE (lhs) == ASHIFT
3886 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3887 && INTVAL (XEXP (lhs, 1)) >= 0
3888 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3890 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3891 lhs = XEXP (lhs, 0);
3894 if (GET_CODE (rhs) == NEG)
3895 coeff1 = -1, rhs = XEXP (rhs, 0);
3896 else if (GET_CODE (rhs) == MULT
3897 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3899 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3900 had_mult = 1;
3902 else if (GET_CODE (rhs) == ASHIFT
3903 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3904 && INTVAL (XEXP (rhs, 1)) >= 0
3905 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3907 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3908 rhs = XEXP (rhs, 0);
3911 if (rtx_equal_p (lhs, rhs))
3913 tem = cse_gen_binary (MULT, mode, lhs,
3914 GEN_INT (coeff0 + coeff1));
3915 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3919 /* If one of the operands is a PLUS or a MINUS, see if we can
3920 simplify this by the associative law.
3921 Don't use the associative law for floating point.
3922 The inaccuracy makes it nonassociative,
3923 and subtle programs can break if operations are associated. */
3925 if (INTEGRAL_MODE_P (mode)
3926 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3927 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3928 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3929 return tem;
3930 break;
3932 case COMPARE:
3933 #ifdef HAVE_cc0
3934 /* When using cc0, convert (compare FOO (const_int 0)) to FOO.
3935 Without cc0 we want to leave it as a COMPARE so we can
3936 distinguish it from a register-register copy.
3938 In IEEE floating point, x-0 is not the same as x. */
3940 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3941 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3942 && op1 == CONST0_RTX (mode))
3943 return op0;
3944 #else
3945 /* Do nothing here. */
3946 #endif
3947 break;
3949 case MINUS:
3950 /* None of these optimizations can be done for IEEE
3951 floating point. */
3952 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3953 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3954 break;
3956 /* We can't assume x-x is 0 even with non-IEEE floating point,
3957 but since it is zero except in very strange circumstances, we
3958 will treat it as zero with -ffast-math. */
3959 if (rtx_equal_p (op0, op1)
3960 && ! side_effects_p (op0)
3961 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3962 return CONST0_RTX (mode);
3964 /* Change subtraction from zero into negation. */
3965 if (op0 == CONST0_RTX (mode))
3966 return gen_rtx_NEG (mode, op1);
3968 /* (-1 - a) is ~a. */
3969 if (op0 == constm1_rtx)
3970 return gen_rtx_NOT (mode, op1);
3972 /* Subtracting 0 has no effect. */
3973 if (op1 == CONST0_RTX (mode))
3974 return op0;
3976 /* See if this is something like X * C - X or vice versa or
3977 if the multiplication is written as a shift. If so, we can
3978 distribute and make a new multiply, shift, or maybe just
3979 have X (if C is 2 in the example above). But don't make a
3980 real multiply if we didn't have one before. */
3982 if (! FLOAT_MODE_P (mode))
3984 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3985 rtx lhs = op0, rhs = op1;
3986 int had_mult = 0;
3988 if (GET_CODE (lhs) == NEG)
3989 coeff0 = -1, lhs = XEXP (lhs, 0);
3990 else if (GET_CODE (lhs) == MULT
3991 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3993 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3994 had_mult = 1;
3996 else if (GET_CODE (lhs) == ASHIFT
3997 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3998 && INTVAL (XEXP (lhs, 1)) >= 0
3999 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
4001 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
4002 lhs = XEXP (lhs, 0);
4005 if (GET_CODE (rhs) == NEG)
4006 coeff1 = -1, rhs = XEXP (rhs, 0);
4007 else if (GET_CODE (rhs) == MULT
4008 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
4010 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
4011 had_mult = 1;
4013 else if (GET_CODE (rhs) == ASHIFT
4014 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
4015 && INTVAL (XEXP (rhs, 1)) >= 0
4016 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
4018 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
4019 rhs = XEXP (rhs, 0);
4022 if (rtx_equal_p (lhs, rhs))
4024 tem = cse_gen_binary (MULT, mode, lhs,
4025 GEN_INT (coeff0 - coeff1));
4026 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
4030 /* (a - (-b)) -> (a + b). */
4031 if (GET_CODE (op1) == NEG)
4032 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
4034 /* If one of the operands is a PLUS or a MINUS, see if we can
4035 simplify this by the associative law.
4036 Don't use the associative law for floating point.
4037 The inaccuracy makes it nonassociative,
4038 and subtle programs can break if operations are associated. */
4040 if (INTEGRAL_MODE_P (mode)
4041 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
4042 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
4043 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
4044 return tem;
4046 /* Don't let a relocatable value get a negative coeff. */
4047 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
4048 return plus_constant (op0, - INTVAL (op1));
4050 /* (x - (x & y)) -> (x & ~y) */
4051 if (GET_CODE (op1) == AND)
4053 if (rtx_equal_p (op0, XEXP (op1, 0)))
4054 return cse_gen_binary (AND, mode, op0,
4055 gen_rtx_NOT (mode, XEXP (op1, 1)));
4056 if (rtx_equal_p (op0, XEXP (op1, 1)))
4057 return cse_gen_binary (AND, mode, op0,
4058 gen_rtx_NOT (mode, XEXP (op1, 0)));
4060 break;
4062 case MULT:
4063 if (op1 == constm1_rtx)
4065 tem = simplify_unary_operation (NEG, mode, op0, mode);
4067 return tem ? tem : gen_rtx_NEG (mode, op0);
4070 /* In IEEE floating point, x*0 is not always 0. */
4071 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4072 || ! FLOAT_MODE_P (mode) || flag_fast_math)
4073 && op1 == CONST0_RTX (mode)
4074 && ! side_effects_p (op0))
4075 return op1;
4077 /* In IEEE floating point, x*1 is not equivalent to x for
4078 signaling NaNs. However, ANSI says we can drop signals,
4079 so we can do this anyway. */
4080 if (op1 == CONST1_RTX (mode))
4081 return op0;
4083 /* Convert multiply by constant power of two into shift unless
4084 we are still generating RTL. This test is a kludge. */
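/* E.g. (mult x (const_int 8)) becomes (ashift x (const_int 3))
once RTL generation is over. */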
4085 if (GET_CODE (op1) == CONST_INT
4086 && (val = exact_log2 (INTVAL (op1))) >= 0
4087 /* If the mode is larger than the host word size, and the
4088 uppermost bit is set, then this isn't a power of two due
4089 to implicit sign extension. */
4090 && (width <= HOST_BITS_PER_WIDE_INT
4091 || val != HOST_BITS_PER_WIDE_INT - 1)
4092 && ! rtx_equal_function_value_matters)
4093 return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
4095 if (GET_CODE (op1) == CONST_DOUBLE
4096 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
4098 REAL_VALUE_TYPE d;
4099 jmp_buf handler;
4100 int op1is2, op1ism1;
4102 if (setjmp (handler))
4103 return 0;
4105 set_float_handler (handler);
4106 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4107 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
4108 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
4109 set_float_handler (NULL_PTR);
4111 /* x*2 is x+x and x*(-1) is -x */
4112 if (op1is2 && GET_MODE (op0) == mode)
4113 return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
4115 else if (op1ism1 && GET_MODE (op0) == mode)
4116 return gen_rtx_NEG (mode, op0);
4118 break;
4120 case IOR:
4121 if (op1 == const0_rtx)
4122 return op0;
4123 if (GET_CODE (op1) == CONST_INT
4124 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4125 return op1;
4126 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4127 return op0;
4128 /* A | (~A) -> -1 */
4129 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4130 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4131 && ! side_effects_p (op0)
4132 && GET_MODE_CLASS (mode) != MODE_CC)
4133 return constm1_rtx;
4134 break;
4136 case XOR:
4137 if (op1 == const0_rtx)
4138 return op0;
4139 if (GET_CODE (op1) == CONST_INT
4140 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4141 return gen_rtx_NOT (mode, op0);
4142 if (op0 == op1 && ! side_effects_p (op0)
4143 && GET_MODE_CLASS (mode) != MODE_CC)
4144 return const0_rtx;
4145 break;
4147 case AND:
4148 if (op1 == const0_rtx && ! side_effects_p (op0))
4149 return const0_rtx;
4150 if (GET_CODE (op1) == CONST_INT
4151 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4152 return op0;
4153 if (op0 == op1 && ! side_effects_p (op0)
4154 && GET_MODE_CLASS (mode) != MODE_CC)
4155 return op0;
4156 /* A & (~A) -> 0 */
4157 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4158 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4159 && ! side_effects_p (op0)
4160 && GET_MODE_CLASS (mode) != MODE_CC)
4161 return const0_rtx;
4162 break;
4164 case UDIV:
4165 /* Convert divide by power of two into shift (divide by 1 handled
4166 below). */
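/* E.g. (udiv x (const_int 16)) becomes (lshiftrt x (const_int 4)). */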
4167 if (GET_CODE (op1) == CONST_INT
4168 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
4169 return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
4171 /* ... fall through ... */
4173 case DIV:
4174 if (op1 == CONST1_RTX (mode))
4175 return op0;
4177 /* In IEEE floating point, 0/x is not always 0. */
4178 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4179 || ! FLOAT_MODE_P (mode) || flag_fast_math)
4180 && op0 == CONST0_RTX (mode)
4181 && ! side_effects_p (op1))
4182 return op0;
4184 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4185 /* Change division by a constant into multiplication. Only do
4186 this with -ffast-math until an expert says it is safe in
4187 general. */
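/* E.g. with -ffast-math, (div x (const_double 4.0)) becomes
(mult x (const_double 0.25)). */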
4188 else if (GET_CODE (op1) == CONST_DOUBLE
4189 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
4190 && op1 != CONST0_RTX (mode)
4191 && flag_fast_math)
4193 REAL_VALUE_TYPE d;
4194 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4196 if (! REAL_VALUES_EQUAL (d, dconst0))
4198 #if defined (REAL_ARITHMETIC)
4199 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
4200 return gen_rtx_MULT (mode, op0,
4201 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
4202 #else
4203 return
4204 gen_rtx_MULT (mode, op0,
4205 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
4206 #endif
4209 #endif
4210 break;
4212 case UMOD:
4213 /* Handle modulus by power of two (mod with 1 handled below). */
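/* E.g. (umod x (const_int 8)) becomes (and x (const_int 7)). */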
4214 if (GET_CODE (op1) == CONST_INT
4215 && exact_log2 (INTVAL (op1)) > 0)
4216 return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
4218 /* ... fall through ... */
4220 case MOD:
4221 if ((op0 == const0_rtx || op1 == const1_rtx)
4222 && ! side_effects_p (op0) && ! side_effects_p (op1))
4223 return const0_rtx;
4224 break;
4226 case ROTATERT:
4227 case ROTATE:
4228 /* Rotating ~0 always results in ~0. */
4229 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
4230 && (unsigned HOST_WIDE_INT) INTVAL (op0) == GET_MODE_MASK (mode)
4231 && ! side_effects_p (op1))
4232 return op0;
4234 /* ... fall through ... */
4236 case ASHIFT:
4237 case ASHIFTRT:
4238 case LSHIFTRT:
4239 if (op1 == const0_rtx)
4240 return op0;
4241 if (op0 == const0_rtx && ! side_effects_p (op1))
4242 return op0;
4243 break;
4245 case SMIN:
4246 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4247 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
4248 && ! side_effects_p (op0))
4249 return op1;
4250 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4251 return op0;
4252 break;
4254 case SMAX:
4255 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4256 && ((unsigned HOST_WIDE_INT) INTVAL (op1)
4257 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4258 && ! side_effects_p (op0))
4259 return op1;
4260 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4261 return op0;
4262 break;
4264 case UMIN:
4265 if (op1 == const0_rtx && ! side_effects_p (op0))
4266 return op1;
4267 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4268 return op0;
4269 break;
4271 case UMAX:
4272 if (op1 == constm1_rtx && ! side_effects_p (op0))
4273 return op1;
4274 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4275 return op0;
4276 break;
4278 default:
4279 abort ();
4282 return 0;
4285 /* Get the integer argument values in two forms:
4286 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
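/* For example, in QImode (width 8) an operand whose low byte is 0xff
gives arg0 == 255 zero-extended but arg0s == -1 sign-extended; the
signed cases below use the ...s forms, the unsigned cases the plain
forms. */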
4288 arg0 = INTVAL (op0);
4289 arg1 = INTVAL (op1);
4291 if (width < HOST_BITS_PER_WIDE_INT)
4293 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4294 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4296 arg0s = arg0;
4297 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4298 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4300 arg1s = arg1;
4301 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4302 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4304 else
4306 arg0s = arg0;
4307 arg1s = arg1;
4310 /* Compute the value of the arithmetic. */
4312 switch (code)
4314 case PLUS:
4315 val = arg0s + arg1s;
4316 break;
4318 case MINUS:
4319 val = arg0s - arg1s;
4320 break;
4322 case MULT:
4323 val = arg0s * arg1s;
4324 break;
4326 case DIV:
4327 if (arg1s == 0)
4328 return 0;
4329 val = arg0s / arg1s;
4330 break;
4332 case MOD:
4333 if (arg1s == 0)
4334 return 0;
4335 val = arg0s % arg1s;
4336 break;
4338 case UDIV:
4339 if (arg1 == 0)
4340 return 0;
4341 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4342 break;
4344 case UMOD:
4345 if (arg1 == 0)
4346 return 0;
4347 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4348 break;
4350 case AND:
4351 val = arg0 & arg1;
4352 break;
4354 case IOR:
4355 val = arg0 | arg1;
4356 break;
4358 case XOR:
4359 val = arg0 ^ arg1;
4360 break;
4362 case LSHIFTRT:
4363 /* If shift count is undefined, don't fold it; let the machine do
4364 what it wants. But truncate it if the machine will do that. */
4365 if (arg1 < 0)
4366 return 0;
4368 #ifdef SHIFT_COUNT_TRUNCATED
4369 if (SHIFT_COUNT_TRUNCATED)
4370 arg1 %= width;
4371 #endif
4373 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4374 break;
4376 case ASHIFT:
4377 if (arg1 < 0)
4378 return 0;
4380 #ifdef SHIFT_COUNT_TRUNCATED
4381 if (SHIFT_COUNT_TRUNCATED)
4382 arg1 %= width;
4383 #endif
4385 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4386 break;
4388 case ASHIFTRT:
4389 if (arg1 < 0)
4390 return 0;
4392 #ifdef SHIFT_COUNT_TRUNCATED
4393 if (SHIFT_COUNT_TRUNCATED)
4394 arg1 %= width;
4395 #endif
4397 val = arg0s >> arg1;
4399 /* The bootstrap compiler may not have sign-extended the right shift.
4400 Manually extend the sign to ensure the bootstrap cc matches gcc. */
4401 if (arg0s < 0 && arg1 > 0)
4402 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4404 break;
4406 case ROTATERT:
4407 if (arg1 < 0)
4408 return 0;
4410 arg1 %= width;
4411 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4412 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4413 break;
4415 case ROTATE:
4416 if (arg1 < 0)
4417 return 0;
4419 arg1 %= width;
4420 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4421 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4422 break;
4424 case COMPARE:
4425 /* Do nothing here. */
4426 return 0;
4428 case SMIN:
4429 val = arg0s <= arg1s ? arg0s : arg1s;
4430 break;
4432 case UMIN:
4433 val = ((unsigned HOST_WIDE_INT) arg0
4434 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4435 break;
4437 case SMAX:
4438 val = arg0s > arg1s ? arg0s : arg1s;
4439 break;
4441 case UMAX:
4442 val = ((unsigned HOST_WIDE_INT) arg0
4443 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4444 break;
4446 default:
4447 abort ();
4450 val = trunc_int_for_mode (val, mode);
4452 return GEN_INT (val);
4455 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4456 PLUS or MINUS.
4458 Rather than test for specific cases, we do this by a brute-force method
4459 and do all possible simplifications until no more changes occur. Then
4460 we rebuild the operation. */
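/* For example, given (plus (minus x y) (minus y z)), the operand list
expands to x, y, -y, -z; the y and -y pair simplifies away, and the
remaining operands are rebuilt as (minus x z). */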
4462 static rtx
4463 simplify_plus_minus (code, mode, op0, op1)
4464 enum rtx_code code;
4465 enum machine_mode mode;
4466 rtx op0, op1;
4468 rtx ops[8];
4469 int negs[8];
4470 rtx result, tem;
4471 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4472 int first = 1, negate = 0, changed;
4473 int i, j;
4475 bzero ((char *) ops, sizeof ops);
4477 /* Set up the two operands and then expand them until nothing has been
4478 changed. If we run out of room in our array, give up; this should
4479 almost never happen. */
4481 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4483 changed = 1;
4484 while (changed)
4486 changed = 0;
4488 for (i = 0; i < n_ops; i++)
4489 switch (GET_CODE (ops[i]))
4491 case PLUS:
4492 case MINUS:
4493 if (n_ops == 7)
4494 return 0;
4496 ops[n_ops] = XEXP (ops[i], 1);
4497 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4498 ops[i] = XEXP (ops[i], 0);
4499 input_ops++;
4500 changed = 1;
4501 break;
4503 case NEG:
4504 ops[i] = XEXP (ops[i], 0);
4505 negs[i] = ! negs[i];
4506 changed = 1;
4507 break;
4509 case CONST:
4510 ops[i] = XEXP (ops[i], 0);
4511 input_consts++;
4512 changed = 1;
4513 break;
4515 case NOT:
4516 /* ~a -> (-a - 1) */
4517 if (n_ops != 7)
4519 ops[n_ops] = constm1_rtx;
4520 negs[n_ops++] = negs[i];
4521 ops[i] = XEXP (ops[i], 0);
4522 negs[i] = ! negs[i];
4523 changed = 1;
4525 break;
4527 case CONST_INT:
4528 if (negs[i])
4529 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4530 break;
4532 default:
4533 break;
4537 /* If we only have two operands, we can't do anything. */
4538 if (n_ops <= 2)
4539 return 0;
4541 /* Now simplify each pair of operands until nothing changes. The first
4542 time through just simplify constants against each other. */
4544 changed = 1;
4545 while (changed)
4547 changed = first;
4549 for (i = 0; i < n_ops - 1; i++)
4550 for (j = i + 1; j < n_ops; j++)
4551 if (ops[i] != 0 && ops[j] != 0
4552 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4554 rtx lhs = ops[i], rhs = ops[j];
4555 enum rtx_code ncode = PLUS;
4557 if (negs[i] && ! negs[j])
4558 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4559 else if (! negs[i] && negs[j])
4560 ncode = MINUS;
4562 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4563 if (tem)
4565 ops[i] = tem, ops[j] = 0;
4566 negs[i] = negs[i] && negs[j];
4567 if (GET_CODE (tem) == NEG)
4568 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4570 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4571 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4572 changed = 1;
4576 first = 0;
4579 /* Pack all the operands to the lower-numbered entries and give up if
4580 we didn't reduce the number of operands we had. Make sure we
4581 count a CONST as two operands. If we have the same number of
4582 operands, but have made more CONSTs than we had, this is also
4583 an improvement, so accept it. */
4585 for (i = 0, j = 0; j < n_ops; j++)
4586 if (ops[j] != 0)
4588 ops[i] = ops[j], negs[i++] = negs[j];
4589 if (GET_CODE (ops[j]) == CONST)
4590 n_consts++;
4593 if (i + n_consts > input_ops
4594 || (i + n_consts == input_ops && n_consts <= input_consts))
4595 return 0;
4597 n_ops = i;
4599 /* If we have a CONST_INT, put it last. */
4600 for (i = 0; i < n_ops - 1; i++)
4601 if (GET_CODE (ops[i]) == CONST_INT)
4603 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4604 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4607 /* Put a non-negated operand first. If there aren't any, make all
4608 operands positive and negate the whole thing later. */
4609 for (i = 0; i < n_ops && negs[i]; i++)
4612 if (i == n_ops)
4614 for (i = 0; i < n_ops; i++)
4615 negs[i] = 0;
4616 negate = 1;
4618 else if (i != 0)
4620 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4621 j = negs[0], negs[0] = negs[i], negs[i] = j;
4624 /* Now make the result by performing the requested operations. */
4625 result = ops[0];
4626 for (i = 1; i < n_ops; i++)
4627 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4629 return negate ? gen_rtx_NEG (mode, result) : result;
4632 /* Make a binary operation by properly ordering the operands and
4633 seeing if the expression folds. */
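/* E.g. for the commutative PLUS, (plus (const_int 3) x) is reordered
to (plus x (const_int 3)) before folding, so equivalent expressions
reach a single canonical form. */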
4635 static rtx
4636 cse_gen_binary (code, mode, op0, op1)
4637 enum rtx_code code;
4638 enum machine_mode mode;
4639 rtx op0, op1;
4641 rtx tem;
4643 /* Put complex operands first and constants second if commutative. */
4644 if (GET_RTX_CLASS (code) == 'c'
4645 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4646 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4647 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4648 || (GET_CODE (op0) == SUBREG
4649 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4650 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4651 tem = op0, op0 = op1, op1 = tem;
4653 /* If this simplifies, do it. */
4654 tem = simplify_binary_operation (code, mode, op0, op1);
4656 if (tem)
4657 return tem;
4659 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4660 just form the operation. */
4662 if (code == PLUS && GET_CODE (op1) == CONST_INT
4663 && GET_MODE (op0) != VOIDmode)
4664 return plus_constant (op0, INTVAL (op1));
4665 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4666 && GET_MODE (op0) != VOIDmode)
4667 return plus_constant (op0, - INTVAL (op1));
4668 else
4669 return gen_rtx_fmt_ee (code, mode, op0, op1);
4672 struct cfc_args
4674 /* Input */
4675 rtx op0, op1;
4676 /* Output */
4677 int equal, op0lt, op1lt;
4680 static void
4681 check_fold_consts (data)
4682 PTR data;
4684 struct cfc_args * args = (struct cfc_args *) data;
4685 REAL_VALUE_TYPE d0, d1;
4687 REAL_VALUE_FROM_CONST_DOUBLE (d0, args->op0);
4688 REAL_VALUE_FROM_CONST_DOUBLE (d1, args->op1);
4689 args->equal = REAL_VALUES_EQUAL (d0, d1);
4690 args->op0lt = REAL_VALUES_LESS (d0, d1);
4691 args->op1lt = REAL_VALUES_LESS (d1, d0);
4694 /* Like simplify_binary_operation except used for relational operators.
4695 MODE is the mode of the operands, not that of the result. If MODE
4696 is VOIDmode, both operands must also be VOIDmode and we compare the
4697 operands in "infinite precision".
4699 If no simplification is possible, this function returns zero. Otherwise,
4700 it returns either const_true_rtx or const0_rtx. */
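/* E.g. (ltu x (const_int 0)) is always false and folds to const0_rtx,
while (geu x (const_int 0)) is always true and folds to const_true_rtx,
without knowing anything about X. */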
4703 simplify_relational_operation (code, mode, op0, op1)
4704 enum rtx_code code;
4705 enum machine_mode mode;
4706 rtx op0, op1;
4708 int equal, op0lt, op0ltu, op1lt, op1ltu;
4709 rtx tem;
4711 /* If op0 is a compare, extract the comparison arguments from it. */
4712 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4713 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4715 /* We can't simplify MODE_CC values since we don't know what the
4716 actual comparison is. */
4717 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4718 #ifdef HAVE_cc0
4719 || op0 == cc0_rtx
4720 #endif
4722 return 0;
4724 /* For integer comparisons of A and B, maybe we can simplify A - B and
4725 then simplify a comparison of that with zero. If A and B are both either
4726 a register or a CONST_INT, this can't help; testing for these cases will
4727 prevent infinite recursion here and speed things up.
4729 If CODE is an unsigned comparison, then we can never do this optimization,
4730 because it gives an incorrect result if the subtraction wraps around zero.
4731 ANSI C defines unsigned operations such that they never overflow, and
4732 thus such cases cannot be ignored. */
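/* For instance, (lt (plus x 4) (plus x 9)) is handled by simplifying
the difference to (const_int -5) and then folding the signed
comparison of -5 against zero to const_true_rtx. */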
4734 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4735 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4736 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4737 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4738 && code != GTU && code != GEU && code != LTU && code != LEU)
4739 return simplify_relational_operation (signed_condition (code),
4740 mode, tem, const0_rtx);
4742 /* For non-IEEE floating-point, if the two operands are equal, we know the
4743 result. */
4744 if (rtx_equal_p (op0, op1)
4745 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4746 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4747 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4749 /* If the operands are floating-point constants, see if we can fold
4750 the result. */
4751 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4752 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4753 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4755 struct cfc_args args;
4757 /* Set up input for check_fold_consts (). */
4758 args.op0 = op0;
4759 args.op1 = op1;
4761 if (do_float_handler(check_fold_consts, (PTR) &args) == 0)
4762 /* We got an exception from check_fold_consts() */
4763 return 0;
4765 /* Receive output from check_fold_consts() */
4766 equal = args.equal;
4767 op0lt = op0ltu = args.op0lt;
4768 op1lt = op1ltu = args.op1lt;
4770 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4772 /* Otherwise, see if the operands are both integers. */
4773 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4774 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4775 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4777 int width = GET_MODE_BITSIZE (mode);
4778 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4779 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4781 /* Get the two words comprising each integer constant. */
4782 if (GET_CODE (op0) == CONST_DOUBLE)
4784 l0u = l0s = CONST_DOUBLE_LOW (op0);
4785 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4787 else
4789 l0u = l0s = INTVAL (op0);
4790 h0u = h0s = l0s < 0 ? -1 : 0;
4793 if (GET_CODE (op1) == CONST_DOUBLE)
4795 l1u = l1s = CONST_DOUBLE_LOW (op1);
4796 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4798 else
4800 l1u = l1s = INTVAL (op1);
4801 h1u = h1s = l1s < 0 ? -1 : 0;
4804 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4805 we have to sign or zero-extend the values. */
4806 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4807 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4809 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4811 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4812 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4814 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4815 l0s |= ((HOST_WIDE_INT) (-1) << width);
4817 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4818 l1s |= ((HOST_WIDE_INT) (-1) << width);
4821 equal = (h0u == h1u && l0u == l1u);
4822 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4823 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4824 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4825 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4828 /* Otherwise, there are some code-specific tests we can make. */
4829 else
4831 switch (code)
4833 case EQ:
4834 /* References to the frame plus a constant or labels cannot
4835 be zero, but a SYMBOL_REF can due to #pragma weak. */
4836 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4837 || GET_CODE (op0) == LABEL_REF)
4838 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4839 /* On some machines, the ap reg can be 0 sometimes. */
4840 && op0 != arg_pointer_rtx
4841 #endif
4843 return const0_rtx;
4844 break;
4846 case NE:
4847 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4848 || GET_CODE (op0) == LABEL_REF)
4849 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4850 && op0 != arg_pointer_rtx
4851 #endif
4853 return const_true_rtx;
4854 break;
4856 case GEU:
4857 /* Unsigned values are never negative. */
4858 if (op1 == const0_rtx)
4859 return const_true_rtx;
4860 break;
4862 case LTU:
4863 if (op1 == const0_rtx)
4864 return const0_rtx;
4865 break;
4867 case LEU:
4868 /* Unsigned values are never greater than the largest
4869 unsigned value. */
4870 if (GET_CODE (op1) == CONST_INT
4871 && (unsigned HOST_WIDE_INT) INTVAL (op1) == GET_MODE_MASK (mode)
4872 && INTEGRAL_MODE_P (mode))
4873 return const_true_rtx;
4874 break;
4876 case GTU:
4877 if (GET_CODE (op1) == CONST_INT
4878 && (unsigned HOST_WIDE_INT) INTVAL (op1) == GET_MODE_MASK (mode)
4879 && INTEGRAL_MODE_P (mode))
4880 return const0_rtx;
4881 break;
4883 default:
4884 break;
4887 return 0;
4890 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4891 as appropriate. */
4892 switch (code)
4894 case EQ:
4895 return equal ? const_true_rtx : const0_rtx;
4896 case NE:
4897 return ! equal ? const_true_rtx : const0_rtx;
4898 case LT:
4899 return op0lt ? const_true_rtx : const0_rtx;
4900 case GT:
4901 return op1lt ? const_true_rtx : const0_rtx;
4902 case LTU:
4903 return op0ltu ? const_true_rtx : const0_rtx;
4904 case GTU:
4905 return op1ltu ? const_true_rtx : const0_rtx;
4906 case LE:
4907 return equal || op0lt ? const_true_rtx : const0_rtx;
4908 case GE:
4909 return equal || op1lt ? const_true_rtx : const0_rtx;
4910 case LEU:
4911 return equal || op0ltu ? const_true_rtx : const0_rtx;
4912 case GEU:
4913 return equal || op1ltu ? const_true_rtx : const0_rtx;
4914 default:
4915 abort ();
4919 /* Simplify CODE, an operation with result mode MODE and three operands,
4920 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4921 a constant. Return 0 if no simplification is possible. */
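/* For example, assuming BITS_BIG_ENDIAN is 0,
(zero_extract:QI (const_int 0xa5) (const_int 4) (const_int 0))
extracts the low four bits and folds to (const_int 5). */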
4924 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4925 enum rtx_code code;
4926 enum machine_mode mode, op0_mode;
4927 rtx op0, op1, op2;
4929 int width = GET_MODE_BITSIZE (mode);
4931 /* VOIDmode means "infinite" precision. */
4932 if (width == 0)
4933 width = HOST_BITS_PER_WIDE_INT;
4935 switch (code)
4937 case SIGN_EXTRACT:
4938 case ZERO_EXTRACT:
4939 if (GET_CODE (op0) == CONST_INT
4940 && GET_CODE (op1) == CONST_INT
4941 && GET_CODE (op2) == CONST_INT
4942 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4943 && width <= HOST_BITS_PER_WIDE_INT)
4945 /* Extracting a bit-field from a constant */
4946 HOST_WIDE_INT val = INTVAL (op0);
4948 if (BITS_BIG_ENDIAN)
4949 val >>= (GET_MODE_BITSIZE (op0_mode)
4950 - INTVAL (op2) - INTVAL (op1));
4951 else
4952 val >>= INTVAL (op2);
4954 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4956 /* First zero-extend. */
4957 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4958 /* If desired, propagate sign bit. */
4959 if (code == SIGN_EXTRACT
4960 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4961 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4964 /* Clear the bits that don't belong in our mode,
4965 unless they and our sign bit are all one.
4966 So we get either a reasonable negative value or a reasonable
4967 unsigned value for this mode. */
4968 if (width < HOST_BITS_PER_WIDE_INT
4969 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4970 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4971 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4973 return GEN_INT (val);
4975 break;
4977 case IF_THEN_ELSE:
4978 if (GET_CODE (op0) == CONST_INT)
4979 return op0 != const0_rtx ? op1 : op2;
4981 /* Convert a == b ? b : a to "a". */
4982 if (GET_CODE (op0) == NE && ! side_effects_p (op0)
4983 && rtx_equal_p (XEXP (op0, 0), op1)
4984 && rtx_equal_p (XEXP (op0, 1), op2))
4985 return op1;
4986 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
4987 && rtx_equal_p (XEXP (op0, 1), op1)
4988 && rtx_equal_p (XEXP (op0, 0), op2))
4989 return op2;
4990 else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
4992 rtx temp;
4993 temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4994 XEXP (op0, 0), XEXP (op0, 1));
4995 /* See if any simplifications were possible. */
4996 if (temp == const0_rtx)
4997 return op2;
4998 else if (temp == const1_rtx)
4999 return op1;
5001 break;
5003 default:
5004 abort ();
5007 return 0;
5010 /* If X is a nontrivial arithmetic operation on an argument
5011 for which a constant value can be determined, return
5012 the result of operating on that value, as a constant.
5013 Otherwise, return X, possibly with one or more operands
5014 modified by recursive calls to this function.
5016 If X is a register whose contents are known, we do NOT
5017 return those contents here. equiv_constant is called to
5018 perform that task.
5020 INSN is the insn that we may be modifying. If it is 0, make a copy
5021 of X before modifying it. */
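/* For example, if (reg 65) is known to hold (const_int 7), folding
(plus:SI (reg 65) (const_int 1)) yields (const_int 8); a bare
(reg 65) is returned unchanged, since reporting register contents is
equiv_constant's job. */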
5023 static rtx
5024 fold_rtx (x, insn)
5025 rtx x;
5026 rtx insn;
5028 register enum rtx_code code;
5029 register enum machine_mode mode;
5030 register const char *fmt;
5031 register int i;
5032 rtx new = 0;
5033 int copied = 0;
5034 int must_swap = 0;
5036 /* Folded equivalents of first two operands of X. */
5037 rtx folded_arg0;
5038 rtx folded_arg1;
5040 /* Constant equivalents of first three operands of X;
5041 0 when no such equivalent is known. */
5042 rtx const_arg0;
5043 rtx const_arg1;
5044 rtx const_arg2;
5046 /* The mode of the first operand of X. We need this for sign and zero
5047 extends. */
5048 enum machine_mode mode_arg0;
5050 if (x == 0)
5051 return x;
5053 mode = GET_MODE (x);
5054 code = GET_CODE (x);
5055 switch (code)
5057 case CONST:
5058 case CONST_INT:
5059 case CONST_DOUBLE:
5060 case SYMBOL_REF:
5061 case LABEL_REF:
5062 case REG:
5063 /* No use simplifying an EXPR_LIST,
5064 since it is used only for lists of args
5065 in a function call's REG_EQUAL note. */
5066 case EXPR_LIST:
5067 /* Changing anything inside an ADDRESSOF is incorrect; we don't
5068 want to (e.g.,) make (addressof (const_int 0)) just because
5069 the location is known to be zero. */
5070 case ADDRESSOF:
5071 return x;
5073 #ifdef HAVE_cc0
5074 case CC0:
5075 return prev_insn_cc0;
5076 #endif
5078 case PC:
5079 /* If the next insn is a CODE_LABEL followed by a jump table,
5080 PC's value is a LABEL_REF pointing to that label. That
5081 lets us fold switch statements on the Vax. */
5082 if (insn && GET_CODE (insn) == JUMP_INSN)
5084 rtx next = next_nonnote_insn (insn);
5086 if (next && GET_CODE (next) == CODE_LABEL
5087 && NEXT_INSN (next) != 0
5088 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
5089 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
5090 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
5091 return gen_rtx_LABEL_REF (Pmode, next);
5093 break;
5095 case SUBREG:
5096 /* See if we previously assigned a constant value to this SUBREG. */
5097 if ((new = lookup_as_function (x, CONST_INT)) != 0
5098 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
5099 return new;
5101 /* If this is a paradoxical SUBREG, we have no idea what value the
5102 extra bits would have. However, if the operand is equivalent
5103 to a SUBREG whose operand is the same as our mode, and all the
5104 modes are within a word, we can just use the inner operand
5105 because these SUBREGs just say how to treat the register.
5107 Similarly if we find an integer constant. */
5109 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5111 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5112 struct table_elt *elt;
5114 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5115 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5116 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5117 imode)) != 0)
5118 for (elt = elt->first_same_value;
5119 elt; elt = elt->next_same_value)
5121 if (CONSTANT_P (elt->exp)
5122 && GET_MODE (elt->exp) == VOIDmode)
5123 return elt->exp;
5125 if (GET_CODE (elt->exp) == SUBREG
5126 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5127 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5128 return copy_rtx (SUBREG_REG (elt->exp));
5131 return x;
5134 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
5135 We might be able to if the SUBREG is extracting a single word in an
5136 integral mode or extracting the low part. */
5138 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5139 const_arg0 = equiv_constant (folded_arg0);
5140 if (const_arg0)
5141 folded_arg0 = const_arg0;
5143 if (folded_arg0 != SUBREG_REG (x))
5145 new = 0;
5147 if (GET_MODE_CLASS (mode) == MODE_INT
5148 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5149 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5150 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5151 GET_MODE (SUBREG_REG (x)));
5152 if (new == 0 && subreg_lowpart_p (x))
5153 new = gen_lowpart_if_possible (mode, folded_arg0);
5154 if (new)
5155 return new;
5158 /* If this is a narrowing SUBREG and our operand is a REG, see if
5159 we can find an equivalence for REG that is an arithmetic operation
5160 in a wider mode where both operands are paradoxical SUBREGs
5161 from objects of our result mode. In that case, we couldn't report
5162 an equivalent value for that operation, since we don't know what the
5163 extra bits will be. But we can find an equivalence for this SUBREG
5164 by folding that operation in the narrow mode. This allows us to
5165 fold arithmetic in narrow modes when the machine only supports
5166 word-sized arithmetic.
5168 Also look for a case where we have a SUBREG whose operand is the
5169 same as our result. If both modes are smaller than a word, we
5170 are simply interpreting a register in different modes and we
5171 can use the inner value. */
5173 if (GET_CODE (folded_arg0) == REG
5174 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5175 && subreg_lowpart_p (x))
5177 struct table_elt *elt;
5179 /* We can use HASH here since we know that canon_hash won't be
5180 called. */
5181 elt = lookup (folded_arg0,
5182 HASH (folded_arg0, GET_MODE (folded_arg0)),
5183 GET_MODE (folded_arg0));
5185 if (elt)
5186 elt = elt->first_same_value;
5188 for (; elt; elt = elt->next_same_value)
5190 enum rtx_code eltcode = GET_CODE (elt->exp);
5192 /* Just check for unary and binary operations. */
5193 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5194 && GET_CODE (elt->exp) != SIGN_EXTEND
5195 && GET_CODE (elt->exp) != ZERO_EXTEND
5196 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5197 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5199 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5201 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5202 op0 = fold_rtx (op0, NULL_RTX);
5204 op0 = equiv_constant (op0);
5205 if (op0)
5206 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5207 op0, mode);
5209 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5210 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
5211 && eltcode != DIV && eltcode != MOD
5212 && eltcode != UDIV && eltcode != UMOD
5213 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5214 && eltcode != ROTATE && eltcode != ROTATERT
5215 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5216 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5217 == mode))
5218 || CONSTANT_P (XEXP (elt->exp, 0)))
5219 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5220 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5221 == mode))
5222 || CONSTANT_P (XEXP (elt->exp, 1))))
5224 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5225 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5227 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5228 op0 = fold_rtx (op0, NULL_RTX);
5230 if (op0)
5231 op0 = equiv_constant (op0);
5233 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
5234 op1 = fold_rtx (op1, NULL_RTX);
5236 if (op1)
5237 op1 = equiv_constant (op1);
5239 /* If we are looking for the low SImode part of
5240 (ashift:DI c (const_int 32)), it doesn't work
5241 to compute that in SImode, because a 32-bit shift
5242 in SImode is unpredictable. We know the value is 0. */
5243 if (op0 && op1
5244 && GET_CODE (elt->exp) == ASHIFT
5245 && GET_CODE (op1) == CONST_INT
5246 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5248 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5250 /* If the count fits in the inner mode's width,
5251 but exceeds the outer mode's width,
5252 the value will get truncated to 0
5253 by the subreg. */
5254 new = const0_rtx;
5255 else
5256 /* If the count exceeds even the inner mode's width,
5257 don't fold this expression. */
5258 new = 0;
5260 else if (op0 && op1)
5261 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5262 op0, op1);
5265 else if (GET_CODE (elt->exp) == SUBREG
5266 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5267 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5268 <= UNITS_PER_WORD)
5269 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5270 new = copy_rtx (SUBREG_REG (elt->exp));
5272 if (new)
5273 return new;
5277 return x;
5279 case NOT:
5280 case NEG:
5281 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5282 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5283 new = lookup_as_function (XEXP (x, 0), code);
5284 if (new)
5285 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5286 break;
5288 case MEM:
5289 /* If we are not actually processing an insn, don't try to find the
5290 best address. Not only don't we care, but we could modify the
5291 MEM in an invalid way since we have no insn to validate against. */
5292 if (insn != 0)
5293 find_best_addr (insn, &XEXP (x, 0));
5296 /* Even if we don't fold in the insn itself,
5297 we can safely do so here, in hopes of getting a constant. */
5298 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5299 rtx base = 0;
5300 HOST_WIDE_INT offset = 0;
5302 if (GET_CODE (addr) == REG
5303 && REGNO_QTY_VALID_P (REGNO (addr))
5304 && GET_MODE (addr) == qty_mode[REG_QTY (REGNO (addr))]
5305 && qty_const[REG_QTY (REGNO (addr))] != 0)
5306 addr = qty_const[REG_QTY (REGNO (addr))];
5308 /* If address is constant, split it into a base and integer offset. */
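/* E.g. (const (plus (symbol_ref table) (const_int 8))) splits into
BASE = (symbol_ref table) and OFFSET = 8. */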
5309 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5310 base = addr;
5311 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5312 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5314 base = XEXP (XEXP (addr, 0), 0);
5315 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5317 else if (GET_CODE (addr) == LO_SUM
5318 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5319 base = XEXP (addr, 1);
5320 else if (GET_CODE (addr) == ADDRESSOF)
5321 return change_address (x, VOIDmode, addr);
5323 /* If this is a constant pool reference, we can fold it into its
5324 constant to allow better value tracking. */
5325 if (base && GET_CODE (base) == SYMBOL_REF
5326 && CONSTANT_POOL_ADDRESS_P (base))
5328 rtx constant = get_pool_constant (base);
5329 enum machine_mode const_mode = get_pool_mode (base);
5330 rtx new;
5332 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5333 constant_pool_entries_cost = COST (constant);
5335 /* If we are loading the full constant, we have an equivalence. */
5336 if (offset == 0 && mode == const_mode)
5337 return constant;
5339 /* If this actually isn't a constant (weird!), we can't do
5340 anything. Otherwise, handle the two most common cases:
5341 extracting a word from a multi-word constant, and extracting
5342 the low-order bits. Other cases don't seem common enough to
5343 worry about. */
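/* E.g. with 32-bit words, an SImode reference at byte offset 4 into
the pool entry for a DImode constant folds, via operand_subword, to
the constant's second word. */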
5344 if (! CONSTANT_P (constant))
5345 return x;
5347 if (GET_MODE_CLASS (mode) == MODE_INT
5348 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5349 && offset % UNITS_PER_WORD == 0
5350 && (new = operand_subword (constant,
5351 offset / UNITS_PER_WORD,
5352 0, const_mode)) != 0)
5353 return new;
5355 if (((BYTES_BIG_ENDIAN
5356 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5357 || (! BYTES_BIG_ENDIAN && offset == 0))
5358 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5359 return new;
5362 /* If this is a reference to a label at a known position in a jump
5363 table, we also know its value. */
5364 if (base && GET_CODE (base) == LABEL_REF)
5366 rtx label = XEXP (base, 0);
5367 rtx table_insn = NEXT_INSN (label);
5369 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5370 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5372 rtx table = PATTERN (table_insn);
5374 if (offset >= 0
5375 && (offset / GET_MODE_SIZE (GET_MODE (table))
5376 < XVECLEN (table, 0)))
5377 return XVECEXP (table, 0,
5378 offset / GET_MODE_SIZE (GET_MODE (table)));
5380 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5381 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5383 rtx table = PATTERN (table_insn);
5385 if (offset >= 0
5386 && (offset / GET_MODE_SIZE (GET_MODE (table))
5387 < XVECLEN (table, 1)))
5389 offset /= GET_MODE_SIZE (GET_MODE (table));
5390 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5391 XEXP (table, 0));
5393 if (GET_MODE (table) != Pmode)
5394 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
5396 /* Indicate this is a constant. This isn't a
5397 valid form of CONST, but it will only be used
5398 to fold the next insns and then discarded, so
5399 it should be safe.
5401 Note this expression must be explicitly discarded
5402 by cse_insn, else it may end up in a REG_EQUAL note
5403 and "escape" to cause problems elsewhere. */
5404 return gen_rtx_CONST (GET_MODE (new), new);
5409 return x;
5412 case ASM_OPERANDS:
5413 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5414 validate_change (insn, &XVECEXP (x, 3, i),
5415 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5416 break;
5418 default:
5419 break;
5422 const_arg0 = 0;
5423 const_arg1 = 0;
5424 const_arg2 = 0;
5425 mode_arg0 = VOIDmode;
5427 /* Try folding our operands.
5428 Then see which ones have constant values known. */
5430 fmt = GET_RTX_FORMAT (code);
5431 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5432 if (fmt[i] == 'e')
5434 rtx arg = XEXP (x, i);
5435 rtx folded_arg = arg, const_arg = 0;
5436 enum machine_mode mode_arg = GET_MODE (arg);
5437 rtx cheap_arg, expensive_arg;
5438 rtx replacements[2];
5439 int j;
5441 /* Most arguments are cheap, so handle them specially. */
5442 switch (GET_CODE (arg))
5444 case REG:
5445 /* This is the same as calling equiv_constant; it is duplicated
5446 here for speed. */
5447 if (REGNO_QTY_VALID_P (REGNO (arg))
5448 && qty_const[REG_QTY (REGNO (arg))] != 0
5449 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != REG
5450 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != PLUS)
5451 const_arg
5452 = gen_lowpart_if_possible (GET_MODE (arg),
5453 qty_const[REG_QTY (REGNO (arg))]);
5454 break;
5456 case CONST:
5457 case CONST_INT:
5458 case SYMBOL_REF:
5459 case LABEL_REF:
5460 case CONST_DOUBLE:
5461 const_arg = arg;
5462 break;
5464 #ifdef HAVE_cc0
5465 case CC0:
5466 folded_arg = prev_insn_cc0;
5467 mode_arg = prev_insn_cc0_mode;
5468 const_arg = equiv_constant (folded_arg);
5469 break;
5470 #endif
5472 default:
5473 folded_arg = fold_rtx (arg, insn);
5474 const_arg = equiv_constant (folded_arg);
5477 /* For the first three operands, see if the operand
5478 is constant or equivalent to a constant. */
5479 switch (i)
5481 case 0:
5482 folded_arg0 = folded_arg;
5483 const_arg0 = const_arg;
5484 mode_arg0 = mode_arg;
5485 break;
5486 case 1:
5487 folded_arg1 = folded_arg;
5488 const_arg1 = const_arg;
5489 break;
5490 case 2:
5491 const_arg2 = const_arg;
5492 break;
5495 /* Pick the least expensive of the folded argument and an
5496 equivalent constant argument. */
5497 if (const_arg == 0 || const_arg == folded_arg
5498 || COST (const_arg) > COST (folded_arg))
5499 cheap_arg = folded_arg, expensive_arg = const_arg;
5500 else
5501 cheap_arg = const_arg, expensive_arg = folded_arg;
5503 /* Try to replace the operand with the cheapest of the two
5504 possibilities. If it doesn't work and this is either of the first
5505 two operands of a commutative operation, try swapping them.
5506 If THAT fails, try the more expensive, provided it is cheaper
5507 than what is already there. */
5509 if (cheap_arg == XEXP (x, i))
5510 continue;
5512 if (insn == 0 && ! copied)
5514 x = copy_rtx (x);
5515 copied = 1;
5518 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5519 for (j = 0;
5520 j < 2 && replacements[j]
5521 && COST (replacements[j]) < COST (XEXP (x, i));
5522 j++)
5524 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5525 break;
5527 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5529 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5530 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5532 if (apply_change_group ())
5534 /* Swap them back to be invalid so that this loop can
5535 continue and flag them to be swapped back later. */
5536 rtx tem;
5538 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5539 XEXP (x, 1) = tem;
5540 must_swap = 1;
5541 break;
5547 else
5549 if (fmt[i] == 'E')
5550 /* Don't try to fold inside of a vector of expressions.
5551 Doing nothing is harmless. */
5552 {;}
5555 /* If a commutative operation, place a constant integer as the second
5556 operand unless the first operand is also a constant integer. Otherwise,
5557 place any constant second unless the first operand is also a constant. */
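/* E.g. (plus (const_int 2) (reg x)) is canonicalized to
(plus (reg x) (const_int 2)), and (plus (symbol_ref s) (reg x))
to (plus (reg x) (symbol_ref s)). */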
5559 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5561 if (must_swap || (const_arg0
5562 && (const_arg1 == 0
5563 || (GET_CODE (const_arg0) == CONST_INT
5564 && GET_CODE (const_arg1) != CONST_INT))))
5566 register rtx tem = XEXP (x, 0);
5568 if (insn == 0 && ! copied)
5570 x = copy_rtx (x);
5571 copied = 1;
5574 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5575 validate_change (insn, &XEXP (x, 1), tem, 1);
5576 if (apply_change_group ())
5578 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5579 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5584 /* If X is an arithmetic operation, see if we can simplify it. */
5586 switch (GET_RTX_CLASS (code))
5588 case '1':
5590 int is_const = 0;
5592 /* We can't simplify extension ops unless we know the
5593 original mode. */
5594 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5595 && mode_arg0 == VOIDmode)
5596 break;
5598 /* If we had a CONST, strip it off and put it back later if we
5599 fold. */
5600 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5601 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5603 new = simplify_unary_operation (code, mode,
5604 const_arg0 ? const_arg0 : folded_arg0,
5605 mode_arg0);
5606 if (new != 0 && is_const)
5607 new = gen_rtx_CONST (mode, new);
5609 break;
5611 case '<':
5612 /* See what items are actually being compared and set FOLDED_ARG[01]
5613 to those values and CODE to the actual comparison code. If any are
5614 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5615 do anything if both operands are already known to be constant. */
5617 if (const_arg0 == 0 || const_arg1 == 0)
5619 struct table_elt *p0, *p1;
5620 rtx true = const_true_rtx, false = const0_rtx;
5621 enum machine_mode mode_arg1;
5623 #ifdef FLOAT_STORE_FLAG_VALUE
5624 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5626 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5627 mode);
5628 false = CONST0_RTX (mode);
5630 #endif
5632 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5633 &mode_arg0, &mode_arg1);
5634 const_arg0 = equiv_constant (folded_arg0);
5635 const_arg1 = equiv_constant (folded_arg1);
5637 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5638 what kinds of things are being compared, so we can't do
5639 anything with this comparison. */
5641 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5642 break;
5644 /* If we do not now have two constants being compared, see
5645 if we can nevertheless deduce some things about the
5646 comparison. */
5647 if (const_arg0 == 0 || const_arg1 == 0)
5649 /* Is FOLDED_ARG0 the frame pointer plus a constant? Or
5650 a non-explicit constant? These aren't zero, but we
5651 don't know their sign. */
5652 if (const_arg1 == const0_rtx
5653 && (NONZERO_BASE_PLUS_P (folded_arg0)
5654 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5655 come out as 0. */
5656 || GET_CODE (folded_arg0) == SYMBOL_REF
5657 #endif
5658 || GET_CODE (folded_arg0) == LABEL_REF
5659 || GET_CODE (folded_arg0) == CONST))
5661 if (code == EQ)
5662 return false;
5663 else if (code == NE)
5664 return true;
5667 /* See if the two operands are the same. We don't do this
5668 for IEEE floating-point, since we can't assume x == x
5669 there: x might be a NaN. */
5671 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5672 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5673 && (folded_arg0 == folded_arg1
5674 || (GET_CODE (folded_arg0) == REG
5675 && GET_CODE (folded_arg1) == REG
5676 && (REG_QTY (REGNO (folded_arg0))
5677 == REG_QTY (REGNO (folded_arg1))))
5678 || ((p0 = lookup (folded_arg0,
5679 (safe_hash (folded_arg0, mode_arg0)
5680 % NBUCKETS), mode_arg0))
5681 && (p1 = lookup (folded_arg1,
5682 (safe_hash (folded_arg1, mode_arg0)
5683 % NBUCKETS), mode_arg0))
5684 && p0->first_same_value == p1->first_same_value)))
5685 return ((code == EQ || code == LE || code == GE
5686 || code == LEU || code == GEU)
5687 ? true : false);
5689 /* If FOLDED_ARG0 is a register, see if the comparison we are
5690 doing now is either the same as we did before or the reverse
5691 (we only check the reverse if not floating-point). */
5692 else if (GET_CODE (folded_arg0) == REG)
5694 int qty = REG_QTY (REGNO (folded_arg0));
5696 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5697 && (comparison_dominates_p (qty_comparison_code[qty], code)
5698 || (comparison_dominates_p (qty_comparison_code[qty],
5699 reverse_condition (code))
5700 && ! FLOAT_MODE_P (mode_arg0)))
5701 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5702 || (const_arg1
5703 && rtx_equal_p (qty_comparison_const[qty],
5704 const_arg1))
5705 || (GET_CODE (folded_arg1) == REG
5706 && (REG_QTY (REGNO (folded_arg1))
5707 == qty_comparison_qty[qty]))))
5708 return (comparison_dominates_p (qty_comparison_code[qty],
5709 code)
5710 ? true : false);
5715 /* If we are comparing against zero, see if the first operand is
5716 equivalent to an IOR with a constant. If so, we may be able to
5717 determine the result of this comparison. */
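/* E.g. if the first operand is known equivalent to
(ior y (const_int 4)), it cannot be zero, so NE folds to true and EQ
to false; and when the IOR constant also has the sign bit set, LT and
LE fold to true as well. */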
5719 if (const_arg1 == const0_rtx)
5721 rtx y = lookup_as_function (folded_arg0, IOR);
5722 rtx inner_const;
5724 if (y != 0
5725 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5726 && GET_CODE (inner_const) == CONST_INT
5727 && INTVAL (inner_const) != 0)
5729 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5730 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5731 && (INTVAL (inner_const)
5732 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5733 rtx true = const_true_rtx, false = const0_rtx;
5735 #ifdef FLOAT_STORE_FLAG_VALUE
5736 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5738 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5739 mode);
5740 false = CONST0_RTX (mode);
5742 #endif
5744 switch (code)
5746 case EQ:
5747 return false;
5748 case NE:
5749 return true;
5750 case LT: case LE:
5751 if (has_sign)
5752 return true;
5753 break;
5754 case GT: case GE:
5755 if (has_sign)
5756 return false;
5757 break;
5758 default:
5759 break;
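/* Concrete case of the IOR test above: if FOLDED_ARG0 is known
equivalent to (ior:SI X (const_int -16)), some bit is certainly
set, so EQ against zero folds to false and NE to true; and since
the mask has the sign bit set, the value is negative, so LT and LE
fold to true while GT and GE fold to false. */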
5764 new = simplify_relational_operation (code, mode_arg0,
5765 const_arg0 ? const_arg0 : folded_arg0,
5766 const_arg1 ? const_arg1 : folded_arg1);
5767 #ifdef FLOAT_STORE_FLAG_VALUE
5768 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5769 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5770 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5771 #endif
5772 break;
5774 case '2':
5775 case 'c':
5776 switch (code)
5778 case PLUS:
5779 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5780 with that LABEL_REF as its second operand. If so, the result is
5781 the first operand of that MINUS. This handles switches with an
5782 ADDR_DIFF_VEC table. */
5783 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5785 rtx y
5786 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5787 : lookup_as_function (folded_arg0, MINUS);
5789 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5790 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5791 return XEXP (y, 0);
5793 /* Now try for a CONST of a MINUS like the above. */
5794 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5795 : lookup_as_function (folded_arg0, CONST))) != 0
5796 && GET_CODE (XEXP (y, 0)) == MINUS
5797 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5798 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5799 return XEXP (XEXP (y, 0), 0);
5802 /* Likewise if the operands are in the other order. */
5803 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5805 rtx y
5806 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5807 : lookup_as_function (folded_arg1, MINUS);
5809 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5810 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5811 return XEXP (y, 0);
5813 /* Now try for a CONST of a MINUS like the above. */
5814 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5815 : lookup_as_function (folded_arg1, CONST))) != 0
5816 && GET_CODE (XEXP (y, 0)) == MINUS
5817 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5818 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5819 return XEXP (XEXP (y, 0), 0);
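/* Example of the ADDR_DIFF_VEC case handled above: a tablejump
loads (minus (label_ref L1) (label_ref L2)) from the dispatch
table and adds (label_ref L2) back. Either ordering of the PLUS
operands folds to (label_ref L1), exposing a direct jump to the
case label. */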
5822 /* If second operand is a register equivalent to a negative
5823 CONST_INT, see if we can find a register equivalent to the
5824 positive constant. Make a MINUS if so. Don't do this for
5825 a non-negative constant since we might then alternate between
5826 choosing positive and negative constants. Having the positive
5827 constant previously used is the more common case. Be sure
5828 the resulting constant is non-negative; if const_arg1 were
5829 the smallest negative number this would overflow: depending
5830 on the mode, this would either just be the same value (and
5831 hence not save anything) or be incorrect. */
5832 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5833 && INTVAL (const_arg1) < 0
5834 /* This used to test
5836 - INTVAL (const_arg1) >= 0
5838 But the Sun V5.0 compilers mis-compiled that test. So
5839 instead we test for the problematic value in a more direct
5840 manner and hope the Sun compilers get it correct. */
5841 && INTVAL (const_arg1) !=
5842 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
5843 && GET_CODE (folded_arg1) == REG)
5845 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5846 struct table_elt *p
5847 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5848 mode);
5850 if (p)
5851 for (p = p->first_same_value; p; p = p->next_same_value)
5852 if (GET_CODE (p->exp) == REG)
5853 return cse_gen_binary (MINUS, mode, folded_arg0,
5854 canon_reg (p->exp, NULL_RTX));
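/* For instance, in (plus:SI R1 R2) where R2 is known to hold
(const_int -4): if some register is already known to hold
(const_int 4), the addition is rewritten as a MINUS of that
register, reusing the positive constant that is more likely to be
wanted again. */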
5856 goto from_plus;
5858 case MINUS:
5859 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5860 If so, produce (PLUS Z C2-C). */
5861 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5863 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5864 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5865 return fold_rtx (plus_constant (copy_rtx (y),
5866 -INTVAL (const_arg1)),
5867 NULL_RTX);
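/* E.g. if Y is known to be (plus:SI Z (const_int 12)), then
(minus:SI Y (const_int 4)) folds to (plus:SI Z (const_int 8)):
the constants combine and the subtraction disappears. */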
5870 /* ... fall through ... */
5872 from_plus:
5873 case SMIN: case SMAX: case UMIN: case UMAX:
5874 case IOR: case AND: case XOR:
5875 case MULT: case DIV: case UDIV:
5876 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5877 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5878 is known to be of similar form, we may be able to replace the
5879 operation with a combined operation. This may eliminate the
5880 intermediate operation if every use is simplified in this way.
5881 Note that the similar optimization done by combine.c only works
5882 if the intermediate operation's result has only one reference. */
5884 if (GET_CODE (folded_arg0) == REG
5885 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5887 int is_shift
5888 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5889 rtx y = lookup_as_function (folded_arg0, code);
5890 rtx inner_const;
5891 enum rtx_code associate_code;
5892 rtx new_const;
5894 if (y == 0
5895 || 0 == (inner_const
5896 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5897 || GET_CODE (inner_const) != CONST_INT
5898 /* If we have compiled a statement like
5899 "if (x == (x & mask1))", and now are looking at
5900 "x & mask2", we will have a case where the first operand
5901 of Y is the same as our first operand. Unless we detect
5902 this case, an infinite loop will result. */
5903 || XEXP (y, 0) == folded_arg0)
5904 break;
5906 /* Don't associate these operations if they are a PLUS with the
5907 same constant and it is a power of two. These might be doable
5908 with a pre- or post-increment. Similarly for two subtracts of
5909 identical powers of two with post decrement. */
5911 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5912 && ((HAVE_PRE_INCREMENT
5913 && exact_log2 (INTVAL (const_arg1)) >= 0)
5914 || (HAVE_POST_INCREMENT
5915 && exact_log2 (INTVAL (const_arg1)) >= 0)
5916 || (HAVE_PRE_DECREMENT
5917 && exact_log2 (- INTVAL (const_arg1)) >= 0)
5918 || (HAVE_POST_DECREMENT
5919 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
5920 break;
5922 /* Compute the code used to compose the constants. For example,
5923 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5925 associate_code
5926 = (code == MULT || code == DIV || code == UDIV ? MULT
5927 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5929 new_const = simplify_binary_operation (associate_code, mode,
5930 const_arg1, inner_const);
5932 if (new_const == 0)
5933 break;
5935 /* If we are associating shift operations, don't let this
5936 produce a shift of the size of the object or larger.
5937 This could occur when we follow a sign-extend by a right
5938 shift on a machine that does a sign-extend as a pair
5939 of shifts. */
5941 if (is_shift && GET_CODE (new_const) == CONST_INT
5942 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5944 /* As an exception, we can turn an ASHIFTRT of this
5945 form into a shift of the number of bits - 1. */
5946 if (code == ASHIFTRT)
5947 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5948 else
5949 break;
5952 y = copy_rtx (XEXP (y, 0));
5954 /* If Y contains our first operand (the most common way this
5955 can happen is if Y is a MEM), we would go into an infinite
5956 loop if we tried to fold it. So don't in that case. */
5958 if (! reg_mentioned_p (folded_arg0, y))
5959 y = fold_rtx (y, insn);
5961 return cse_gen_binary (code, mode, y, new_const);
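/* A worked case of the reassociation above: if R1 is known to be
(ashift:SI X (const_int 2)) and we now fold
(ashift:SI R1 (const_int 3)), the shift counts combine with PLUS
to give (ashift:SI X (const_int 5)), eliminating the intermediate
shift whenever every use of R1 simplifies the same way. */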
5963 break;
5965 default:
5966 break;
5969 new = simplify_binary_operation (code, mode,
5970 const_arg0 ? const_arg0 : folded_arg0,
5971 const_arg1 ? const_arg1 : folded_arg1);
5972 break;
5974 case 'o':
5975 /* (lo_sum (high X) X) is simply X. */
5976 if (code == LO_SUM && const_arg0 != 0
5977 && GET_CODE (const_arg0) == HIGH
5978 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5979 return const_arg1;
5980 break;
5982 case '3':
5983 case 'b':
5984 new = simplify_ternary_operation (code, mode, mode_arg0,
5985 const_arg0 ? const_arg0 : folded_arg0,
5986 const_arg1 ? const_arg1 : folded_arg1,
5987 const_arg2 ? const_arg2 : XEXP (x, 2));
5988 break;
5990 case 'x':
5991 /* Always eliminate CONSTANT_P_RTX at this stage. */
5992 if (code == CONSTANT_P_RTX)
5993 return (const_arg0 ? const1_rtx : const0_rtx);
5994 break;
5997 return new ? new : x;
6000 /* Return a constant value currently equivalent to X.
6001 Return 0 if we don't know one. */
6003 static rtx
6004 equiv_constant (x)
6005 rtx x;
6007 if (GET_CODE (x) == REG
6008 && REGNO_QTY_VALID_P (REGNO (x))
6009 && qty_const[REG_QTY (REGNO (x))])
6010 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[REG_QTY (REGNO (x))]);
6012 if (x == 0 || CONSTANT_P (x))
6013 return x;
6015 /* If X is a MEM, try to fold it outside the context of any insn to see if
6016 it might be equivalent to a constant. That handles the case where it
6017 is a constant-pool reference. Then try to look it up in the hash table
6018 in case it is something whose value we have seen before. */
6020 if (GET_CODE (x) == MEM)
6022 struct table_elt *elt;
6024 x = fold_rtx (x, NULL_RTX);
6025 if (CONSTANT_P (x))
6026 return x;
6028 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
6029 if (elt == 0)
6030 return 0;
6032 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
6033 if (elt->is_const && CONSTANT_P (elt->exp))
6034 return elt->exp;
6037 return 0;
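/* Thus for (reg:SI 42) whose quantity has qty_const (const_int 7),
the result is (const_int 7); for a MEM that reads the constant
pool, fold_rtx recovers the pooled constant; anything else known
constant is found through its hash-table class. */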
6040 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
6041 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
6042 least-significant part of X.
6043 MODE specifies how big a part of X to return.
6045 If the requested operation cannot be done, 0 is returned.
6047 This is similar to gen_lowpart in emit-rtl.c. */
6050 gen_lowpart_if_possible (mode, x)
6051 enum machine_mode mode;
6052 register rtx x;
6054 rtx result = gen_lowpart_common (mode, x);
6056 if (result)
6057 return result;
6058 else if (GET_CODE (x) == MEM)
6060 /* This is the only other case we handle. */
6061 register int offset = 0;
6062 rtx new;
6064 if (WORDS_BIG_ENDIAN)
6065 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6066 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6067 if (BYTES_BIG_ENDIAN)
6068 /* Adjust the address so that the address-after-the-data is
6069 unchanged. */
6070 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6071 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
6072 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
6073 if (! memory_address_p (mode, XEXP (new, 0)))
6074 return 0;
6075 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
6076 MEM_COPY_ATTRIBUTES (new, x);
6077 return new;
6079 else
6080 return 0;
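/* Example, assuming 4-byte words: taking the QImode low part of a
big-endian (mem:SI A) yields (mem:QI (plus A 3)), because the
least-significant byte is stored last on a big-endian machine;
little-endian needs no adjustment. The address-after-the-data is
what stays fixed. */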
6083 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
6084 branch. It will be zero if not.
6086 In certain cases, this can cause us to add an equivalence. For example,
6087 if we are following the taken case of
6088 if (i == 2)
6089 we can add the fact that `i' and `2' are now equivalent.
6091 In any case, we can record that this comparison was passed. If the same
6092 comparison is seen later, we will know its value. */
6094 static void
6095 record_jump_equiv (insn, taken)
6096 rtx insn;
6097 int taken;
6099 int cond_known_true;
6100 rtx op0, op1;
6101 enum machine_mode mode, mode0, mode1;
6102 int reversed_nonequality = 0;
6103 enum rtx_code code;
6105 /* Ensure this is the right kind of insn. */
6106 if (! condjump_p (insn) || simplejump_p (insn))
6107 return;
6109 /* See if this jump condition is known true or false. */
6110 if (taken)
6111 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
6112 else
6113 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
6115 /* Get the type of comparison being done and the operands being compared.
6116 If we had to reverse a non-equality condition, record that fact so we
6117 know that it isn't valid for floating-point. */
6118 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
6119 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
6120 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
6122 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
6123 if (! cond_known_true)
6125 reversed_nonequality = (code != EQ && code != NE);
6126 code = reverse_condition (code);
6129 /* The mode is the mode of the non-constant. */
6130 mode = mode0;
6131 if (mode1 != VOIDmode)
6132 mode = mode1;
6134 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
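/* Example: for `if (i == 2)', following the not-taken branch
reverses EQ to NE before recording. Since the original code was an
equality, reversed_nonequality remains zero, so the recorded fact
`i != 2' is usable even in floating-point modes, where only
reversed non-equalities are unsafe. */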
6137 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6138 REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
6139 Make any useful entries we can with that information. Called from
6140 the above function, and also called recursively. */
6142 static void
6143 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
6144 enum rtx_code code;
6145 enum machine_mode mode;
6146 rtx op0, op1;
6147 int reversed_nonequality;
6149 unsigned op0_hash, op1_hash;
6150 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
6151 struct table_elt *op0_elt, *op1_elt;
6153 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
6154 we know that they are also equal in the smaller mode (this is also
6155 true for all smaller modes whether or not there is a SUBREG, but
6156 it is not worth testing when there is no SUBREG). */
6158 /* Note that GET_MODE (op0) may not equal MODE. */
6159 if (code == EQ && GET_CODE (op0) == SUBREG
6160 && (GET_MODE_SIZE (GET_MODE (op0))
6161 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6163 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6164 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6166 record_jump_cond (code, mode, SUBREG_REG (op0),
6167 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6168 reversed_nonequality);
6171 if (code == EQ && GET_CODE (op1) == SUBREG
6172 && (GET_MODE_SIZE (GET_MODE (op1))
6173 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6175 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6176 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6178 record_jump_cond (code, mode, SUBREG_REG (op1),
6179 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6180 reversed_nonequality);
6183 /* Similarly, if this is an NE comparison, and either is a SUBREG
6184 making a smaller mode, we know the whole thing is also NE. */
6186 /* Note that GET_MODE (op0) may not equal MODE;
6187 if we test MODE instead, we can get an infinite recursion
6188 alternating between two modes each wider than MODE. */
6190 if (code == NE && GET_CODE (op0) == SUBREG
6191 && subreg_lowpart_p (op0)
6192 && (GET_MODE_SIZE (GET_MODE (op0))
6193 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6195 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6196 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6198 record_jump_cond (code, mode, SUBREG_REG (op0),
6199 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6200 reversed_nonequality);
6203 if (code == NE && GET_CODE (op1) == SUBREG
6204 && subreg_lowpart_p (op1)
6205 && (GET_MODE_SIZE (GET_MODE (op1))
6206 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6208 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6209 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6211 record_jump_cond (code, mode, SUBREG_REG (op1),
6212 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6213 reversed_nonequality);
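/* Illustration of the SUBREG cases above: given that
(subreg:SI (reg:HI R) 0) equals OP1 and the SUBREG is paradoxical,
the low HImode part of OP1 must equal (reg:HI R), so that narrower
equivalence is recorded too. Conversely, if a low-part SUBREG
compares NE, the full-width values must differ as well. */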
6216 /* Hash both operands. */
6218 do_not_record = 0;
6219 hash_arg_in_memory = 0;
6220 hash_arg_in_struct = 0;
6221 op0_hash = HASH (op0, mode);
6222 op0_in_memory = hash_arg_in_memory;
6223 op0_in_struct = hash_arg_in_struct;
6225 if (do_not_record)
6226 return;
6228 do_not_record = 0;
6229 hash_arg_in_memory = 0;
6230 hash_arg_in_struct = 0;
6231 op1_hash = HASH (op1, mode);
6232 op1_in_memory = hash_arg_in_memory;
6233 op1_in_struct = hash_arg_in_struct;
6235 if (do_not_record)
6236 return;
6238 /* Look up both operands. */
6239 op0_elt = lookup (op0, op0_hash, mode);
6240 op1_elt = lookup (op1, op1_hash, mode);
6242 /* If both operands are already equivalent or if they are not in the
6243 table but are identical, do nothing. */
6244 if ((op0_elt != 0 && op1_elt != 0
6245 && op0_elt->first_same_value == op1_elt->first_same_value)
6246 || op0 == op1 || rtx_equal_p (op0, op1))
6247 return;
6249 /* If we aren't setting two things equal all we can do is save this
6250 comparison. Similarly if this is floating-point. In the latter
6251 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
6252 If we record the equality, we might inadvertently delete code
6253 whose intent was to change -0 to +0. */
6255 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
6257 /* If we reversed a floating-point comparison, if OP0 is not a
6258 register, or if OP1 is neither a register nor a constant, we can't
6259 do anything. */
6261 if (GET_CODE (op1) != REG)
6262 op1 = equiv_constant (op1);
6264 if ((reversed_nonequality && FLOAT_MODE_P (mode))
6265 || GET_CODE (op0) != REG || op1 == 0)
6266 return;
6268 /* Put OP0 in the hash table if it isn't already. This gives it a
6269 new quantity number. */
6270 if (op0_elt == 0)
6272 if (insert_regs (op0, NULL_PTR, 0))
6274 rehash_using_reg (op0);
6275 op0_hash = HASH (op0, mode);
6277 /* If OP0 is contained in OP1, this changes its hash code
6278 as well. Faster to rehash than to check, except
6279 for the simple case of a constant. */
6280 if (! CONSTANT_P (op1))
6281 op1_hash = HASH (op1, mode);
6284 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6285 op0_elt->in_memory = op0_in_memory;
6286 op0_elt->in_struct = op0_in_struct;
6289 qty_comparison_code[REG_QTY (REGNO (op0))] = code;
6290 if (GET_CODE (op1) == REG)
6292 /* Look it up again--in case op0 and op1 are the same. */
6293 op1_elt = lookup (op1, op1_hash, mode);
6295 /* Put OP1 in the hash table so it gets a new quantity number. */
6296 if (op1_elt == 0)
6298 if (insert_regs (op1, NULL_PTR, 0))
6300 rehash_using_reg (op1);
6301 op1_hash = HASH (op1, mode);
6304 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6305 op1_elt->in_memory = op1_in_memory;
6306 op1_elt->in_struct = op1_in_struct;
6309 qty_comparison_qty[REG_QTY (REGNO (op0))] = REG_QTY (REGNO (op1));
6310 qty_comparison_const[REG_QTY (REGNO (op0))] = 0;
6312 else
6314 qty_comparison_qty[REG_QTY (REGNO (op0))] = -1;
6315 qty_comparison_const[REG_QTY (REGNO (op0))] = op1;
6318 return;
6321 /* If either side is still missing an equivalence, make it now,
6322 then merge the equivalences. */
6324 if (op0_elt == 0)
6326 if (insert_regs (op0, NULL_PTR, 0))
6328 rehash_using_reg (op0);
6329 op0_hash = HASH (op0, mode);
6332 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6333 op0_elt->in_memory = op0_in_memory;
6334 op0_elt->in_struct = op0_in_struct;
6337 if (op1_elt == 0)
6339 if (insert_regs (op1, NULL_PTR, 0))
6341 rehash_using_reg (op1);
6342 op1_hash = HASH (op1, mode);
6345 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6346 op1_elt->in_memory = op1_in_memory;
6347 op1_elt->in_struct = op1_in_struct;
6350 merge_equiv_classes (op0_elt, op1_elt);
6351 last_jump_equiv_class = op0_elt;
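/* Reaching this point means CODE is EQ on a non-floating mode, so a
full equivalence is safe to record: merging the classes makes
everything known equal to OP0 also known equal to OP1.
last_jump_equiv_class remembers the class just created so later
code that follows the jump can consult it. */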
6354 /* CSE processing for one instruction.
6355 First simplify sources and addresses of all assignments
6356 in the instruction, using previously-computed equivalent values.
6357 Then install the new sources and destinations in the table
6358 of available values.
6360 If LIBCALL_INSN is nonzero, don't record any equivalence made in
6361 the insn. It means that INSN is inside a libcall block. In this
6362 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
6364 /* Data on one SET contained in the instruction. */
6366 struct set
6368 /* The SET rtx itself. */
6369 rtx rtl;
6370 /* The SET_SRC of the rtx (the original value, if it is changing). */
6371 rtx src;
6372 /* The hash-table element for the SET_SRC of the SET. */
6373 struct table_elt *src_elt;
6374 /* Hash value for the SET_SRC. */
6375 unsigned src_hash;
6376 /* Hash value for the SET_DEST. */
6377 unsigned dest_hash;
6378 /* The SET_DEST, with SUBREG, etc., stripped. */
6379 rtx inner_dest;
6380 /* Nonzero if the SET_SRC is in memory. */
6381 char src_in_memory;
6382 /* Nonzero if the SET_SRC is in a structure. */
6383 char src_in_struct;
6384 /* Nonzero if the SET_SRC contains something
6385 whose value cannot be predicted and understood. */
6386 char src_volatile;
6387 /* Original machine mode, in case it becomes a CONST_INT. */
6388 enum machine_mode mode;
6389 /* A constant equivalent for SET_SRC, if any. */
6390 rtx src_const;
6391 /* Hash value of constant equivalent for SET_SRC. */
6392 unsigned src_const_hash;
6393 /* Table entry for constant equivalent for SET_SRC, if any. */
6394 struct table_elt *src_const_elt;
6397 static void
6398 cse_insn (insn, libcall_insn)
6399 rtx insn;
6400 rtx libcall_insn;
6402 register rtx x = PATTERN (insn);
6403 register int i;
6404 rtx tem;
6405 register int n_sets = 0;
6407 #ifdef HAVE_cc0
6408 /* Records what this insn does to set CC0. */
6409 rtx this_insn_cc0 = 0;
6410 enum machine_mode this_insn_cc0_mode = VOIDmode;
6411 #endif
6413 rtx src_eqv = 0;
6414 struct table_elt *src_eqv_elt = 0;
6415 int src_eqv_volatile = 0;
6416 int src_eqv_in_memory = 0;
6417 int src_eqv_in_struct = 0;
6418 unsigned src_eqv_hash = 0;
6420 struct set *sets = NULL_PTR;
6422 this_insn = insn;
6424 /* Find all the SETs and CLOBBERs in this instruction.
6425 Record all the SETs in the array `set' and count them.
6426 Also determine whether there is a CLOBBER that invalidates
6427 all memory references, or all references at varying addresses. */
6429 if (GET_CODE (insn) == CALL_INSN)
6431 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6432 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6433 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6436 if (GET_CODE (x) == SET)
6438 sets = (struct set *) alloca (sizeof (struct set));
6439 sets[0].rtl = x;
6441 /* Ignore SETs that are unconditional jumps.
6442 They never need cse processing, so this does not hurt.
6443 The reason is not efficiency but rather
6444 so that we can test at the end for instructions
6445 that have been simplified to unconditional jumps
6446 and not be misled by unchanged instructions
6447 that were unconditional jumps to begin with. */
6448 if (SET_DEST (x) == pc_rtx
6449 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6452 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6453 The hard function value register is used only once, to copy to
6454 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6455 Ensure we invalidate the destination register. On the 80386 no
6456 other code would invalidate it since it is a fixed_reg.
6457 We need not check the return of apply_change_group; see canon_reg. */
6459 else if (GET_CODE (SET_SRC (x)) == CALL)
6461 canon_reg (SET_SRC (x), insn);
6462 apply_change_group ();
6463 fold_rtx (SET_SRC (x), insn);
6464 invalidate (SET_DEST (x), VOIDmode);
6466 else
6467 n_sets = 1;
6469 else if (GET_CODE (x) == PARALLEL)
6471 register int lim = XVECLEN (x, 0);
6473 sets = (struct set *) alloca (lim * sizeof (struct set));
6475 /* Find all regs explicitly clobbered in this insn,
6476 and ensure they are not replaced with any other regs
6477 elsewhere in this insn.
6478 When a reg that is clobbered is also used for input,
6479 we should presume that that is for a reason,
6480 and we should not substitute some other register
6481 which is not supposed to be clobbered.
6482 Therefore, this loop cannot be merged into the one below
6483 because a CALL may precede a CLOBBER and refer to the
6484 value clobbered. We must not let a canonicalization do
6485 anything in that case. */
6486 for (i = 0; i < lim; i++)
6488 register rtx y = XVECEXP (x, 0, i);
6489 if (GET_CODE (y) == CLOBBER)
6491 rtx clobbered = XEXP (y, 0);
6493 if (GET_CODE (clobbered) == REG
6494 || GET_CODE (clobbered) == SUBREG)
6495 invalidate (clobbered, VOIDmode);
6496 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6497 || GET_CODE (clobbered) == ZERO_EXTRACT)
6498 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6502 for (i = 0; i < lim; i++)
6504 register rtx y = XVECEXP (x, 0, i);
6505 if (GET_CODE (y) == SET)
6507 /* As above, we ignore unconditional jumps and call-insns and
6508 ignore the result of apply_change_group. */
6509 if (GET_CODE (SET_SRC (y)) == CALL)
6511 canon_reg (SET_SRC (y), insn);
6512 apply_change_group ();
6513 fold_rtx (SET_SRC (y), insn);
6514 invalidate (SET_DEST (y), VOIDmode);
6516 else if (SET_DEST (y) == pc_rtx
6517 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6519 else
6520 sets[n_sets++].rtl = y;
6522 else if (GET_CODE (y) == CLOBBER)
6524 /* If we clobber memory, canon the address.
6525 This does nothing when a register is clobbered
6526 because we have already invalidated the reg. */
6527 if (GET_CODE (XEXP (y, 0)) == MEM)
6528 canon_reg (XEXP (y, 0), NULL_RTX);
6530 else if (GET_CODE (y) == USE
6531 && ! (GET_CODE (XEXP (y, 0)) == REG
6532 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6533 canon_reg (y, NULL_RTX);
6534 else if (GET_CODE (y) == CALL)
6536 /* The result of apply_change_group can be ignored; see
6537 canon_reg. */
6538 canon_reg (y, insn);
6539 apply_change_group ();
6540 fold_rtx (y, insn);
6544 else if (GET_CODE (x) == CLOBBER)
6546 if (GET_CODE (XEXP (x, 0)) == MEM)
6547 canon_reg (XEXP (x, 0), NULL_RTX);
6550 /* Canonicalize a USE of a pseudo register or memory location. */
6551 else if (GET_CODE (x) == USE
6552 && ! (GET_CODE (XEXP (x, 0)) == REG
6553 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6554 canon_reg (XEXP (x, 0), NULL_RTX);
6555 else if (GET_CODE (x) == CALL)
6557 /* The result of apply_change_group can be ignored; see canon_reg. */
6558 canon_reg (x, insn);
6559 apply_change_group ();
6560 fold_rtx (x, insn);
6563 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6564 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6565 is handled specially for this case, and if it isn't set, then there will
6566 be no equivalence for the destination. */
6567 if (n_sets == 1 && REG_NOTES (insn) != 0
6568 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6569 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6570 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6571 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6573 /* Canonicalize sources and addresses of destinations.
6574 We do this in a separate pass to avoid problems when a MATCH_DUP is
6575 present in the insn pattern. In that case, we want to ensure that
6576 we don't break the duplicate nature of the pattern. So we will replace
6577 both operands at the same time. Otherwise, we would fail to find an
6578 equivalent substitution in the loop calling validate_change below.
6580 We used to suppress canonicalization of DEST if it appears in SRC,
6581 but we don't do this any more. */
6583 for (i = 0; i < n_sets; i++)
6585 rtx dest = SET_DEST (sets[i].rtl);
6586 rtx src = SET_SRC (sets[i].rtl);
6587 rtx new = canon_reg (src, insn);
6588 int insn_code;
6590 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6591 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6592 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6593 || (insn_code = recog_memoized (insn)) < 0
6594 || insn_data[insn_code].n_dups > 0)
6595 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6596 else
6597 SET_SRC (sets[i].rtl) = new;
6599 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6601 validate_change (insn, &XEXP (dest, 1),
6602 canon_reg (XEXP (dest, 1), insn), 1);
6603 validate_change (insn, &XEXP (dest, 2),
6604 canon_reg (XEXP (dest, 2), insn), 1);
6607 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6608 || GET_CODE (dest) == ZERO_EXTRACT
6609 || GET_CODE (dest) == SIGN_EXTRACT)
6610 dest = XEXP (dest, 0);
6612 if (GET_CODE (dest) == MEM)
6613 canon_reg (dest, insn);
6616 /* Now that we have done all the replacements, we can apply the change
6617 group and see if they all work. Note that this will cause some
6618 canonicalizations that would have worked individually not to be applied
6619 because some other canonicalization didn't work, but this should not
6620 occur often.
6622 The result of apply_change_group can be ignored; see canon_reg. */
6624 apply_change_group ();
6626 /* Set sets[i].src_elt to the class each source belongs to.
6627 Detect assignments from or to volatile things
6628 and set set[i] to zero so they will be ignored
6629 in the rest of this function.
6631 Nothing in this loop changes the hash table or the register chains. */
6633 for (i = 0; i < n_sets; i++)
6635 register rtx src, dest;
6636 register rtx src_folded;
6637 register struct table_elt *elt = 0, *p;
6638 enum machine_mode mode;
6639 rtx src_eqv_here;
6640 rtx src_const = 0;
6641 rtx src_related = 0;
6642 struct table_elt *src_const_elt = 0;
6643 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6644 int src_related_cost = 10000, src_elt_cost = 10000;
6645 /* Set non-zero if we need to call force_const_mem on the
6646 contents of src_folded before using it. */
6647 int src_folded_force_flag = 0;
6649 dest = SET_DEST (sets[i].rtl);
6650 src = SET_SRC (sets[i].rtl);
6652 /* If SRC is a constant that has no machine mode,
6653 hash it with the destination's machine mode.
6654 This way we can keep different modes separate. */
6656 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6657 sets[i].mode = mode;
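/* E.g. for (set (reg:SI R) (const_int 5)) the CONST_INT itself is
VOIDmode, so the hash uses the destination's SImode; the same
constant stored into a DImode register hashes separately and the
two are never confused. */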
6659 if (src_eqv)
6661 enum machine_mode eqvmode = mode;
6662 if (GET_CODE (dest) == STRICT_LOW_PART)
6663 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6664 do_not_record = 0;
6665 hash_arg_in_memory = 0;
6666 hash_arg_in_struct = 0;
6667 src_eqv = fold_rtx (src_eqv, insn);
6668 src_eqv_hash = HASH (src_eqv, eqvmode);
6670 /* Find the equivalence class for the equivalent expression. */
6672 if (!do_not_record)
6673 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6675 src_eqv_volatile = do_not_record;
6676 src_eqv_in_memory = hash_arg_in_memory;
6677 src_eqv_in_struct = hash_arg_in_struct;
6680 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6681 value of the INNER register, not the destination. So it is not
6682 a valid substitution for the source. But save it for later. */
6683 if (GET_CODE (dest) == STRICT_LOW_PART)
6684 src_eqv_here = 0;
6685 else
6686 src_eqv_here = src_eqv;
6688 /* Simplify any foldable subexpressions in SRC. Then get the fully-
6689 simplified result, which may not necessarily be valid. */
6690 src_folded = fold_rtx (src, insn);
6692 #if 0
6693 /* ??? This caused bad code to be generated for the m68k port with -O2.
6694 Suppose src is (CONST_INT -1), and that after truncation src_folded
6695 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6696 At the end we will add src and src_const to the same equivalence
6697 class. We now have 3 and -1 in the same equivalence class. This
6698 causes later instructions to be mis-optimized. */
6699 /* If storing a constant in a bitfield, pre-truncate the constant
6700 so we will be able to record it later. */
6701 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6702 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6704 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6706 if (GET_CODE (src) == CONST_INT
6707 && GET_CODE (width) == CONST_INT
6708 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6709 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6710 src_folded
6711 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6712 << INTVAL (width)) - 1));
6714 #endif
6716 /* Compute SRC's hash code, and also notice if it
6717 should not be recorded at all. In that case,
6718 prevent any further processing of this assignment. */
6719 do_not_record = 0;
6720 hash_arg_in_memory = 0;
6721 hash_arg_in_struct = 0;
6723 sets[i].src = src;
6724 sets[i].src_hash = HASH (src, mode);
6725 sets[i].src_volatile = do_not_record;
6726 sets[i].src_in_memory = hash_arg_in_memory;
6727 sets[i].src_in_struct = hash_arg_in_struct;
6729 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6730 a pseudo that is set more than once, do not record SRC. Using
6731 SRC as a replacement for anything else will be incorrect in that
6732 situation. Note that this usually occurs only for stack slots,
6733 in which case all the RTL would be referring to SRC, so we don't
6734 lose any optimization opportunities by not having SRC in the
6735 hash table. */
6737 if (GET_CODE (src) == MEM
6738 && find_reg_note (insn, REG_EQUIV, src) != 0
6739 && GET_CODE (dest) == REG
6740 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6741 && REG_N_SETS (REGNO (dest)) != 1)
6742 sets[i].src_volatile = 1;
6744 #if 0
6745 /* It is no longer clear why we used to do this, but it doesn't
6746 appear to still be needed. So let's try without it since this
6747 code hurts cse'ing widened ops. */
6748 /* If source is a perverse subreg (such as QI treated as an SI),
6749 treat it as volatile. It may do the work of an SI in one context
6750 where the extra bits are not being used, but cannot replace an SI
6751 in general. */
6752 if (GET_CODE (src) == SUBREG
6753 && (GET_MODE_SIZE (GET_MODE (src))
6754 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6755 sets[i].src_volatile = 1;
6756 #endif
6758 /* Locate all possible equivalent forms for SRC. Try to replace
6759 SRC in the insn with each cheaper equivalent.
6761 We have the following types of equivalents: SRC itself, a folded
6762 version, a value given in a REG_EQUAL note, or a value related
6763 to a constant.
6765 Each of these equivalents may be part of an additional class
6766 of equivalents (if more than one is in the table, they must be in
6767 the same class; we check for this).
6769 If the source is volatile, we don't do any table lookups.
6771 We note any constant equivalent for possible later use in a
6772 REG_NOTE. */
6774 if (!sets[i].src_volatile)
6775 elt = lookup (src, sets[i].src_hash, mode);
6777 sets[i].src_elt = elt;
6779 if (elt && src_eqv_here && src_eqv_elt)
6781 if (elt->first_same_value != src_eqv_elt->first_same_value)
6783 /* The REG_EQUAL is indicating that two formerly distinct
6784 classes are now equivalent. So merge them. */
6785 merge_equiv_classes (elt, src_eqv_elt);
6786 src_eqv_hash = HASH (src_eqv, elt->mode);
6787 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6790 src_eqv_here = 0;
6793 else if (src_eqv_elt)
6794 elt = src_eqv_elt;
6796 /* Try to find a constant somewhere and record it in `src_const'.
6797 Record its table element, if any, in `src_const_elt'. Look in
6798 any known equivalences first. (If the constant is not in the
6799 table, also set `sets[i].src_const_hash'). */
6800 if (elt)
6801 for (p = elt->first_same_value; p; p = p->next_same_value)
6802 if (p->is_const)
6804 src_const = p->exp;
6805 src_const_elt = elt;
6806 break;
6809 if (src_const == 0
6810 && (CONSTANT_P (src_folded)
6811 /* Consider (minus (label_ref L1) (label_ref L2)) as
6812 "constant" here so we will record it. This allows us
6813 to fold switch statements when an ADDR_DIFF_VEC is used. */
6814 || (GET_CODE (src_folded) == MINUS
6815 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6816 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6817 src_const = src_folded, src_const_elt = elt;
6818 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6819 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6821 /* If we don't know if the constant is in the table, get its
6822 hash code and look it up. */
6823 if (src_const && src_const_elt == 0)
6825 sets[i].src_const_hash = HASH (src_const, mode);
6826 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6829 sets[i].src_const = src_const;
6830 sets[i].src_const_elt = src_const_elt;
6832 /* If the constant and our source are both in the table, mark them as
6833 equivalent. Otherwise, if a constant is in the table but the source
6834 isn't, set ELT to it. */
6835 if (src_const_elt && elt
6836 && src_const_elt->first_same_value != elt->first_same_value)
6837 merge_equiv_classes (elt, src_const_elt);
6838 else if (src_const_elt && elt == 0)
6839 elt = src_const_elt;
6841 /* See if there is a register linearly related to a constant
6842 equivalent of SRC. */
6843 if (src_const
6844 && (GET_CODE (src_const) == CONST
6845 || (src_const_elt && src_const_elt->related_value != 0)))
6847 src_related = use_related_value (src_const, src_const_elt);
6848 if (src_related)
6850 struct table_elt *src_related_elt
6851 = lookup (src_related, HASH (src_related, mode), mode);
6852 if (src_related_elt && elt)
6854 if (elt->first_same_value
6855 != src_related_elt->first_same_value)
6856 /* This can occur when we previously saw a CONST
6857 involving a SYMBOL_REF and then see the SYMBOL_REF
6858 twice. Merge the involved classes. */
6859 merge_equiv_classes (elt, src_related_elt);
6861 src_related = 0;
6862 src_related_elt = 0;
6864 else if (src_related_elt && elt == 0)
6865 elt = src_related_elt;
6869 /* See if we have a CONST_INT that is already in a register in a
6870 wider mode. */
6872 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6873 && GET_MODE_CLASS (mode) == MODE_INT
6874 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6876 enum machine_mode wider_mode;
6878 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6879 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6880 && src_related == 0;
6881 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6883 struct table_elt *const_elt
6884 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6886 if (const_elt == 0)
6887 continue;
6889 for (const_elt = const_elt->first_same_value;
6890 const_elt; const_elt = const_elt->next_same_value)
6891 if (GET_CODE (const_elt->exp) == REG)
6893 src_related = gen_lowpart_if_possible (mode,
6894 const_elt->exp);
6895 break;
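/* For example, wanting (const_int 255) in QImode when (reg:SI R)
is already known to hold 255: the SImode class for the constant is
searched, and SRC_RELATED becomes the QImode low part of R, which
is usually cheaper than re-loading the constant. */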
6900 /* Another possibility is that we have an AND with a constant in
6901 a mode narrower than a word. If so, it might have been generated
6902 as part of an "if" which would narrow the AND. If we already
6903 have done the AND in a wider mode, we can use a SUBREG of that
6904 value. */
6906 if (flag_expensive_optimizations && ! src_related
6907 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6908 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6910 enum machine_mode tmode;
6911 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
6913 for (tmode = GET_MODE_WIDER_MODE (mode);
6914 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6915 tmode = GET_MODE_WIDER_MODE (tmode))
6917 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6918 struct table_elt *larger_elt;
6920 if (inner)
6922 PUT_MODE (new_and, tmode);
6923 XEXP (new_and, 0) = inner;
6924 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6925 if (larger_elt == 0)
6926 continue;
6928 for (larger_elt = larger_elt->first_same_value;
6929 larger_elt; larger_elt = larger_elt->next_same_value)
6930 if (GET_CODE (larger_elt->exp) == REG)
6932 src_related
6933 = gen_lowpart_if_possible (mode, larger_elt->exp);
6934 break;
6937 if (src_related)
6938 break;
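/* A sketch of the situation this catches: `if (x == (x & 0xf))'
computes (and:SI X (const_int 15)), and the narrowed value is later
used as (and:QI (subreg:QI X 0) (const_int 15)). Looking the AND up
in the wider SImode finds the already-computed register, whose low
part can stand in for the QImode AND. */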
6943 #ifdef LOAD_EXTEND_OP
6944 /* See if a MEM has already been loaded with a widening operation;
6945 if it has, we can use a subreg of that. Many CISC machines
6946 also have such operations, but this is only likely to be
6947 beneficial on these machines. */
6949 if (flag_expensive_optimizations && src_related == 0
6950 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6951 && GET_MODE_CLASS (mode) == MODE_INT
6952 && GET_CODE (src) == MEM && ! do_not_record
6953 && LOAD_EXTEND_OP (mode) != NIL)
6955 enum machine_mode tmode;
6957 /* Set what we are trying to extend and the operation it might
6958 have been extended with. */
6959 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6960 XEXP (memory_extend_rtx, 0) = src;
6962 for (tmode = GET_MODE_WIDER_MODE (mode);
6963 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6964 tmode = GET_MODE_WIDER_MODE (tmode))
6966 struct table_elt *larger_elt;
6968 PUT_MODE (memory_extend_rtx, tmode);
6969 larger_elt = lookup (memory_extend_rtx,
6970 HASH (memory_extend_rtx, tmode), tmode);
6971 if (larger_elt == 0)
6972 continue;
6974 for (larger_elt = larger_elt->first_same_value;
6975 larger_elt; larger_elt = larger_elt->next_same_value)
6976 if (GET_CODE (larger_elt->exp) == REG)
6978 src_related = gen_lowpart_if_possible (mode,
6979 larger_elt->exp);
6980 break;
6983 if (src_related)
6984 break;
6987 #endif /* LOAD_EXTEND_OP */
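/* Concretely, on a target whose QImode loads always zero-extend
(LOAD_EXTEND_OP (QImode) == ZERO_EXTEND), once
(zero_extend:SI (mem:QI A)) is known to live in (reg:SI R), a new
(mem:QI A) source can be replaced by the low part of R, turning a
memory load into a register access. */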
6989 if (src == src_folded)
6990 src_folded = 0;
6992 /* At this point, ELT, if non-zero, points to a class of expressions
6993 equivalent to the source of this SET, and SRC, SRC_EQV, SRC_FOLDED,
6994 and SRC_RELATED, if non-zero, each contain additional equivalent
6995 expressions. Prune these latter expressions by deleting expressions
6996 already in the equivalence class.
6998 Check for an equivalent identical to the destination. If found,
6999 this is the preferred equivalent since it will likely lead to
7000 elimination of the insn. Indicate this by placing it in
7001 `src_related'. */
7003 if (elt) elt = elt->first_same_value;
7004 for (p = elt; p; p = p->next_same_value)
7006 enum rtx_code code = GET_CODE (p->exp);
7008 /* If the expression is not valid, ignore it. Then we do not
7009 have to check for validity below. In most cases, we can use
7010 `rtx_equal_p', since canonicalization has already been done. */
7011 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
7012 continue;
7014 /* Also skip paradoxical subregs, unless that's what we're
7015 looking for. */
7016 if (code == SUBREG
7017 && (GET_MODE_SIZE (GET_MODE (p->exp))
7018 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
7019 && ! (src != 0
7020 && GET_CODE (src) == SUBREG
7021 && GET_MODE (src) == GET_MODE (p->exp)
7022 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7023 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
7024 continue;
7026 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7027 src = 0;
7028 else if (src_folded && GET_CODE (src_folded) == code
7029 && rtx_equal_p (src_folded, p->exp))
7030 src_folded = 0;
7031 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7032 && rtx_equal_p (src_eqv_here, p->exp))
7033 src_eqv_here = 0;
7034 else if (src_related && GET_CODE (src_related) == code
7035 && rtx_equal_p (src_related, p->exp))
7036 src_related = 0;
7038 /* If this is the same as the destination of the insn, we want
7039 to prefer it. Copy it to src_related. The code below will
7040 then give it a negative cost. */
7041 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
7042 src_related = dest;
7046 /* Find the cheapest valid equivalent, trying all the available
7047 possibilities. Prefer items not in the hash table to ones
7048 that are when they are equal cost. Note that we can never
7049 worsen an insn as the current contents will also succeed.
7050 If we find an equivalent identical to the destination, use it as best,
7051 since this insn will probably be eliminated in that case. */
7052 if (src)
7054 if (rtx_equal_p (src, dest))
7055 src_cost = -1;
7056 else
7057 src_cost = COST (src);
7060 if (src_eqv_here)
7062 if (rtx_equal_p (src_eqv_here, dest))
7063 src_eqv_cost = -1;
7064 else
7065 src_eqv_cost = COST (src_eqv_here);
7068 if (src_folded)
7070 if (rtx_equal_p (src_folded, dest))
7071 src_folded_cost = -1;
7072 else
7073 src_folded_cost = COST (src_folded);
7076 if (src_related)
7078 if (rtx_equal_p (src_related, dest))
7079 src_related_cost = -1;
7080 else
7081 src_related_cost = COST (src_related);
7084 /* If this was an indirect jump insn, a known label will really be
7085 cheaper even though it looks more expensive. */
7086 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
7087 src_folded = src_const, src_folded_cost = -1;
7089 /* Terminate loop when replacement made. This must terminate since
7090 the current contents will be tested and will always be valid. */
7091 while (1)
7093 rtx trial, old_src;
7095 /* Skip invalid entries. */
7096 while (elt && GET_CODE (elt->exp) != REG
7097 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7098 elt = elt->next_same_value;
7100 /* A paradoxical subreg would be bad here: it'll be the right
7101 size, but later may be adjusted so that the upper bits aren't
7102 what we want. So reject it. */
7103 if (elt != 0
7104 && GET_CODE (elt->exp) == SUBREG
7105 && (GET_MODE_SIZE (GET_MODE (elt->exp))
7106 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
7107 /* It is okay, though, if the rtx we're trying to match
7108 will ignore any of the bits we can't predict. */
7109 && ! (src != 0
7110 && GET_CODE (src) == SUBREG
7111 && GET_MODE (src) == GET_MODE (elt->exp)
7112 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7113 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
7115 elt = elt->next_same_value;
7116 continue;
7119 if (elt) src_elt_cost = elt->cost;
7121 /* Find cheapest and skip it for the next time. For items
7122 of equal cost, use this order:
7123 src_folded, src, src_eqv, src_related and hash table entry. */
7124 if (src_folded_cost <= src_cost
7125 && src_folded_cost <= src_eqv_cost
7126 && src_folded_cost <= src_related_cost
7127 && src_folded_cost <= src_elt_cost)
7129 trial = src_folded, src_folded_cost = 10000;
7130 if (src_folded_force_flag)
7131 trial = force_const_mem (mode, trial);
7133 else if (src_cost <= src_eqv_cost
7134 && src_cost <= src_related_cost
7135 && src_cost <= src_elt_cost)
7136 trial = src, src_cost = 10000;
7137 else if (src_eqv_cost <= src_related_cost
7138 && src_eqv_cost <= src_elt_cost)
7139 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7140 else if (src_related_cost <= src_elt_cost)
7141 trial = copy_rtx (src_related), src_related_cost = 10000;
7142 else
7144 trial = copy_rtx (elt->exp);
7145 elt = elt->next_same_value;
7146 src_elt_cost = 10000;
7149 /* We don't normally have an insn matching (set (pc) (pc)), so
7150 check for this separately here. We will delete such an
7151 insn below.
7153 Tablejump insns contain a USE of the table, so simply replacing
7154 the operand with the constant won't match. This is simply an
7155 unconditional branch, however, and is therefore valid. Just
7156 insert the substitution here and we will delete and re-emit
7157 the insn later. */
7159 /* Keep track of the original SET_SRC so that we can fix notes
7160 on libcall instructions. */
7161 old_src = SET_SRC (sets[i].rtl);
7163 if (n_sets == 1 && dest == pc_rtx
7164 && (trial == pc_rtx
7165 || (GET_CODE (trial) == LABEL_REF
7166 && ! condjump_p (insn))))
7168 /* If TRIAL is a label in front of a jump table, we are
7169 really falling through the switch (this is how casesi
7170 insns work), so we must branch around the table. */
7171 if (GET_CODE (trial) == CODE_LABEL
7172 && NEXT_INSN (trial) != 0
7173 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7174 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7175 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7177 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7179 SET_SRC (sets[i].rtl) = trial;
7180 cse_jumps_altered = 1;
7181 break;
7184 /* Look for a substitution that makes a valid insn. */
7185 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
7187 /* If we just made a substitution inside a libcall, then we
7188 need to make the same substitution in any notes attached
7189 to the RETVAL insn. */
7190 if (libcall_insn
7191 && (GET_CODE (old_src) == REG
7192 || GET_CODE (old_src) == SUBREG
7193 || GET_CODE (old_src) == MEM))
7194 replace_rtx (REG_NOTES (libcall_insn), old_src,
7195 canon_reg (SET_SRC (sets[i].rtl), insn));
7197 /* The result of apply_change_group can be ignored; see
7198 canon_reg. */
7200 validate_change (insn, &SET_SRC (sets[i].rtl),
7201 canon_reg (SET_SRC (sets[i].rtl), insn),
7203 apply_change_group ();
7204 break;
7207 /* If we previously found constant pool entries for
7208 constants and this is a constant, try making a
7209 pool entry. Put it in src_folded unless we have already done
7210 so, since that is where it likely came from. */
7212 else if (constant_pool_entries_cost
7213 && CONSTANT_P (trial)
7214 && ! (GET_CODE (trial) == CONST
7215 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7216 && (src_folded == 0
7217 || (GET_CODE (src_folded) != MEM
7218 && ! src_folded_force_flag))
7219 && GET_MODE_CLASS (mode) != MODE_CC
7220 && mode != VOIDmode)
7222 src_folded_force_flag = 1;
7223 src_folded = trial;
7224 src_folded_cost = constant_pool_entries_cost;
7228 src = SET_SRC (sets[i].rtl);
7230 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7231 However, there is an important exception: If both are registers
7232 that are not the head of their equivalence class, replace SET_SRC
7233 with the head of the class. If we do not do this, we will have
7234 both registers live over a portion of the basic block. This way,
7235 their lifetimes will likely abut instead of overlapping. */
7236 if (GET_CODE (dest) == REG
7237 && REGNO_QTY_VALID_P (REGNO (dest))
7238 && qty_mode[REG_QTY (REGNO (dest))] == GET_MODE (dest)
7239 && qty_first_reg[REG_QTY (REGNO (dest))] != REGNO (dest)
7240 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7241 /* Don't do this if the original insn had a hard reg as
7242 SET_SRC or SET_DEST. */
7243 && (GET_CODE (sets[i].src) != REG
7244 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
7245 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
7246 /* We can't call canon_reg here because it won't do anything if
7247 SRC is a hard register. */
7249 int first = qty_first_reg[REG_QTY (REGNO (src))];
7250 rtx new_src
7251 = (first >= FIRST_PSEUDO_REGISTER
7252 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7254 /* We must use validate_change even for this, because this
7255 might be a special no-op instruction, suitable only to
7256 tag notes onto. */
7257 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7259 src = new_src;
7260 /* If we had a constant that is cheaper than what we are now
7261 setting SRC to, use that constant. We ignored it when we
7262 thought we could make this into a no-op. */
7263 if (src_const && COST (src_const) < COST (src)
7264 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7266 src = src_const;
7270 /* If we made a change, recompute SRC values. */
7271 if (src != sets[i].src)
7273 do_not_record = 0;
7274 hash_arg_in_memory = 0;
7275 hash_arg_in_struct = 0;
7276 sets[i].src = src;
7277 sets[i].src_hash = HASH (src, mode);
7278 sets[i].src_volatile = do_not_record;
7279 sets[i].src_in_memory = hash_arg_in_memory;
7280 sets[i].src_in_struct = hash_arg_in_struct;
7281 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7284 /* If this is a single SET, we are setting a register, and we have an
7285 equivalent constant, we want to add a REG_NOTE. We don't want
7286 to write a REG_EQUAL note for a constant pseudo since verifying that
7287 that pseudo hasn't been eliminated is a pain. Such a note also
7288 won't help anything.
7290 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
7291 which can be created for a reference to a compile time computable
7292 entry in a jump table. */
7294 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
7295 && GET_CODE (src_const) != REG
7296 && ! (GET_CODE (src_const) == CONST
7297 && GET_CODE (XEXP (src_const, 0)) == MINUS
7298 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
7299 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7301 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7303 /* Make sure that the rtx is not shared with any other insn. */
7304 src_const = copy_rtx (src_const);
7306 /* Record the actual constant value in a REG_EQUAL note, making
7307 a new one if one does not already exist. */
7308 if (tem)
7309 XEXP (tem, 0) = src_const;
7310 else
7311 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7312 src_const, REG_NOTES (insn));
7314 /* If storing a constant value in a register that
7315 previously held the constant value 0,
7316 record this fact with a REG_WAS_0 note on this insn.
7318 Note that the *register* is required to have previously held 0,
7319 not just any register in the quantity, and we must point to the
7320 insn that set that register to zero.
7322 Rather than track each register individually, we just see if
7323 the last set for this quantity was for this register. */
7325 if (REGNO_QTY_VALID_P (REGNO (dest))
7326 && qty_const[REG_QTY (REGNO (dest))] == const0_rtx)
7328 /* See if we previously had a REG_WAS_0 note. */
7329 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7330 rtx const_insn = qty_const_insn[REG_QTY (REGNO (dest))];
7332 if ((tem = single_set (const_insn)) != 0
7333 && rtx_equal_p (SET_DEST (tem), dest))
7335 if (note)
7336 XEXP (note, 0) = const_insn;
7337 else
7338 REG_NOTES (insn)
7339 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
7340 REG_NOTES (insn));
7345 /* Now deal with the destination. */
7346 do_not_record = 0;
7348 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7349 to the MEM or REG within it. */
7350 while (GET_CODE (dest) == SIGN_EXTRACT
7351 || GET_CODE (dest) == ZERO_EXTRACT
7352 || GET_CODE (dest) == SUBREG
7353 || GET_CODE (dest) == STRICT_LOW_PART)
7354 dest = XEXP (dest, 0);
7356 sets[i].inner_dest = dest;
7358 if (GET_CODE (dest) == MEM)
7360 #ifdef PUSH_ROUNDING
7361 /* Stack pushes invalidate the stack pointer. */
7362 rtx addr = XEXP (dest, 0);
7363 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7364 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7365 && XEXP (addr, 0) == stack_pointer_rtx)
7366 invalidate (stack_pointer_rtx, Pmode);
7367 #endif
7368 dest = fold_rtx (dest, insn);
7371 /* Compute the hash code of the destination now,
7372 before the effects of this instruction are recorded,
7373 since the register values used in the address computation
7374 are those before this instruction. */
7375 sets[i].dest_hash = HASH (dest, mode);
7377 /* Don't enter a bit-field in the hash table
7378 because the value in it after the store
7379 may not equal what was stored, due to truncation. */
7381 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7382 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7384 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7386 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7387 && GET_CODE (width) == CONST_INT
7388 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7389 && ! (INTVAL (src_const)
7390 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7391 /* Exception: if the value is constant,
7392 and it won't be truncated, record it. */
7394 else
7396 /* This is chosen so that the destination will be invalidated
7397 but no new value will be recorded.
7398 We must invalidate because sometimes constant
7399 values can be recorded for bitfields. */
7400 sets[i].src_elt = 0;
7401 sets[i].src_volatile = 1;
7402 src_eqv = 0;
7403 src_eqv_elt = 0;
7407 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7408 the insn. */
7409 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7411 /* One less use of the label this insn used to jump to. */
7412 if (JUMP_LABEL (insn) != 0)
7413 --LABEL_NUSES (JUMP_LABEL (insn));
7414 PUT_CODE (insn, NOTE);
7415 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7416 NOTE_SOURCE_FILE (insn) = 0;
7417 cse_jumps_altered = 1;
7418 /* No more processing for this set. */
7419 sets[i].rtl = 0;
7422 /* If this SET is now setting PC to a label, we know it used to
7423 be a conditional or computed branch. So we see if we can follow
7424 it. If it was a computed branch, delete it and re-emit. */
7425 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7427 /* If this is not in the format for a simple branch and
7428 we are the only SET in it, re-emit it. */
7429 if (! simplejump_p (insn) && n_sets == 1)
7431 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7432 JUMP_LABEL (new) = XEXP (src, 0);
7433 LABEL_NUSES (XEXP (src, 0))++;
7434 insn = new;
7436 else
7437 /* Otherwise, force rerecognition, since it probably had
7438 a different pattern before.
7439 This shouldn't really be necessary, since whatever
7440 changed the source value above should have done this.
7441 Until the right place is found, might as well do this here. */
7442 INSN_CODE (insn) = -1;
7444 never_reached_warning (insn);
7446 /* Now emit a BARRIER after the unconditional jump. Do not bother
7447 deleting any unreachable code, let jump/flow do that. */
7448 if (NEXT_INSN (insn) != 0
7449 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
7450 emit_barrier_after (insn);
7452 cse_jumps_altered = 1;
7453 sets[i].rtl = 0;
7456 /* If destination is volatile, invalidate it and then do no further
7457 processing for this assignment. */
7459 else if (do_not_record)
7461 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7462 || GET_CODE (dest) == MEM)
7463 invalidate (dest, VOIDmode);
7464 else if (GET_CODE (dest) == STRICT_LOW_PART
7465 || GET_CODE (dest) == ZERO_EXTRACT)
7466 invalidate (XEXP (dest, 0), GET_MODE (dest));
7467 sets[i].rtl = 0;
7470 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7471 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7473 #ifdef HAVE_cc0
7474 /* If setting CC0, record what it was set to, or a constant, if it
7475 is equivalent to a constant. If it is being set to a floating-point
7476 value, make a COMPARE with the appropriate constant of 0. If we
7477 don't do this, later code can interpret this as a test against
7478 const0_rtx, which can cause problems if we try to put it into an
7479 insn as a floating-point operand. */
7480 if (dest == cc0_rtx)
7482 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7483 this_insn_cc0_mode = mode;
7484 if (FLOAT_MODE_P (mode))
7485 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7486 CONST0_RTX (mode));
7488 #endif
7491 /* Now enter all non-volatile source expressions in the hash table
7492 if they are not already present.
7493 Record their equivalence classes in src_elt.
7494 This way we can insert the corresponding destinations into
7495 the same classes even if the actual sources are no longer in them
7496 (having been invalidated). */
7498 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7499 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7501 register struct table_elt *elt;
7502 register struct table_elt *classp = sets[0].src_elt;
7503 rtx dest = SET_DEST (sets[0].rtl);
7504 enum machine_mode eqvmode = GET_MODE (dest);
7506 if (GET_CODE (dest) == STRICT_LOW_PART)
7508 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7509 classp = 0;
7511 if (insert_regs (src_eqv, classp, 0))
7513 rehash_using_reg (src_eqv);
7514 src_eqv_hash = HASH (src_eqv, eqvmode);
7516 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7517 elt->in_memory = src_eqv_in_memory;
7518 elt->in_struct = src_eqv_in_struct;
7519 src_eqv_elt = elt;
7521 /* Check to see if src_eqv_elt is the same as a set source which
7522 does not yet have an elt, and if so set the elt of the set source
7523 to src_eqv_elt. */
7524 for (i = 0; i < n_sets; i++)
7525 if (sets[i].rtl && sets[i].src_elt == 0
7526 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7527 sets[i].src_elt = src_eqv_elt;
7530 for (i = 0; i < n_sets; i++)
7531 if (sets[i].rtl && ! sets[i].src_volatile
7532 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7534 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7536 /* REG_EQUAL in setting a STRICT_LOW_PART
7537 gives an equivalent for the entire destination register,
7538 not just for the subreg being stored in now.
7539 This is a more interesting equivalence, so we arrange later
7540 to treat the entire reg as the destination. */
7541 sets[i].src_elt = src_eqv_elt;
7542 sets[i].src_hash = src_eqv_hash;
7544 else
7546 /* Insert source and constant equivalent into hash table, if not
7547 already present. */
7548 register struct table_elt *classp = src_eqv_elt;
7549 register rtx src = sets[i].src;
7550 register rtx dest = SET_DEST (sets[i].rtl);
7551 enum machine_mode mode
7552 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7554 /* Don't put a hard register source into the table if this is
7555 the last insn of a libcall. */
7556 if (sets[i].src_elt == 0
7557 && (GET_CODE (src) != REG
7558 || REGNO (src) >= FIRST_PSEUDO_REGISTER
7559 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
7561 register struct table_elt *elt;
7563 /* Note that these insert_regs calls cannot remove
7564 any of the src_elt's, because they would have failed to
7565 match if not still valid. */
7566 if (insert_regs (src, classp, 0))
7568 rehash_using_reg (src);
7569 sets[i].src_hash = HASH (src, mode);
7571 elt = insert (src, classp, sets[i].src_hash, mode);
7572 elt->in_memory = sets[i].src_in_memory;
7573 elt->in_struct = sets[i].src_in_struct;
7574 sets[i].src_elt = classp = elt;
7577 if (sets[i].src_const && sets[i].src_const_elt == 0
7578 && src != sets[i].src_const
7579 && ! rtx_equal_p (sets[i].src_const, src))
7580 sets[i].src_elt = insert (sets[i].src_const, classp,
7581 sets[i].src_const_hash, mode);
7584 else if (sets[i].src_elt == 0)
7585 /* If we did not insert the source into the hash table (e.g., it was
7586 volatile), note the equivalence class for the REG_EQUAL value, if any,
7587 so that the destination goes into that class. */
7588 sets[i].src_elt = src_eqv_elt;
7590 invalidate_from_clobbers (x);
7592 /* Some registers are invalidated by subroutine calls. Memory is
7593 invalidated by non-constant calls. */
7595 if (GET_CODE (insn) == CALL_INSN)
7597 if (! CONST_CALL_P (insn))
7598 invalidate_memory ();
7599 invalidate_for_call ();
7602 /* Now invalidate everything set by this instruction.
7603 If a SUBREG or other funny destination is being set,
7604 sets[i].rtl is still nonzero, so here we invalidate the reg
7605 a part of which is being set. */
7607 for (i = 0; i < n_sets; i++)
7608 if (sets[i].rtl)
7610 /* We can't use the inner dest, because the mode associated with
7611 a ZERO_EXTRACT is significant. */
7612 register rtx dest = SET_DEST (sets[i].rtl);
7614 /* Needed for registers to remove the register from its
7615 previous quantity's chain.
7616 Needed for memory if this is a nonvarying address, unless
7617 we have just done an invalidate_memory that covers even those. */
7618 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7619 || GET_CODE (dest) == MEM)
7620 invalidate (dest, VOIDmode);
7621 else if (GET_CODE (dest) == STRICT_LOW_PART
7622 || GET_CODE (dest) == ZERO_EXTRACT)
7623 invalidate (XEXP (dest, 0), GET_MODE (dest));
7626 /* A volatile ASM invalidates everything. */
7627 if (GET_CODE (insn) == INSN
7628 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
7629 && MEM_VOLATILE_P (PATTERN (insn)))
7630 flush_hash_table ();
7632 /* Make sure registers mentioned in destinations
7633 are safe for use in an expression to be inserted.
7634 This removes from the hash table
7635 any invalid entry that refers to one of these registers.
7637 We don't care about the return value from mention_regs because
7638 we are going to hash the SET_DEST values unconditionally. */
7640 for (i = 0; i < n_sets; i++)
7642 if (sets[i].rtl)
7644 rtx x = SET_DEST (sets[i].rtl);
7646 if (GET_CODE (x) != REG)
7647 mention_regs (x);
7648 else
7650 /* We used to rely on all references to a register becoming
7651 inaccessible when a register changes to a new quantity,
7652 since that changes the hash code. However, that is not
7653 safe, since after NBUCKETS new quantities we get a
7654 hash 'collision' of a register with its own invalid
7655 entries. And since SUBREGs have been changed not to
7656 change their hash code with the hash code of the register,
7657 it wouldn't work any longer at all. So we have to check
7658 for any invalid references lying around now.
7659 This code is similar to the REG case in mention_regs,
7660 but it knows that reg_tick has been incremented, and
7661 	 it leaves reg_in_table as -1.  */
7662 register int regno = REGNO (x);
7663 register int endregno
7664 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7665 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7666 int i;
7668 for (i = regno; i < endregno; i++)
7670 if (REG_IN_TABLE (i) >= 0)
7672 remove_invalid_refs (i);
7673 REG_IN_TABLE (i) = -1;
7680 /* We may have just removed some of the src_elt's from the hash table.
7681 So replace each one with the current head of the same class. */
7683 for (i = 0; i < n_sets; i++)
7684 if (sets[i].rtl)
7686 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7687 /* If elt was removed, find current head of same class,
7688 or 0 if nothing remains of that class. */
7690 register struct table_elt *elt = sets[i].src_elt;
7692 while (elt && elt->prev_same_value)
7693 elt = elt->prev_same_value;
7695 while (elt && elt->first_same_value == 0)
7696 elt = elt->next_same_value;
7697 sets[i].src_elt = elt ? elt->first_same_value : 0;
7701 /* Now insert the destinations into their equivalence classes. */
7703 for (i = 0; i < n_sets; i++)
7704 if (sets[i].rtl)
7706 register rtx dest = SET_DEST (sets[i].rtl);
7707 rtx inner_dest = sets[i].inner_dest;
7708 register struct table_elt *elt;
7710 /* Don't record value if we are not supposed to risk allocating
7711 floating-point values in registers that might be wider than
7712 memory. */
7713 if ((flag_float_store
7714 && GET_CODE (dest) == MEM
7715 && FLOAT_MODE_P (GET_MODE (dest)))
7716 	 /* Don't record BLKmode values, because we don't know
7717 	    their size, and can't be sure that other BLKmode values
7718 	    have the same or smaller size.  */
7719 || GET_MODE (dest) == BLKmode
7720 /* Don't record values of destinations set inside a libcall block
7721 since we might delete the libcall. Things should have been set
7722 up so we won't want to reuse such a value, but we play it safe
7723 here. */
7724 || libcall_insn
7725 /* If we didn't put a REG_EQUAL value or a source into the hash
7726 	 table, there is no point in recording DEST.  */
7727 || sets[i].src_elt == 0
7728 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7729 or SIGN_EXTEND, don't record DEST since it can cause
7730 some tracking to be wrong.
7732 ??? Think about this more later. */
7733 || (GET_CODE (dest) == SUBREG
7734 && (GET_MODE_SIZE (GET_MODE (dest))
7735 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7736 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7737 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7738 continue;
7740 /* STRICT_LOW_PART isn't part of the value BEING set,
7741 and neither is the SUBREG inside it.
7742 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7743 if (GET_CODE (dest) == STRICT_LOW_PART)
7744 dest = SUBREG_REG (XEXP (dest, 0));
7746 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7747 /* Registers must also be inserted into chains for quantities. */
7748 if (insert_regs (dest, sets[i].src_elt, 1))
7750 /* If `insert_regs' changes something, the hash code must be
7751 recalculated. */
7752 rehash_using_reg (dest);
7753 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7756 if (GET_CODE (inner_dest) == MEM
7757 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7758 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7759 that (MEM (ADDRESSOF (X))) is equivalent to Y.
7760 Consider the case in which the address of the MEM is
7761 passed to a function, which alters the MEM. Then, if we
7762 later use Y instead of the MEM we'll miss the update. */
7763 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7764 else
7765 elt = insert (dest, sets[i].src_elt,
7766 sets[i].dest_hash, GET_MODE (dest));
7768 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7769 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7770 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7771 0))));
7773 if (elt->in_memory)
7775 /* This implicitly assumes a whole struct
7776 need not have MEM_IN_STRUCT_P.
7777 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7778 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7779 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7782 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7783 narrower than M2, and both M1 and M2 are the same number of words,
7784 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7785 make that equivalence as well.
7787 However, BAR may have equivalences for which gen_lowpart_if_possible
7788 will produce a simpler value than gen_lowpart_if_possible applied to
7789 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7790 BAR's equivalences. If we don't get a simplified form, make
7791 the SUBREG. It will not be used in an equivalence, but will
7792 cause two similar assignments to be detected.
7794 Note the loop below will find SUBREG_REG (DEST) since we have
7795 already entered SRC and DEST of the SET in the table. */
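	     /* A hypothetical instance on a 32-bit target, where HImode
		and SImode each occupy one word: from
		  (set (subreg:SI (reg:HI 100) 0) (reg:SI 99))
		we may also record the equivalence
		  (set (reg:HI 100) (subreg:HI (reg:SI 99) 0))
		after scanning (reg:SI 99)'s class for a form that
		gen_lowpart_if_possible can simplify.  */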
7797 if (GET_CODE (dest) == SUBREG
7798 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7799 / UNITS_PER_WORD)
7800 == (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
7801 && (GET_MODE_SIZE (GET_MODE (dest))
7802 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7803 && sets[i].src_elt != 0)
7805 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7806 struct table_elt *elt, *classp = 0;
7808 for (elt = sets[i].src_elt->first_same_value; elt;
7809 elt = elt->next_same_value)
7811 rtx new_src = 0;
7812 unsigned src_hash;
7813 struct table_elt *src_elt;
7815 /* Ignore invalid entries. */
7816 if (GET_CODE (elt->exp) != REG
7817 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7818 continue;
7820 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7821 if (new_src == 0)
7822 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7824 src_hash = HASH (new_src, new_mode);
7825 src_elt = lookup (new_src, src_hash, new_mode);
7827 	  /* Put the new source in the hash table if it isn't
7828 	     already.  */
7829 if (src_elt == 0)
7831 if (insert_regs (new_src, classp, 0))
7833 rehash_using_reg (new_src);
7834 src_hash = HASH (new_src, new_mode);
7836 src_elt = insert (new_src, classp, src_hash, new_mode);
7837 src_elt->in_memory = elt->in_memory;
7838 src_elt->in_struct = elt->in_struct;
7840 else if (classp && classp != src_elt->first_same_value)
7841 /* Show that two things that we've seen before are
7842 actually the same. */
7843 merge_equiv_classes (src_elt, classp);
7845 classp = src_elt->first_same_value;
7846 /* Ignore invalid entries. */
7847 while (classp
7848 && GET_CODE (classp->exp) != REG
7849 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7850 classp = classp->next_same_value;
7855 /* Special handling for (set REG0 REG1)
7856 where REG0 is the "cheapest", cheaper than REG1.
7857 After cse, REG1 will probably not be used in the sequel,
7858 so (if easily done) change this insn to (set REG1 REG0) and
7859 replace REG1 with REG0 in the previous insn that computed their value.
7860 Then REG1 will become a dead store and won't cloud the situation
7861 for later optimizations.
7863 Do not make this change if REG1 is a hard register, because it will
7864 then be used in the sequel and we may be changing a two-operand insn
7865 into a three-operand insn.
7867 Also do not do this if we are operating on a copy of INSN.
7869 Also don't do this if INSN ends a libcall; this would cause an unrelated
7870 register to be set in the middle of a libcall, and we then get bad code
7871 if the libcall is deleted. */
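   /* A hypothetical example with pseudos 98, 99 and 100, where (reg 100)
      is the cheapest register holding the value:
	 (set (reg 99) (plus (reg 98) (const_int 4)))	;; PREV
	 (set (reg 100) (reg 99))			;; INSN
      is rewritten as
	 (set (reg 100) (plus (reg 98) (const_int 4)))
	 (set (reg 99) (reg 100))
      so the second insn becomes a dead store if (reg 99) goes unused
      in the sequel.  */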
7873 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7874 && NEXT_INSN (PREV_INSN (insn)) == insn
7875 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7876 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7877 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7878 && (qty_first_reg[REG_QTY (REGNO (SET_SRC (sets[0].rtl)))]
7879 == REGNO (SET_DEST (sets[0].rtl)))
7880 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7882 rtx prev = PREV_INSN (insn);
7883 while (prev && GET_CODE (prev) == NOTE)
7884 prev = PREV_INSN (prev);
7886 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7887 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7889 rtx dest = SET_DEST (sets[0].rtl);
7890 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7892 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7893 validate_change (insn, & SET_DEST (sets[0].rtl),
7894 SET_SRC (sets[0].rtl), 1);
7895 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7896 apply_change_group ();
7898 /* If REG1 was equivalent to a constant, REG0 is not. */
7899 if (note)
7900 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7902 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7903 any REG_WAS_0 note on INSN to PREV. */
7904 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7905 if (note)
7906 remove_note (prev, note);
7908 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7909 if (note)
7911 remove_note (insn, note);
7912 XEXP (note, 1) = REG_NOTES (prev);
7913 REG_NOTES (prev) = note;
7916 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7917 then we must delete it, because the value in REG0 has changed. */
7918 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7919 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7920 remove_note (insn, note);
7924 /* If this is a conditional jump insn, record any known equivalences due to
7925 the condition being tested. */
7927 last_jump_equiv_class = 0;
7928 if (GET_CODE (insn) == JUMP_INSN
7929 && n_sets == 1 && GET_CODE (x) == SET
7930 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7931 record_jump_equiv (insn, 0);
7933 #ifdef HAVE_cc0
7934 /* If the previous insn set CC0 and this insn no longer references CC0,
7935 delete the previous insn. Here we use the fact that nothing expects CC0
7936 to be valid over an insn, which is true until the final pass. */
7937 if (prev_insn && GET_CODE (prev_insn) == INSN
7938 && (tem = single_set (prev_insn)) != 0
7939 && SET_DEST (tem) == cc0_rtx
7940 && ! reg_mentioned_p (cc0_rtx, x))
7942 PUT_CODE (prev_insn, NOTE);
7943 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7944 NOTE_SOURCE_FILE (prev_insn) = 0;
7947 prev_insn_cc0 = this_insn_cc0;
7948 prev_insn_cc0_mode = this_insn_cc0_mode;
7949 #endif
7951 prev_insn = insn;
7954 /* Remove from the hash table all expressions that reference memory. */
7955 static void
7956 invalidate_memory ()
7958 register int i;
7959 register struct table_elt *p, *next;
7961 for (i = 0; i < NBUCKETS; i++)
7962 for (p = table[i]; p; p = next)
7964 next = p->next_same_hash;
7965 if (p->in_memory)
7966 remove_from_table (p, i);
7970 /* XXX ??? The name of this function bears little resemblance to
7971 what this function actually does. FIXME. */
7972 static int
7973 note_mem_written (addr)
7974 register rtx addr;
7976 /* Pushing or popping the stack invalidates just the stack pointer. */
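  /* For instance, an auto-increment address such as
     (pre_dec:SI (reg:SI sp)) -- a sketch; register numbering is
     target-dependent -- takes this path: only the stack pointer's
     tick is bumped rather than flushing all of memory.  */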
7977 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7978 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7979 && GET_CODE (XEXP (addr, 0)) == REG
7980 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7982 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
7983 REG_TICK (STACK_POINTER_REGNUM)++;
7985 /* This should be *very* rare. */
7986 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7987 invalidate (stack_pointer_rtx, VOIDmode);
7988 return 1;
7990 return 0;
7993 /* Perform invalidation on the basis of everything about an insn
7994 except for invalidating the actual places that are SET in it.
7995 This includes the places CLOBBERed, and anything that might
7996 alias with something that is SET or CLOBBERed.
7998 X is the pattern of the insn. */
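/* For example (a hypothetical pattern), given

     (parallel [(set (reg 100) (plus (reg 101) (reg 102)))
		(clobber (reg:CC 17))])

   only the CLOBBERed (reg:CC 17) is invalidated here; the SET
   destination is invalidated later by cse_insn itself.  */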
8000 static void
8001 invalidate_from_clobbers (x)
8002 rtx x;
8004 if (GET_CODE (x) == CLOBBER)
8006 rtx ref = XEXP (x, 0);
8007 if (ref)
8009 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8010 || GET_CODE (ref) == MEM)
8011 invalidate (ref, VOIDmode);
8012 else if (GET_CODE (ref) == STRICT_LOW_PART
8013 || GET_CODE (ref) == ZERO_EXTRACT)
8014 invalidate (XEXP (ref, 0), GET_MODE (ref));
8017 else if (GET_CODE (x) == PARALLEL)
8019 register int i;
8020 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8022 register rtx y = XVECEXP (x, 0, i);
8023 if (GET_CODE (y) == CLOBBER)
8025 rtx ref = XEXP (y, 0);
8026 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8027 || GET_CODE (ref) == MEM)
8028 invalidate (ref, VOIDmode);
8029 else if (GET_CODE (ref) == STRICT_LOW_PART
8030 || GET_CODE (ref) == ZERO_EXTRACT)
8031 invalidate (XEXP (ref, 0), GET_MODE (ref));
8037 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
8038 and replace any registers in them with either an equivalent constant
8039 or the canonical form of the register. If we are inside an address,
8040 only do this if the address remains valid.
8042 OBJECT is 0 except when within a MEM in which case it is the MEM.
8044 Return the replacement for X. */
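/* A hypothetical illustration: if (reg:SI 101) is currently known to
   hold (const_int 8), a note such as
     (expr_list:REG_EQUAL (plus:SI (reg:SI 101) (reg:SI 102)) ...)
   is rewritten with (const_int 8) in place of (reg:SI 101), while
   (reg:SI 102) is replaced by its canonical register.  */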
8046 static rtx
8047 cse_process_notes (x, object)
8048 rtx x;
8049 rtx object;
8051 enum rtx_code code = GET_CODE (x);
8052 const char *fmt = GET_RTX_FORMAT (code);
8053 int i;
8055 switch (code)
8057 case CONST_INT:
8058 case CONST:
8059 case SYMBOL_REF:
8060 case LABEL_REF:
8061 case CONST_DOUBLE:
8062 case PC:
8063 case CC0:
8064 case LO_SUM:
8065 return x;
8067 case MEM:
8068 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
8069 return x;
8071 case EXPR_LIST:
8072 case INSN_LIST:
8073 if (REG_NOTE_KIND (x) == REG_EQUAL)
8074 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
8075 if (XEXP (x, 1))
8076 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
8077 return x;
8079 case SIGN_EXTEND:
8080 case ZERO_EXTEND:
8081 case SUBREG:
8083 rtx new = cse_process_notes (XEXP (x, 0), object);
8084 /* We don't substitute VOIDmode constants into these rtx,
8085 since they would impede folding. */
8086 if (GET_MODE (new) != VOIDmode)
8087 validate_change (object, &XEXP (x, 0), new, 0);
8088 return x;
8091 case REG:
8092 i = REG_QTY (REGNO (x));
8094 /* Return a constant or a constant register. */
8095 if (REGNO_QTY_VALID_P (REGNO (x))
8096 && qty_const[i] != 0
8097 && (CONSTANT_P (qty_const[i])
8098 || GET_CODE (qty_const[i]) == REG))
8100 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
8101 if (new)
8102 return new;
8105 /* Otherwise, canonicalize this register. */
8106 return canon_reg (x, NULL_RTX);
8108 default:
8109 break;
8112 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8113 if (fmt[i] == 'e')
8114 validate_change (object, &XEXP (x, i),
8115 cse_process_notes (XEXP (x, i), object), 0);
8117 return x;
8120 /* Find common subexpressions between the end test of a loop and the beginning
8121 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
8123 Often we have a loop where an expression in the exit test is used
8124 in the body of the loop. For example "while (*p) *q++ = *p++;".
8125 Because of the way we duplicate the loop exit test in front of the loop,
8126 however, we don't detect that common subexpression. This will be caught
8127 when global cse is implemented, but this is a quite common case.
8129 This function handles the most common cases of these common expressions.
8130 It is called after we have processed the basic block ending with the
8131 	 NOTE_INSN_LOOP_END note that ends a loop, when the previous JUMP_INSN
8132 	 jumps to a label used only once.  */
8134 static void
8135 cse_around_loop (loop_start)
8136 rtx loop_start;
8138 rtx insn;
8139 int i;
8140 struct table_elt *p;
8142 /* If the jump at the end of the loop doesn't go to the start, we don't
8143 do anything. */
8144 for (insn = PREV_INSN (loop_start);
8145 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
8146 insn = PREV_INSN (insn))
8149 if (insn == 0
8150 || GET_CODE (insn) != NOTE
8151 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
8152 return;
8154 /* If the last insn of the loop (the end test) was an NE comparison,
8155 we will interpret it as an EQ comparison, since we fell through
8156 the loop. Any equivalences resulting from that comparison are
8157 therefore not valid and must be invalidated. */
8158 if (last_jump_equiv_class)
8159 for (p = last_jump_equiv_class->first_same_value; p;
8160 p = p->next_same_value)
8162 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
8163 || (GET_CODE (p->exp) == SUBREG
8164 && GET_CODE (SUBREG_REG (p->exp)) == REG))
8165 invalidate (p->exp, VOIDmode);
8166 else if (GET_CODE (p->exp) == STRICT_LOW_PART
8167 || GET_CODE (p->exp) == ZERO_EXTRACT)
8168 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
8171 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
8172 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
8174 The only thing we do with SET_DEST is invalidate entries, so we
8175 can safely process each SET in order. It is slightly less efficient
8176 to do so, but we only want to handle the most common cases.
8178 The gen_move_insn call in cse_set_around_loop may create new pseudos.
8179 These pseudos won't have valid entries in any of the tables indexed
8180 by register number, such as reg_qty. We avoid out-of-range array
8181 accesses by not processing any instructions created after cse started. */
8183 for (insn = NEXT_INSN (loop_start);
8184 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
8185 && INSN_UID (insn) < max_insn_uid
8186 && ! (GET_CODE (insn) == NOTE
8187 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
8188 insn = NEXT_INSN (insn))
8190 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8191 && (GET_CODE (PATTERN (insn)) == SET
8192 || GET_CODE (PATTERN (insn)) == CLOBBER))
8193 cse_set_around_loop (PATTERN (insn), insn, loop_start);
8194 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8195 && GET_CODE (PATTERN (insn)) == PARALLEL)
8196 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8197 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
8198 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
8199 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
8200 loop_start);
8204 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
8205 since they are done elsewhere. This function is called via note_stores. */
8207 static void
8208 invalidate_skipped_set (dest, set, data)
8209      rtx dest;
8210      rtx set;
8211 void *data ATTRIBUTE_UNUSED;
8213 enum rtx_code code = GET_CODE (dest);
8215 if (code == MEM
8216 && ! note_mem_written (dest) /* If this is not a stack push ... */
8217 /* There are times when an address can appear varying and be a PLUS
8218 during this scan when it would be a fixed address were we to know
8219 the proper equivalences. So invalidate all memory if there is
8220 a BLKmode or nonscalar memory reference or a reference to a
8221 variable address. */
8222 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
8223 || cse_rtx_varies_p (XEXP (dest, 0))))
8225 invalidate_memory ();
8226 return;
8229 if (GET_CODE (set) == CLOBBER
8230 #ifdef HAVE_cc0
8231 || dest == cc0_rtx
8232 #endif
8233 || dest == pc_rtx)
8234 return;
8236 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
8237 invalidate (XEXP (dest, 0), GET_MODE (dest));
8238 else if (code == REG || code == SUBREG || code == MEM)
8239 invalidate (dest, VOIDmode);
8242 /* Invalidate all insns from START up to the end of the function or the
8243 	 next label.  This is called when we wish to CSE around a block that is
8244 conditionally executed. */
8246 static void
8247 invalidate_skipped_block (start)
8248 rtx start;
8250 rtx insn;
8252 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8253 insn = NEXT_INSN (insn))
8255 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8256 continue;
8258 if (GET_CODE (insn) == CALL_INSN)
8260 if (! CONST_CALL_P (insn))
8261 invalidate_memory ();
8262 invalidate_for_call ();
8265 invalidate_from_clobbers (PATTERN (insn));
8266 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
8270 /* If modifying X will modify the value in *DATA (which is really an
8271 `rtx *'), indicate that fact by setting the pointed to value to
8272 NULL_RTX. */
8274 static void
8275 cse_check_loop_start (x, set, data)
8276 rtx x;
8277 rtx set ATTRIBUTE_UNUSED;
8278 void *data;
8280 rtx *cse_check_loop_start_value = (rtx *) data;
8282 if (*cse_check_loop_start_value == NULL_RTX
8283 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8284 return;
8286 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
8287 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
8288 *cse_check_loop_start_value = NULL_RTX;
8291 /* X is a SET or CLOBBER contained in INSN that was found near the start of
8292 a loop that starts with the label at LOOP_START.
8294 If X is a SET, we see if its SET_SRC is currently in our hash table.
8295 If so, we see if it has a value equal to some register used only in the
8296 loop exit code (as marked by jump.c).
8298 If those two conditions are true, we search backwards from the start of
8299 the loop to see if that same value was loaded into a register that still
8300 retains its value at the start of the loop.
8302 If so, we insert an insn after the load to copy the destination of that
8303 load into the equivalent register and (try to) replace our SET_SRC with that
8304 register.
8306 In any event, we invalidate whatever this SET or CLOBBER modifies. */
8308 static void
8309 cse_set_around_loop (x, insn, loop_start)
8310 rtx x;
8311 rtx insn;
8312 rtx loop_start;
8314 struct table_elt *src_elt;
8316 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
8317 are setting PC or CC0 or whose SET_SRC is already a register. */
8318 if (GET_CODE (x) == SET
8319 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
8320 && GET_CODE (SET_SRC (x)) != REG)
8322 src_elt = lookup (SET_SRC (x),
8323 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
8324 GET_MODE (SET_DEST (x)));
8326 if (src_elt)
8327 for (src_elt = src_elt->first_same_value; src_elt;
8328 src_elt = src_elt->next_same_value)
8329 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
8330 && COST (src_elt->exp) < COST (SET_SRC (x)))
8332 rtx p, set;
8334 /* Look for an insn in front of LOOP_START that sets
8335 something in the desired mode to SET_SRC (x) before we hit
8336 a label or CALL_INSN. */
8338 for (p = prev_nonnote_insn (loop_start);
8339 p && GET_CODE (p) != CALL_INSN
8340 && GET_CODE (p) != CODE_LABEL;
8341 p = prev_nonnote_insn (p))
8342 if ((set = single_set (p)) != 0
8343 && GET_CODE (SET_DEST (set)) == REG
8344 && GET_MODE (SET_DEST (set)) == src_elt->mode
8345 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8347 /* We now have to ensure that nothing between P
8348 and LOOP_START modified anything referenced in
8349 SET_SRC (x). We know that nothing within the loop
8350 can modify it, or we would have invalidated it in
8351 the hash table. */
8352 rtx q;
8353 rtx cse_check_loop_start_value = SET_SRC (x);
8354 for (q = p; q != loop_start; q = NEXT_INSN (q))
8355 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8356 note_stores (PATTERN (q),
8357 cse_check_loop_start,
8358 &cse_check_loop_start_value);
8360 /* If nothing was changed and we can replace our
8361 SET_SRC, add an insn after P to copy its destination
8362 to what we will be replacing SET_SRC with. */
8363 if (cse_check_loop_start_value
8364 && validate_change (insn, &SET_SRC (x),
8365 src_elt->exp, 0))
8367 /* If this creates new pseudos, this is unsafe,
8368 		       because the regno of a new pseudo is unsuitable
8369 to index into reg_qty when cse_insn processes
8370 the new insn. Therefore, if a new pseudo was
8371 created, discard this optimization. */
8372 int nregs = max_reg_num ();
8373 rtx move
8374 = gen_move_insn (src_elt->exp, SET_DEST (set));
8375 if (nregs != max_reg_num ())
8377 if (! validate_change (insn, &SET_SRC (x),
8378 SET_SRC (set), 0))
8379 abort ();
8381 else
8382 emit_insn_after (move, p);
8384 break;
8389 /* Now invalidate anything modified by X. */
8390 note_mem_written (SET_DEST (x));
8392 /* See comment on similar code in cse_insn for explanation of these tests. */
8393 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
8394 || GET_CODE (SET_DEST (x)) == MEM)
8395 invalidate (SET_DEST (x), VOIDmode);
8396 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8397 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
8398 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
8401 /* Find the end of INSN's basic block and return its range,
8402 the total number of SETs in all the insns of the block, the last insn of the
8403 block, and the branch path.
8405 The branch path indicates which branches should be followed. If a non-zero
8406 path size is specified, the block should be rescanned and a different set
8407 of branches will be taken. The branch path is only used if
8408 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
8410 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8411 used to describe the block. It is filled in with the information about
8412 the current block. The incoming structure's branch path, if any, is used
8413 to construct the output branch path. */
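/* Typical use, mirroring cse_main below (a sketch, not new behavior):

     val.path_size = 0;
     cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
			     after_loop, flag_cse_skip_blocks);

   after which the caller consults val.last, val.nsets and val.path.  */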
8415 void
8416 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
8417 rtx insn;
8418 struct cse_basic_block_data *data;
8419 int follow_jumps;
8420 int after_loop;
8421 int skip_blocks;
8423 rtx p = insn, q;
8424 int nsets = 0;
8425 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
8426 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
8427 int path_size = data->path_size;
8428 int path_entry = 0;
8429 int i;
8431 /* Update the previous branch path, if any. If the last branch was
8432 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
8433 shorten the path by one and look at the previous branch. We know that
8434 at least one branch must have been taken if PATH_SIZE is non-zero. */
8435 while (path_size > 0)
8437 if (data->path[path_size - 1].status != NOT_TAKEN)
8439 data->path[path_size - 1].status = NOT_TAKEN;
8440 break;
8442 else
8443 path_size--;
8446 /* If the first instruction is marked with QImode, that means we've
8447 already processed this block. Our caller will look at DATA->LAST
8448 to figure out where to go next. We want to return the next block
8449 in the instruction stream, not some branched-to block somewhere
8450 	 else.  We accomplish this by pretending our caller forbade us to
8451 	 follow jumps or skip blocks.  */
8452 if (GET_MODE (insn) == QImode)
8453 follow_jumps = skip_blocks = 0;
8455 /* Scan to end of this basic block. */
8456 while (p && GET_CODE (p) != CODE_LABEL)
8458 /* Don't cse out the end of a loop. This makes a difference
8459 only for the unusual loops that always execute at least once;
8460 all other loops have labels there so we will stop in any case.
8461 Cse'ing out the end of the loop is dangerous because it
8462 might cause an invariant expression inside the loop
8463 to be reused after the end of the loop. This would make it
8464 hard to move the expression out of the loop in loop.c,
8465 especially if it is one of several equivalent expressions
8466 and loop.c would like to eliminate it.
8468 If we are running after loop.c has finished, we can ignore
8469 the NOTE_INSN_LOOP_END. */
8471 if (! after_loop && GET_CODE (p) == NOTE
8472 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8473 break;
8475 	 /* Don't cse over a call to setjmp; on some machines (e.g. the VAX)
8476 the regs restored by the longjmp come from
8477 a later time than the setjmp. */
8478 if (GET_CODE (p) == NOTE
8479 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8480 break;
8482 /* A PARALLEL can have lots of SETs in it,
8483 especially if it is really an ASM_OPERANDS. */
8484 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8485 && GET_CODE (PATTERN (p)) == PARALLEL)
8486 nsets += XVECLEN (PATTERN (p), 0);
8487 else if (GET_CODE (p) != NOTE)
8488 nsets += 1;
8490 /* Ignore insns made by CSE; they cannot affect the boundaries of
8491 the basic block. */
8493 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8494 high_cuid = INSN_CUID (p);
8495 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8496 low_cuid = INSN_CUID (p);
8498 /* See if this insn is in our branch path. If it is and we are to
8499 take it, do so. */
8500 if (path_entry < path_size && data->path[path_entry].branch == p)
8502 if (data->path[path_entry].status != NOT_TAKEN)
8503 p = JUMP_LABEL (p);
8505 /* Point to next entry in path, if any. */
8506 path_entry++;
8509 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8510 was specified, we haven't reached our maximum path length, there are
8511 insns following the target of the jump, this is the only use of the
8512 jump label, and the target label is preceded by a BARRIER.
8514 Alternatively, we can follow the jump if it branches around a
8515 block of code and there are no other branches into the block.
8516 In this case invalidate_skipped_block will be called to invalidate any
8517 registers set in the block when following the jump. */
8519 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8520 && GET_CODE (p) == JUMP_INSN
8521 && GET_CODE (PATTERN (p)) == SET
8522 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8523 && JUMP_LABEL (p) != 0
8524 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8525 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8527 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8528 if ((GET_CODE (q) != NOTE
8529 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8530 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8531 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8532 break;
8534 /* If we ran into a BARRIER, this code is an extension of the
8535 basic block when the branch is taken. */
8536 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8538 		 /* Don't allow ourselves to keep walking around an
8539 		    always-executed loop.  */
8540 if (next_real_insn (q) == next)
8542 p = NEXT_INSN (p);
8543 continue;
8546 /* Similarly, don't put a branch in our path more than once. */
8547 for (i = 0; i < path_entry; i++)
8548 if (data->path[i].branch == p)
8549 break;
8551 if (i != path_entry)
8552 break;
8554 data->path[path_entry].branch = p;
8555 data->path[path_entry++].status = TAKEN;
8557 /* This branch now ends our path. It was possible that we
8558 didn't see this branch the last time around (when the
8559 insn in front of the target was a JUMP_INSN that was
8560 turned into a no-op). */
8561 path_size = path_entry;
8563 p = JUMP_LABEL (p);
8564 /* Mark block so we won't scan it again later. */
8565 PUT_MODE (NEXT_INSN (p), QImode);
8567 /* Detect a branch around a block of code. */
8568 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8570 register rtx tmp;
8572 if (next_real_insn (q) == next)
8574 p = NEXT_INSN (p);
8575 continue;
8578 for (i = 0; i < path_entry; i++)
8579 if (data->path[i].branch == p)
8580 break;
8582 if (i != path_entry)
8583 break;
8585 /* This is no_labels_between_p (p, q) with an added check for
8586 reaching the end of a function (in case Q precedes P). */
8587 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8588 if (GET_CODE (tmp) == CODE_LABEL)
8589 break;
8591 if (tmp == q)
8593 data->path[path_entry].branch = p;
8594 data->path[path_entry++].status = AROUND;
8596 path_size = path_entry;
8598 p = JUMP_LABEL (p);
8599 /* Mark block so we won't scan it again later. */
8600 PUT_MODE (NEXT_INSN (p), QImode);
8604 p = NEXT_INSN (p);
8607 data->low_cuid = low_cuid;
8608 data->high_cuid = high_cuid;
8609 data->nsets = nsets;
8610 data->last = p;
8612 	 /* If no jump in the path was taken, set our path length to zero
8613 	    so a rescan won't be done.  */
8614 for (i = path_size - 1; i >= 0; i--)
8615 if (data->path[i].status != NOT_TAKEN)
8616 break;
8618 if (i == -1)
8619 data->path_size = 0;
8620 else
8621 data->path_size = path_size;
8623 /* End the current branch path. */
8624 data->path[path_size].branch = 0;
8627 /* Perform cse on the instructions of a function.
8628 F is the first instruction.
8629 	 NREGS is one plus the highest pseudo-reg number used in the function.
8631 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8632 (only if -frerun-cse-after-loop).
8634 Returns 1 if jump_optimize should be redone due to simplifications
8635 in conditional jump instructions. */
8638 cse_main (f, nregs, after_loop, file)
8639 rtx f;
8640 int nregs;
8641 int after_loop;
8642 FILE *file;
8644 struct cse_basic_block_data val;
8645 register rtx insn = f;
8646 register int i;
8648 cse_jumps_altered = 0;
8649 recorded_label_ref = 0;
8650 constant_pool_entries_cost = 0;
8651 val.path_size = 0;
8653 init_recog ();
8654 init_alias_analysis ();
8656 max_reg = nregs;
8658 max_insn_uid = get_max_uid ();
8660 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8661 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8663 #ifdef LOAD_EXTEND_OP
8665 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8666 and change the code and mode as appropriate. */
8667 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
8668 #endif
8670 /* Discard all the free elements of the previous function
8671 	 since they are allocated in the temporary obstack.  */
8672 bzero ((char *) table, sizeof table);
8673 free_element_chain = 0;
8674 n_elements_made = 0;
8676 /* Find the largest uid. */
8678 max_uid = get_max_uid ();
8679 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8680 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8682 /* Compute the mapping from uids to cuids.
8683 CUIDs are numbers assigned to insns, like uids,
8684 except that cuids increase monotonically through the code.
8685 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8686 between two insns is not affected by -g. */
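/* For example, an insn stream of
     uid 10 (insn), uid 11 (line-number note), uid 12 (insn)
   is assigned cuids 1, 1 and 2 below, so the cuid distance between
   the two real insns is the same with or without -g.  */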
8688 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8690 if (GET_CODE (insn) != NOTE
8691 || NOTE_LINE_NUMBER (insn) < 0)
8692 INSN_CUID (insn) = ++i;
8693 else
8694 /* Give a line number note the same cuid as preceding insn. */
8695 INSN_CUID (insn) = i;
8698 /* Initialize which registers are clobbered by calls. */
8700 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8702 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8703 if ((call_used_regs[i]
8704 /* Used to check !fixed_regs[i] here, but that isn't safe;
8705 fixed regs are still call-clobbered, and sched can get
8706 confused if they can "live across calls".
8708 The frame pointer is always preserved across calls. The arg
8709 pointer is if it is fixed. The stack pointer usually is, unless
8710 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8711 will be present. If we are generating PIC code, the PIC offset
8712 table register is preserved across calls. */
8714 && i != STACK_POINTER_REGNUM
8715 && i != FRAME_POINTER_REGNUM
8716 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8717 && i != HARD_FRAME_POINTER_REGNUM
8718 #endif
8719 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8720 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8721 #endif
8722 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8723 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8724 #endif
8726 || global_regs[i])
8727 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8729 if (ggc_p)
8730 ggc_push_context ();
8732 /* Loop over basic blocks.
8733 Compute the maximum number of qty's needed for each basic block
8734 (which is 2 for each SET). */
8735 insn = f;
8736 while (insn)
8738 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8739 flag_cse_skip_blocks);
8741 /* If this basic block was already processed or has no sets, skip it. */
8742 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8744 PUT_MODE (insn, VOIDmode);
8745 insn = (val.last ? NEXT_INSN (val.last) : 0);
8746 val.path_size = 0;
8747 continue;
8750 cse_basic_block_start = val.low_cuid;
8751 cse_basic_block_end = val.high_cuid;
8752 max_qty = val.nsets * 2;
8754 if (file)
8755 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
8756 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8757 val.nsets);
8759 /* Make MAX_QTY bigger to give us room to optimize
8760 past the end of this basic block, if that should prove useful. */
8761 if (max_qty < 500)
8762 max_qty = 500;
8764 max_qty += max_reg;
8766 /* If this basic block is being extended by following certain jumps,
8767 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8768 Otherwise, we start after this basic block. */
8769 if (val.path_size > 0)
8770 cse_basic_block (insn, val.last, val.path, 0);
8771 else
8773 int old_cse_jumps_altered = cse_jumps_altered;
8774 rtx temp;
8776 /* When cse changes a conditional jump to an unconditional
8777 jump, we want to reprocess the block, since it will give
8778 us a new branch path to investigate. */
8779 cse_jumps_altered = 0;
8780 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8781 if (cse_jumps_altered == 0
8782 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8783 insn = temp;
8785 cse_jumps_altered |= old_cse_jumps_altered;
8788 if (ggc_p)
8789 ggc_collect ();
8791 #ifdef USE_C_ALLOCA
8792 alloca (0);
8793 #endif
8796 if (ggc_p)
8797 ggc_pop_context ();
8799 /* Tell refers_to_mem_p that qty_const info is not available. */
8800 qty_const = 0;
8802 if (max_elements_made < n_elements_made)
8803 max_elements_made = n_elements_made;
8805 /* Clean up. */
8806 end_alias_analysis ();
8808 return cse_jumps_altered || recorded_label_ref;
8811 /* Process a single basic block.  FROM and TO are the limits of the basic
8812 block. NEXT_BRANCH points to the branch path when following jumps or
8813 a null path when not following jumps.
8815 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8816 loop. This is true when we are being called for the last time on a
8817 block and this CSE pass is before loop.c. */
8819 static rtx
8820 cse_basic_block (from, to, next_branch, around_loop)
8821 register rtx from, to;
8822 struct branch_path *next_branch;
8823 int around_loop;
8825 register rtx insn;
8826 int to_usage = 0;
8827 rtx libcall_insn = NULL_RTX;
8828 int num_insns = 0;
8830 /* Each of these arrays is undefined before max_reg, so only allocate
8831 the space actually needed and adjust the start below. */
8833 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8834 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8835 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg)
8836 * sizeof (enum machine_mode));
8837 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8838 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8839 qty_comparison_code
8840 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8841 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8842 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8844 qty_first_reg -= max_reg;
8845 qty_last_reg -= max_reg;
8846 qty_mode -= max_reg;
8847 qty_const -= max_reg;
8848 qty_const_insn -= max_reg;
8849 qty_comparison_code -= max_reg;
8850 qty_comparison_qty -= max_reg;
8851 qty_comparison_const -= max_reg;
8853 new_basic_block ();
8855 /* TO might be a label. If so, protect it from being deleted. */
8856 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8857 ++LABEL_NUSES (to);
8859 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8861 register enum rtx_code code = GET_CODE (insn);
8863 /* If we have processed 1,000 insns, flush the hash table to
8864 avoid extreme quadratic behavior. We must not include NOTEs
8865 	 in the count since there may be more of them when generating
8866 debugging information. If we clear the table at different
8867 times, code generated with -g -O might be different than code
8868 generated with -O but not -g.
8870 ??? This is a real kludge and needs to be done some other way.
8871 Perhaps for 2.9. */
8872 if (code != NOTE && num_insns++ > 1000)
8874 flush_hash_table ();
8875 num_insns = 0;
8878 /* See if this is a branch that is part of the path. If so, and it is
8879 to be taken, do so. */
8880 if (next_branch->branch == insn)
8882 enum taken status = next_branch++->status;
8883 if (status != NOT_TAKEN)
8885 if (status == TAKEN)
8886 record_jump_equiv (insn, 1);
8887 else
8888 invalidate_skipped_block (NEXT_INSN (insn));
8890 /* Set the last insn as the jump insn; it doesn't affect cc0.
8891 Then follow this branch. */
8892 #ifdef HAVE_cc0
8893 prev_insn_cc0 = 0;
8894 #endif
8895 prev_insn = insn;
8896 insn = JUMP_LABEL (insn);
8897 continue;
8901 if (GET_MODE (insn) == QImode)
8902 PUT_MODE (insn, VOIDmode);
8904 if (GET_RTX_CLASS (code) == 'i')
8906 rtx p;
8908 /* Process notes first so we have all notes in canonical forms when
8909 looking for duplicate operations. */
8911 if (REG_NOTES (insn))
8912 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8914 	 /* Track when we are inside a LIBCALL block.  Inside such a block,
8915 we do not want to record destinations. The last insn of a
8916 LIBCALL block is not considered to be part of the block, since
8917 its destination is the result of the block and hence should be
8918 recorded. */
8920 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
8921 libcall_insn = XEXP (p, 0);
8922 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8923 libcall_insn = NULL_RTX;
8925 cse_insn (insn, libcall_insn);
8928 /* If INSN is now an unconditional jump, skip to the end of our
8929 basic block by pretending that we just did the last insn in the
8930 basic block. If we are jumping to the end of our block, show
8931 that we can have one usage of TO. */
8933 if (simplejump_p (insn))
8935 if (to == 0)
8936 return 0;
8938 if (JUMP_LABEL (insn) == to)
8939 to_usage = 1;
8941 /* Maybe TO was deleted because the jump is unconditional.
8942 If so, there is nothing left in this basic block. */
8943 /* ??? Perhaps it would be smarter to set TO
8944 to whatever follows this insn,
8945 and pretend the basic block had always ended here. */
8946 if (INSN_DELETED_P (to))
8947 break;
8949 insn = PREV_INSN (to);
8952 /* See if it is ok to keep on going past the label
8953 which used to end our basic block. Remember that we incremented
8954 the count of that label, so we decrement it here. If we made
8955 a jump unconditional, TO_USAGE will be one; in that case, we don't
8956 want to count the use in that jump. */
8958 if (to != 0 && NEXT_INSN (insn) == to
8959 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8961 struct cse_basic_block_data val;
8962 rtx prev;
8964 insn = NEXT_INSN (to);
8966 /* If TO was the last insn in the function, we are done. */
8967 if (insn == 0)
8968 return 0;
8970 /* If TO was preceded by a BARRIER we are done with this block
8971 because it has no continuation. */
8972 prev = prev_nonnote_insn (to);
8973 if (prev && GET_CODE (prev) == BARRIER)
8974 return insn;
8976 /* Find the end of the following block. Note that we won't be
8977 following branches in this case. */
8978 to_usage = 0;
8979 val.path_size = 0;
8980 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8982 /* If the tables we allocated have enough space left
8983 to handle all the SETs in the next basic block,
8984 continue through it. Otherwise, return,
8985 and that block will be scanned individually. */
8986 if (val.nsets * 2 + next_qty > max_qty)
8987 break;
8989 cse_basic_block_start = val.low_cuid;
8990 cse_basic_block_end = val.high_cuid;
8991 to = val.last;
8993 /* Prevent TO from being deleted if it is a label. */
8994 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8995 ++LABEL_NUSES (to);
8997 /* Back up so we process the first insn in the extension. */
8998 insn = PREV_INSN (insn);
9002 if (next_qty > max_qty)
9003 abort ();
9005 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
9006 the previous insn is the only insn that branches to the head of a loop,
9007 we can cse into the loop. Don't do this if we changed the jump
9008 structure of a loop unless we aren't going to be following jumps. */
9010 if ((cse_jumps_altered == 0
9011 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
9012 && around_loop && to != 0
9013 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
9014 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
9015 && JUMP_LABEL (PREV_INSN (to)) != 0
9016 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
9017 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
9019 return to ? NEXT_INSN (to) : 0;
9022 /* Count the number of times registers are used (not set) in X.
9023 COUNTS is an array in which we accumulate the count, INCR is how much
9024 we count each register usage.
9026 Don't count a usage of DEST, which is the SET_DEST of a SET which
9027 contains X in its SET_SRC. This is because such a SET does not
9028 modify the liveness of DEST. */
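/* As an illustration, counting (set (reg 5) (plus (reg 5) (reg 6)))
   with INCR == 1 increments counts[6] only: the (reg 5) inside the
   source is skipped because it is also the SET_DEST, and setting a
   register is not by itself a use of it.  */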
9030 static void
9031 count_reg_usage (x, counts, dest, incr)
9032 rtx x;
9033 int *counts;
9034 rtx dest;
9035 int incr;
9037 enum rtx_code code;
9038 const char *fmt;
9039 int i, j;
9041 if (x == 0)
9042 return;
9044 switch (code = GET_CODE (x))
9046 case REG:
9047 if (x != dest)
9048 counts[REGNO (x)] += incr;
9049 return;
9051 case PC:
9052 case CC0:
9053 case CONST:
9054 case CONST_INT:
9055 case CONST_DOUBLE:
9056 case SYMBOL_REF:
9057 case LABEL_REF:
9058 return;
9060 case CLOBBER:
9061 /* If we are clobbering a MEM, mark any registers inside the address
9062 as being used. */
9063 if (GET_CODE (XEXP (x, 0)) == MEM)
9064 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
9065 return;
9067 case SET:
9068 /* Unless we are setting a REG, count everything in SET_DEST. */
9069 if (GET_CODE (SET_DEST (x)) != REG)
9070 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
9072 /* If SRC has side-effects, then we can't delete this insn, so the
9073 usage of SET_DEST inside SRC counts.
9075 ??? Strictly-speaking, we might be preserving this insn
9076 because some other SET has side-effects, but that's hard
9077 to do and can't happen now. */
9078 count_reg_usage (SET_SRC (x), counts,
9079 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
9080 incr);
9081 return;
9083 case CALL_INSN:
9084 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
9086 /* ... falls through ... */
9087 case INSN:
9088 case JUMP_INSN:
9089 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
9091 /* Things used in a REG_EQUAL note aren't dead since loop may try to
9092 use them. */
9094 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
9095 return;
9097 case EXPR_LIST:
9098 case INSN_LIST:
9099 if (REG_NOTE_KIND (x) == REG_EQUAL
9100 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
9101 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
9102 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
9103 return;
9105 default:
9106 break;
9109 fmt = GET_RTX_FORMAT (code);
9110 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9112 if (fmt[i] == 'e')
9113 count_reg_usage (XEXP (x, i), counts, dest, incr);
9114 else if (fmt[i] == 'E')
9115 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9116 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
9120 /* Scan all the insns and delete any that are dead; i.e., they store a register
9121 that is never used or they copy a register to itself.
9123 This is used to remove insns made obviously dead by cse, loop or other
9124 optimizations. It improves the heuristics in loop since it won't try to
9125 move dead invariants out of loops or make givs for dead quantities. The
9126 remaining passes of the compilation are also sped up. */
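/* A hypothetical example: if (reg 120) is set only by
     (set (reg 120) (plus (reg 121) (const_int 1)))
   and counts[120] is zero, that insn is deleted and the usage count
   of (reg 121) is decremented, possibly making its own setter dead
   in turn as the backward scan continues.  */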
9128 void
9129 delete_trivially_dead_insns (insns, nreg)
9130 rtx insns;
9131 int nreg;
9133 int *counts = (int *) alloca (nreg * sizeof (int));
9134 rtx insn, prev;
9135 #ifdef HAVE_cc0
9136 rtx tem;
9137 #endif
9138 int i;
9139 int in_libcall = 0, dead_libcall = 0;
9141 /* First count the number of times each register is used. */
9142 bzero ((char *) counts, sizeof (int) * nreg);
9143 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
9144 count_reg_usage (insn, counts, NULL_RTX, 1);
9146 /* Go from the last insn to the first and delete insns that only set unused
9147 registers or copy a register to itself. As we delete an insn, remove
9148 usage counts for registers it uses.
9150 The first jump optimization pass may leave a real insn as the last
9151 insn in the function. We must not skip that insn or we may end
9152 up deleting code that is not really dead. */
9153 insn = get_last_insn ();
9154 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9155 insn = prev_real_insn (insn);
9157 for ( ; insn; insn = prev)
9159 int live_insn = 0;
9160 rtx note;
9162 prev = prev_real_insn (insn);
9164 /* Don't delete any insns that are part of a libcall block unless
9165 we can delete the whole libcall block.
9167 Flow or loop might get confused if we did that. Remember
9168 that we are scanning backwards. */
9169 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
9171 in_libcall = 1;
9172 live_insn = 1;
9173 dead_libcall = 0;
9175 /* See if there's a REG_EQUAL note on this insn and try to
9176 replace the source with the REG_EQUAL expression.
9178 We assume that insns with REG_RETVALs can only be reg->reg
9179 copies at this point. */
9180 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
9181 if (note)
9183 rtx set = single_set (insn);
9184 if (set
9185 && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
9187 remove_note (insn,
9188 find_reg_note (insn, REG_RETVAL, NULL_RTX));
9189 dead_libcall = 1;
9193 else if (in_libcall)
9194 live_insn = ! dead_libcall;
9195 else if (GET_CODE (PATTERN (insn)) == SET)
9197 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
9198 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
9201 #ifdef HAVE_cc0
9202 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
9203 && ! side_effects_p (SET_SRC (PATTERN (insn)))
9204 && ((tem = next_nonnote_insn (insn)) == 0
9205 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9206 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9208 #endif
9209 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
9210 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
9211 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
9212 || side_effects_p (SET_SRC (PATTERN (insn)))
9213 /* An ADDRESSOF expression can turn into a use of the
9214 internal arg pointer, so always consider the
9215 internal arg pointer live. If it is truly dead,
9216 flow will delete the initializing insn. */
9217 || (SET_DEST (PATTERN (insn))
9218 == current_function_internal_arg_pointer))
9219 live_insn = 1;
9221 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
9222 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
9224 rtx elt = XVECEXP (PATTERN (insn), 0, i);
9226 if (GET_CODE (elt) == SET)
9228 if (GET_CODE (SET_DEST (elt)) == REG
9229 && SET_DEST (elt) == SET_SRC (elt))
9232 #ifdef HAVE_cc0
9233 else if (GET_CODE (SET_DEST (elt)) == CC0
9234 && ! side_effects_p (SET_SRC (elt))
9235 && ((tem = next_nonnote_insn (insn)) == 0
9236 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9237 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9239 #endif
9240 else if (GET_CODE (SET_DEST (elt)) != REG
9241 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
9242 || counts[REGNO (SET_DEST (elt))] != 0
9243 || side_effects_p (SET_SRC (elt))
9244 /* An ADDRESSOF expression can turn into a use of the
9245 internal arg pointer, so always consider the
9246 internal arg pointer live. If it is truly dead,
9247 flow will delete the initializing insn. */
9248 || (SET_DEST (elt)
9249 == current_function_internal_arg_pointer))
9250 live_insn = 1;
9252 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
9253 live_insn = 1;
9255 else
9256 live_insn = 1;
9258 /* If this is a dead insn, delete it and show registers in it aren't
9259 being used. */
9261 if (! live_insn)
9263 count_reg_usage (insn, counts, NULL_RTX, -1);
9264 delete_insn (insn);
9267 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
9269 in_libcall = 0;
9270 dead_libcall = 0;