/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
23 #include "config.h"
24 /* stdio.h must precede rtl.h for FFS. */
25 #include "system.h"
26 #include <setjmp.h>
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "function.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "output.h"
40 #include "ggc.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.
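
   For example (register numbers here are arbitrary), after

	(set (reg 105) (reg 104))

   the copy gives reg 105 the same quantity number as reg 104, so the
   two are known to hold the same value.  A later store into reg 105
   that is not a register copy would allocate a fresh quantity number
   for it.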

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of
   the registers does not have the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match makes the entries be ignored if anyone tries to match them.
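
   For example, suppose reg_tick[i] is 3 when an expression mentioning
   register i is entered in the table; reg_in_table[i] is then set to 3.
   A later store into register i bumps reg_tick[i] to 4, so those
   entries no longer match and are treated as invalid; they are actually
   scanned and removed only when a new expression mentioning register i
   is about to be entered.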

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT 7
#define REGHASH_SIZE (1 << REGHASH_SHIFT)
#define REGHASH_MASK (REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO) \
  (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
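
/* A minimal lookup sketch, deliberately kept out of the build: this is
   the probe that get_cse_reg_info performs further below, shown in
   isolation.  The register number used is a made-up example.  */
#if 0
  {
    unsigned int regno = 131;		/* hypothetical pseudo register */
    struct cse_reg_info *p;

    /* Hash the register number to a bucket, then walk that chain.  */
    for (p = reg_hash[REGHASH_FN (regno)]; p != NULL; p = p->hash_next)
      if (p->regno == regno)
	break;				/* P is the entry, or NULL if none.  */
  }
#endif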

/* The last lookup we did into the cse_reg_info hash table.  This
   allows us to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The `canon_exp' field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M) \
  ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
    ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
    : canon_hash (X, M)) & HASH_MASK)
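
/* For example, a pseudo register whose quantity number is 12 hashes to
   (((unsigned) REG << 7) + 12) & HASH_MASK regardless of its register
   number, so two pseudos known to hold the same value land in the same
   bucket.  */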

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (X) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : notreg_cost (X))

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) \
  (((N) == cached_regno && cached_cse_reg_info) \
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
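
/* For example, immediately after new_basic_block has run, REG_QTY (N)
   is N for every register, so no register has a valid quantity until
   make_new_qty or make_regs_eqv assigns one.  */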

#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX) \
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
   ? -1 : ADDRESS_COST (RTX))
#endif

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]) \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || ((X) == arg_pointer_rtx \
	       && fixed_regs[ARG_POINTER_REGNUM]) \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || ((X) == arg_pointer_rtx \
	       && fixed_regs[ARG_POINTER_REGNUM]) \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx \
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost PARAMS ((rtx));
static void new_basic_block PARAMS ((void));
static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
static void delete_reg_equiv PARAMS ((unsigned int));
static int mention_regs PARAMS ((rtx));
static int insert_regs PARAMS ((rtx, struct table_elt *, int));
static void remove_from_table PARAMS ((struct table_elt *, unsigned));
static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
					 enum machine_mode));
static void merge_equiv_classes PARAMS ((struct table_elt *,
					 struct table_elt *));
static void invalidate PARAMS ((rtx, enum machine_mode));
static int cse_rtx_varies_p PARAMS ((rtx));
static void remove_invalid_refs PARAMS ((unsigned int));
static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
						enum machine_mode));
static void rehash_using_reg PARAMS ((rtx));
static void invalidate_memory PARAMS ((void));
static void invalidate_for_call PARAMS ((void));
static rtx use_related_value PARAMS ((rtx, struct table_elt *));
static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
static rtx canon_reg PARAMS ((rtx, rtx));
static void find_best_addr PARAMS ((rtx, rtx *));
static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
						   enum machine_mode *,
						   enum machine_mode *));
static rtx fold_rtx PARAMS ((rtx, rtx));
static rtx equiv_constant PARAMS ((rtx));
static void record_jump_equiv PARAMS ((rtx, int));
static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
				      rtx, rtx, int));
static void cse_insn PARAMS ((rtx, rtx));
static int addr_affects_sp_p PARAMS ((rtx));
static void invalidate_from_clobbers PARAMS ((rtx));
static rtx cse_process_notes PARAMS ((rtx, rtx));
static void cse_around_loop PARAMS ((rtx));
static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
static void invalidate_skipped_block PARAMS ((rtx));
static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
extern void dump_class PARAMS ((struct table_elt *));
static struct cse_reg_info *get_cse_reg_info PARAMS ((unsigned int));

static void flush_hash_table PARAMS ((void));

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
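
/* So, for example, COSTS_N_INSNS (1) == 2 and COSTS_N_INSNS (5) == 18;
   a "fast" register-to-register insn costs 2 units on this scale.  */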

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

static struct cse_reg_info *
get_cse_reg_info (regno)
     unsigned int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  bzero ((char *) reg_hash, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     unsigned int reg;
     enum machine_mode mode;
{
  register int q;
  register struct qty_table_elem *ent;
  register struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     unsigned int new, old;
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     unsigned int reg;
{
  register struct qty_table_elem *ent;
  register int q = REG_QTY (reg);
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register const char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      unsigned int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    {
      n_elements_made++;
      elt = (struct table_elt *) oballoc (sizeof (struct table_elt));
    }

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qty's `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (GET_CODE (x) == REG
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) & HASH_MASK;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}

/* Flush the entire hash table.  */

static void
flush_hash_table ()
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (GET_CODE (p->exp) == REG)
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}
1724 /* Remove from the hash table, or mark as invalid, all expressions whose
1725 values could be altered by storing in X. X is a register, a subreg, or
1726 a memory reference with nonvarying address (because, when a memory
1727 reference with a varying address is stored in, all memory references are
1728 removed by invalidate_memory so specific invalidation is superfluous).
1729 FULL_MODE, if not VOIDmode, indicates that this much should be
1730 invalidated instead of just the amount indicated by the mode of X. This
1731 is only used for bitfield stores into memory.
1733 A nonvarying address may be just a register or just a symbol reference,
1734 or it may be either of those plus a numeric offset. */
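/* For example, (reg 65), (symbol_ref "x"), (plus (reg 65) (const_int 8))
   and (plus (symbol_ref "x") (const_int 8)) are all nonvarying addresses
   in this sense.  */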
1736 static void
1737 invalidate (x, full_mode)
1738 rtx x;
1739 enum machine_mode full_mode;
1741 register int i;
1742 register struct table_elt *p;
1744 switch (GET_CODE (x))
1746 case REG:
1748 /* If X is a register, dependencies on its contents are recorded
1749 through the qty number mechanism. Just change the qty number of
1750 the register, mark it as invalid for expressions that refer to it,
1751 and remove it itself. */
1752 unsigned int regno = REGNO (x);
1753 unsigned int hash = HASH (x, GET_MODE (x));
1755 /* Remove REGNO from any quantity list it might be on and indicate
1756 that its value might have changed. If it is a pseudo, remove its
1757 entry from the hash table.
1759 For a hard register, we do the first two actions above for any
1760 additional hard registers corresponding to X. Then, if any of these
1761 registers are in the table, we must remove any REG entries that
1762 overlap these registers. */
1764 delete_reg_equiv (regno);
1765 REG_TICK (regno)++;
1767 if (regno >= FIRST_PSEUDO_REGISTER)
1769 /* Because a register can be referenced in more than one mode,
1770 we might have to remove more than one table entry. */
1771 struct table_elt *elt;
1773 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1774 remove_from_table (elt, hash);
1776 else
1778 HOST_WIDE_INT in_table
1779 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1780 unsigned int endregno
1781 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1782 unsigned int tregno, tendregno, rn;
1783 register struct table_elt *p, *next;
1785 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1787 for (rn = regno + 1; rn < endregno; rn++)
1789 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1790 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1791 delete_reg_equiv (rn);
1792 REG_TICK (rn)++;
1795 if (in_table)
1796 for (hash = 0; hash < HASH_SIZE; hash++)
1797 for (p = table[hash]; p; p = next)
1799 next = p->next_same_hash;
1801 if (GET_CODE (p->exp) != REG
1802 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1803 continue;
1805 tregno = REGNO (p->exp);
1806 tendregno
1807 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1808 if (tendregno > regno && tregno < endregno)
1809 remove_from_table (p, hash);
1813 return;
1815 case SUBREG:
1816 invalidate (SUBREG_REG (x), VOIDmode);
1817 return;
1819 case PARALLEL:
1820 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1821 invalidate (XVECEXP (x, 0, i), VOIDmode);
1822 return;
1824 case EXPR_LIST:
1825 /* This is part of a disjoint return value; extract the location in
1826 question ignoring the offset. */
1827 invalidate (XEXP (x, 0), VOIDmode);
1828 return;
1830 case MEM:
1831 /* Calculate the canonical version of X here so that
1832 true_dependence doesn't generate new RTL for X on each call. */
1833 x = canon_rtx (x);
1835 /* Remove all hash table elements that refer to overlapping pieces of
1836 memory. */
1837 if (full_mode == VOIDmode)
1838 full_mode = GET_MODE (x);
1840 for (i = 0; i < HASH_SIZE; i++)
1842 register struct table_elt *next;
1844 for (p = table[i]; p; p = next)
1846 next = p->next_same_hash;
1847 if (p->in_memory)
1849 if (GET_CODE (p->exp) != MEM)
1850 remove_from_table (p, i);
1851 else
2853 /* Just canonicalize the expression once;
2854 otherwise, each time we call invalidate,
2855 true_dependence will canonicalize the
2856 expression again.  */
1857 if (!p->canon_exp)
1858 p->canon_exp = canon_rtx (p->exp);
1859 if (true_dependence (x, full_mode, p->canon_exp,
1860 cse_rtx_varies_p))
1861 remove_from_table (p, i);
1866 return;
1868 default:
1869 abort ();
1873 /* Remove all expressions that refer to register REGNO,
1874 since they are already invalid, and we are about to
1875 mark that register valid again and don't want the old
1876 expressions to reappear as valid. */
1878 static void
1879 remove_invalid_refs (regno)
1880 unsigned int regno;
1882 unsigned int i;
1883 struct table_elt *p, *next;
1885 for (i = 0; i < HASH_SIZE; i++)
1886 for (p = table[i]; p; p = next)
1888 next = p->next_same_hash;
1889 if (GET_CODE (p->exp) != REG
1890 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1891 remove_from_table (p, i);
1895 /* Likewise for a subreg with subreg_reg WORD and mode MODE. */
1896 static void
1897 remove_invalid_subreg_refs (regno, word, mode)
1898 unsigned int regno;
1899 unsigned int word;
1900 enum machine_mode mode;
1902 unsigned int i;
1903 struct table_elt *p, *next;
1904 unsigned int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1906 for (i = 0; i < HASH_SIZE; i++)
1907 for (p = table[i]; p; p = next)
1909 rtx exp;
1910 next = p->next_same_hash;
1912 exp = p->exp;
1913 if (GET_CODE (p->exp) != REG
1914 && (GET_CODE (exp) != SUBREG
1915 || GET_CODE (SUBREG_REG (exp)) != REG
1916 || REGNO (SUBREG_REG (exp)) != regno
1917 || (((SUBREG_WORD (exp)
1918 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1919 >= word)
1920 && SUBREG_WORD (exp) <= end))
1921 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1922 remove_from_table (p, i);
1926 /* Recompute the hash codes of any valid entries in the hash table that
1927 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1929 This is called when we make a jump equivalence. */
1931 static void
1932 rehash_using_reg (x)
1933 rtx x;
1935 unsigned int i;
1936 struct table_elt *p, *next;
1937 unsigned hash;
1939 if (GET_CODE (x) == SUBREG)
1940 x = SUBREG_REG (x);
1942 /* If X is not a register or if the register is known not to be in any
1943 valid entries in the table, we have no work to do. */
1945 if (GET_CODE (x) != REG
1946 || REG_IN_TABLE (REGNO (x)) < 0
1947 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1948 return;
1950 /* Scan all hash chains looking for valid entries that mention X.
1951 If we find one and it is in the wrong hash chain, move it. We can skip
1952 objects that are registers, since they are handled specially. */
1954 for (i = 0; i < HASH_SIZE; i++)
1955 for (p = table[i]; p; p = next)
1957 next = p->next_same_hash;
1958 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1959 && exp_equiv_p (p->exp, p->exp, 1, 0)
1960 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
1962 if (p->next_same_hash)
1963 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1965 if (p->prev_same_hash)
1966 p->prev_same_hash->next_same_hash = p->next_same_hash;
1967 else
1968 table[i] = p->next_same_hash;
1970 p->next_same_hash = table[hash];
1971 p->prev_same_hash = 0;
1972 if (table[hash])
1973 table[hash]->prev_same_hash = p;
1974 table[hash] = p;
1979 /* Remove from the hash table any expressions that are call-clobbered
1980 registers.  Also update their TICK values.  */
1982 static void
1983 invalidate_for_call ()
1985 unsigned int regno, endregno;
1986 unsigned int i;
1987 unsigned hash;
1988 struct table_elt *p, *next;
1989 int in_table = 0;
1991 /* Go through all the hard registers. For each that is clobbered in
1992 a CALL_INSN, remove the register from quantity chains and update
1993 reg_tick if defined. Also see if any of these registers is currently
1994 in the table. */
1996 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1997 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1999 delete_reg_equiv (regno);
2000 if (REG_TICK (regno) >= 0)
2001 REG_TICK (regno)++;
2003 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2006 /* In the case where we have no call-clobbered hard registers in the
2007 table, we are done. Otherwise, scan the table and remove any
2008 entry that overlaps a call-clobbered register. */
2010 if (in_table)
2011 for (hash = 0; hash < HASH_SIZE; hash++)
2012 for (p = table[hash]; p; p = next)
2014 next = p->next_same_hash;
2016 if (GET_CODE (p->exp) != REG
2017 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2018 continue;
2020 regno = REGNO (p->exp);
2021 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2023 for (i = regno; i < endregno; i++)
2024 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2026 remove_from_table (p, hash);
2027 break;
2032 /* Given an expression X of type CONST,
2033 and ELT which is its table entry (or 0 if it
2034 is not in the hash table),
2035 return an alternate expression for X as a register plus integer.
2036 If none can be found, return 0. */
2038 static rtx
2039 use_related_value (x, elt)
2040 rtx x;
2041 struct table_elt *elt;
2043 register struct table_elt *relt = 0;
2044 register struct table_elt *p, *q;
2045 HOST_WIDE_INT offset;
2047 /* First, is there anything related known?
2048 If we have a table element, we can tell from that.
2049 Otherwise, must look it up. */
2051 if (elt != 0 && elt->related_value != 0)
2052 relt = elt;
2053 else if (elt == 0 && GET_CODE (x) == CONST)
2055 rtx subexp = get_related_value (x);
2056 if (subexp != 0)
2057 relt = lookup (subexp,
2058 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2059 GET_MODE (subexp));
2062 if (relt == 0)
2063 return 0;
2065 /* Search all related table entries for one that has an
2066 equivalent register. */
2068 p = relt;
2069 while (1)
2071 /* This loop is strange in that it is executed in two different cases.
2072 The first is when X is already in the table. Then it is searching
2073 the RELATED_VALUE list of X's class (RELT). The second case is when
2074 X is not in the table. Then RELT points to a class for the related
2075 value.
2077 Ensure that, whatever case we are in, we ignore classes that have
2078 the same value as X. */
2080 if (rtx_equal_p (x, p->exp))
2081 q = 0;
2082 else
2083 for (q = p->first_same_value; q; q = q->next_same_value)
2084 if (GET_CODE (q->exp) == REG)
2085 break;
2087 if (q)
2088 break;
2090 p = p->related_value;
2092 /* We went all the way around, so there is nothing to be found.
2093 Alternatively, perhaps RELT was in the table for some other reason
2094 and it has no related values recorded. */
2095 if (p == relt || p == 0)
2096 break;
2099 if (q == 0)
2100 return 0;
2102 offset = (get_integer_term (x) - get_integer_term (p->exp));
2103 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2104 return plus_constant (q->exp, offset);
2107 /* Hash an rtx. We are careful to make sure the value is never negative.
2108 Equivalent registers hash identically.
2109 MODE is used in hashing for CONST_INTs only;
2110 otherwise the mode of X is used.
2112 Store 1 in do_not_record if any subexpression is volatile.
2114 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2115 which does not have the RTX_UNCHANGING_P bit set.
2117 Note that cse_insn knows that the hash code of a MEM expression
2118 is just (int) MEM plus the hash code of the address. */
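/* Thus (mem:SI (reg 65)) hashes as (unsigned) MEM plus the hash of
   (reg 65); the MEM case below implements this by adding (unsigned) MEM
   and then tail-recursing on the address.  */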
2120 static unsigned
2121 canon_hash (x, mode)
2122 rtx x;
2123 enum machine_mode mode;
2125 register int i, j;
2126 register unsigned hash = 0;
2127 register enum rtx_code code;
2128 register const char *fmt;
2130 /* repeat is used to turn tail-recursion into iteration. */
2131 repeat:
2132 if (x == 0)
2133 return hash;
2135 code = GET_CODE (x);
2136 switch (code)
2138 case REG:
2140 unsigned int regno = REGNO (x);
2142 /* On some machines, we can't record any non-fixed hard register,
2143 because extending its life will cause reload problems. We
2144 consider ap, fp, and sp to be fixed for this purpose.
2146 We also consider CCmode registers to be fixed for this purpose;
2147 failing to do so leads to a failure to simplify conditionals
2148 of the 0<100 type.
2150 On all machines, we can't record any global registers. */
2152 if (regno < FIRST_PSEUDO_REGISTER
2153 && (global_regs[regno]
2154 || (SMALL_REGISTER_CLASSES
2155 && ! fixed_regs[regno]
2156 && regno != FRAME_POINTER_REGNUM
2157 && regno != HARD_FRAME_POINTER_REGNUM
2158 && regno != ARG_POINTER_REGNUM
2159 && regno != STACK_POINTER_REGNUM
2160 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2162 do_not_record = 1;
2163 return 0;
2166 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2167 return hash;
2170 /* We handle SUBREG of a REG specially because the underlying
2171 reg changes its hash value with every value change; we don't
2172 want to have to forget unrelated subregs when one subreg changes. */
2173 case SUBREG:
2175 if (GET_CODE (SUBREG_REG (x)) == REG)
2177 hash += (((unsigned) SUBREG << 7)
2178 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2179 return hash;
2181 break;
2184 case CONST_INT:
2186 unsigned HOST_WIDE_INT tem = INTVAL (x);
2187 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2188 return hash;
2191 case CONST_DOUBLE:
2192 /* This is like the general case, except that it only counts
2193 the integers representing the constant. */
2194 hash += (unsigned) code + (unsigned) GET_MODE (x);
2195 if (GET_MODE (x) != VOIDmode)
2196 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2198 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2199 hash += tem;
2201 else
2202 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2203 + (unsigned) CONST_DOUBLE_HIGH (x));
2204 return hash;
2206 /* Assume there is only one rtx object for any given label. */
2207 case LABEL_REF:
2208 hash
2209 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2210 return hash;
2212 case SYMBOL_REF:
2213 hash
2214 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2215 return hash;
2217 case MEM:
2218 /* We don't record if marked volatile or if BLKmode since we don't
2219 know the size of the move. */
2220 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2222 do_not_record = 1;
2223 return 0;
2225 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2227 hash_arg_in_memory = 1;
2229 /* Now that we have already found this special case,
2230 might as well speed it up as much as possible. */
2231 hash += (unsigned) MEM;
2232 x = XEXP (x, 0);
2233 goto repeat;
2235 case PRE_DEC:
2236 case PRE_INC:
2237 case POST_DEC:
2238 case POST_INC:
2239 case PC:
2240 case CC0:
2241 case CALL:
2242 case UNSPEC_VOLATILE:
2243 do_not_record = 1;
2244 return 0;
2246 case ASM_OPERANDS:
2247 if (MEM_VOLATILE_P (x))
2249 do_not_record = 1;
2250 return 0;
2252 break;
2254 default:
2255 break;
2258 i = GET_RTX_LENGTH (code) - 1;
2259 hash += (unsigned) code + (unsigned) GET_MODE (x);
2260 fmt = GET_RTX_FORMAT (code);
2261 for (; i >= 0; i--)
2263 if (fmt[i] == 'e')
2265 rtx tem = XEXP (x, i);
2267 /* If we are about to do the last recursive call
2268 needed at this level, change it into iteration.
2269 This function is called enough to be worth it. */
2270 if (i == 0)
2272 x = tem;
2273 goto repeat;
2275 hash += canon_hash (tem, 0);
2277 else if (fmt[i] == 'E')
2278 for (j = 0; j < XVECLEN (x, i); j++)
2279 hash += canon_hash (XVECEXP (x, i, j), 0);
2280 else if (fmt[i] == 's')
2282 register const unsigned char *p =
2283 (const unsigned char *) XSTR (x, i);
2285 if (p)
2286 while (*p)
2287 hash += *p++;
2289 else if (fmt[i] == 'i')
2291 register unsigned tem = XINT (x, i);
2292 hash += tem;
2294 else if (fmt[i] == '0' || fmt[i] == 't')
2295 /* unused */;
2296 else
2297 abort ();
2299 return hash;
2302 /* Like canon_hash but with no side effects. */
2304 static unsigned
2305 safe_hash (x, mode)
2306 rtx x;
2307 enum machine_mode mode;
2309 int save_do_not_record = do_not_record;
2310 int save_hash_arg_in_memory = hash_arg_in_memory;
2311 unsigned hash = canon_hash (x, mode);
2312 hash_arg_in_memory = save_hash_arg_in_memory;
2313 do_not_record = save_do_not_record;
2314 return hash;
2317 /* Return 1 iff X and Y would canonicalize into the same thing,
2318 without actually constructing the canonicalization of either one.
2319 If VALIDATE is nonzero,
2320 we assume X is an expression being processed from the rtl
2321 and Y was found in the hash table. We check register refs
2322 in Y for being marked as valid.
2324 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2325 that is known to be in the register. Ordinarily, we don't allow them
2326 to match, because letting them match would cause unpredictable results
2327 in all the places that search a hash table chain for an equivalent
2328 for a given value. A possible equivalent that has different structure
2329 has its hash code computed from different data. Whether the hash code
2330 is the same as that of the given value is pure luck. */
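/* A hypothetical example: with EQUAL_VALUES nonzero, (reg 66) may match
   (const_int 4) if the quantity table records (const_int 4) as the known
   contents of (reg 66) in its mode.  */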
2332 static int
2333 exp_equiv_p (x, y, validate, equal_values)
2334 rtx x, y;
2335 int validate;
2336 int equal_values;
2338 register int i, j;
2339 register enum rtx_code code;
2340 register const char *fmt;
2342 /* Note: it is incorrect to assume an expression is equivalent to itself
2343 if VALIDATE is nonzero. */
2344 if (x == y && !validate)
2345 return 1;
2346 if (x == 0 || y == 0)
2347 return x == y;
2349 code = GET_CODE (x);
2350 if (code != GET_CODE (y))
2352 if (!equal_values)
2353 return 0;
2355 /* If X is a constant and Y is a register or vice versa, they may be
2356 equivalent. We only have to validate if Y is a register. */
2357 if (CONSTANT_P (x) && GET_CODE (y) == REG
2358 && REGNO_QTY_VALID_P (REGNO (y)))
2360 int y_q = REG_QTY (REGNO (y));
2361 struct qty_table_elem *y_ent = &qty_table[y_q];
2363 if (GET_MODE (y) == y_ent->mode
2364 && rtx_equal_p (x, y_ent->const_rtx)
2365 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2366 return 1;
2369 if (CONSTANT_P (y) && code == REG
2370 && REGNO_QTY_VALID_P (REGNO (x)))
2372 int x_q = REG_QTY (REGNO (x));
2373 struct qty_table_elem *x_ent = &qty_table[x_q];
2375 if (GET_MODE (x) == x_ent->mode
2376 && rtx_equal_p (y, x_ent->const_rtx))
2377 return 1;
2380 return 0;
2383 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2384 if (GET_MODE (x) != GET_MODE (y))
2385 return 0;
2387 switch (code)
2389 case PC:
2390 case CC0:
2391 case CONST_INT:
2392 return x == y;
2394 case LABEL_REF:
2395 return XEXP (x, 0) == XEXP (y, 0);
2397 case SYMBOL_REF:
2398 return XSTR (x, 0) == XSTR (y, 0);
2400 case REG:
2402 unsigned int regno = REGNO (y);
2403 unsigned int endregno
2404 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2405 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2406 unsigned int i;
2408 /* If the quantities are not the same, the expressions are not
2409 equivalent.  If they are the same and we are not to validate, they
2410 are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2412 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2413 return 0;
2415 if (! validate)
2416 return 1;
2418 for (i = regno; i < endregno; i++)
2419 if (REG_IN_TABLE (i) != REG_TICK (i))
2420 return 0;
2422 return 1;
2425 /* For commutative operations, check both orders. */
2426 case PLUS:
2427 case MULT:
2428 case AND:
2429 case IOR:
2430 case XOR:
2431 case NE:
2432 case EQ:
2433 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2434 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2435 validate, equal_values))
2436 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2437 validate, equal_values)
2438 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2439 validate, equal_values)));
2441 default:
2442 break;
2445 /* Compare the elements. If any pair of corresponding elements
2446 fail to match, return 0 for the whole thing.  */
2448 fmt = GET_RTX_FORMAT (code);
2449 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2451 switch (fmt[i])
2453 case 'e':
2454 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2455 return 0;
2456 break;
2458 case 'E':
2459 if (XVECLEN (x, i) != XVECLEN (y, i))
2460 return 0;
2461 for (j = 0; j < XVECLEN (x, i); j++)
2462 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2463 validate, equal_values))
2464 return 0;
2465 break;
2467 case 's':
2468 if (strcmp (XSTR (x, i), XSTR (y, i)))
2469 return 0;
2470 break;
2472 case 'i':
2473 if (XINT (x, i) != XINT (y, i))
2474 return 0;
2475 break;
2477 case 'w':
2478 if (XWINT (x, i) != XWINT (y, i))
2479 return 0;
2480 break;
2482 case '0':
2483 case 't':
2484 break;
2486 default:
2487 abort ();
2491 return 1;
2494 /* Return 1 if X has a value that can vary even between two
2495 executions of the program. 0 means X can be compared reliably
2496 against certain constants or near-constants. */
2498 static int
2499 cse_rtx_varies_p (x)
2500 register rtx x;
2502 /* We need not check for X and the equivalence class being of the same
2503 mode because if X is equivalent to a constant in some mode, it
2504 doesn't vary in any mode. */
2506 if (GET_CODE (x) == REG
2507 && REGNO_QTY_VALID_P (REGNO (x)))
2509 int x_q = REG_QTY (REGNO (x));
2510 struct qty_table_elem *x_ent = &qty_table[x_q];
2512 if (GET_MODE (x) == x_ent->mode
2513 && x_ent->const_rtx != NULL_RTX)
2514 return 0;
2517 if (GET_CODE (x) == PLUS
2518 && GET_CODE (XEXP (x, 1)) == CONST_INT
2519 && GET_CODE (XEXP (x, 0)) == REG
2520 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2522 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2523 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2525 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2526 && x0_ent->const_rtx != NULL_RTX)
2527 return 0;
2530 /* This can happen as the result of virtual register instantiation, if
2531 the initial constant is too large to be a valid address. This gives
2532 us a three instruction sequence, load large offset into a register,
2533 load fp minus a constant into a register, then a MEM which is the
2534 sum of the two `constant' registers. */
2535 if (GET_CODE (x) == PLUS
2536 && GET_CODE (XEXP (x, 0)) == REG
2537 && GET_CODE (XEXP (x, 1)) == REG
2538 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2539 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2541 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2542 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2543 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2544 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2546 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2547 && x0_ent->const_rtx != NULL_RTX
2548 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2549 && x1_ent->const_rtx != NULL_RTX)
2550 return 0;
2553 return rtx_varies_p (x);
2556 /* Canonicalize an expression:
2557 replace each register reference inside it
2558 with the "oldest" equivalent register.
2560 If INSN is non-zero and we are replacing a pseudo with a hard register
2561 or vice versa, validate_change is used to ensure that INSN remains valid
2562 after we make our substitution. The calls are made with IN_GROUP non-zero
2563 so apply_change_group must be called upon the outermost return from this
2564 function (unless INSN is zero). The result of apply_change_group can
2565 generally be discarded since the changes we are making are optional. */
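/* A hypothetical example: if pseudos 65 and 70 share a quantity whose
   first_reg is 65, every occurrence of (reg 70) within X is replaced by
   (reg 65), the "oldest" register of the class.  */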
2567 static rtx
2568 canon_reg (x, insn)
2569 rtx x;
2570 rtx insn;
2572 register int i;
2573 register enum rtx_code code;
2574 register const char *fmt;
2576 if (x == 0)
2577 return x;
2579 code = GET_CODE (x);
2580 switch (code)
2582 case PC:
2583 case CC0:
2584 case CONST:
2585 case CONST_INT:
2586 case CONST_DOUBLE:
2587 case SYMBOL_REF:
2588 case LABEL_REF:
2589 case ADDR_VEC:
2590 case ADDR_DIFF_VEC:
2591 return x;
2593 case REG:
2595 register int first;
2596 register int q;
2597 register struct qty_table_elem *ent;
2599 /* Never replace a hard reg, because hard regs can appear
2600 in more than one machine mode, and we must preserve the mode
2601 of each occurrence. Also, some hard regs appear in
2602 MEMs that are shared and mustn't be altered. Don't try to
2603 replace any reg that maps to a reg of class NO_REGS. */
2604 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2605 || ! REGNO_QTY_VALID_P (REGNO (x)))
2606 return x;
2608 q = REG_QTY (REGNO(x));
2609 ent = &qty_table[q];
2610 first = ent->first_reg;
2611 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2612 : REGNO_REG_CLASS (first) == NO_REGS ? x
2613 : gen_rtx_REG (ent->mode, first));
2616 default:
2617 break;
2620 fmt = GET_RTX_FORMAT (code);
2621 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2623 register int j;
2625 if (fmt[i] == 'e')
2627 rtx new = canon_reg (XEXP (x, i), insn);
2628 int insn_code;
2630 /* If replacing pseudo with hard reg or vice versa, ensure the
2631 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2632 if (insn != 0 && new != 0
2633 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2634 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2635 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2636 || (insn_code = recog_memoized (insn)) < 0
2637 || insn_data[insn_code].n_dups > 0))
2638 validate_change (insn, &XEXP (x, i), new, 1);
2639 else
2640 XEXP (x, i) = new;
2642 else if (fmt[i] == 'E')
2643 for (j = 0; j < XVECLEN (x, i); j++)
2644 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2647 return x;
2650 /* LOC is a location within INSN that is an operand address (the contents of
2651 a MEM). Find the best equivalent address to use that is valid for this
2652 insn.
2654 On most CISC machines, complicated address modes are costly, and rtx_cost
2655 is a good approximation for that cost. However, most RISC machines have
2656 only a few (usually only one) memory reference formats. If an address is
2657 valid at all, it is often just as cheap as any other address. Hence, for
2658 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2659 costs of various addresses. For two addresses of equal cost, choose the one
2660 with the highest `rtx_cost' value as that has the potential of eliminating
2661 the most insns. For equal costs, we choose the first in the equivalence
2662 class. Note that we ignore the fact that pseudo registers are cheaper
2663 than hard registers here because we would also prefer the pseudo registers.  */
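/* For instance, if (reg 65) and (plus (reg 66) (const_int 4)) are equally
   cheap by `ADDRESS_COST', the PLUS form is preferred: its higher
   `rtx_cost' means that substituting it can eliminate more insns.  */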
2666 static void
2667 find_best_addr (insn, loc)
2668 rtx insn;
2669 rtx *loc;
2671 struct table_elt *elt;
2672 rtx addr = *loc;
2673 #ifdef ADDRESS_COST
2674 struct table_elt *p;
2675 int found_better = 1;
2676 #endif
2677 int save_do_not_record = do_not_record;
2678 int save_hash_arg_in_memory = hash_arg_in_memory;
2679 int addr_volatile;
2680 int regno;
2681 unsigned hash;
2683 /* Do not try to replace constant addresses or addresses of local and
2684 argument slots. These MEM expressions are made only once and inserted
2685 in many instructions, as well as being used to control symbol table
2686 output. It is not safe to clobber them.
2688 There are some uncommon cases where the address is already in a register
2689 for some reason, but we cannot take advantage of that because we have
2690 no easy way to unshare the MEM. In addition, looking up all stack
2691 addresses is costly. */
2692 if ((GET_CODE (addr) == PLUS
2693 && GET_CODE (XEXP (addr, 0)) == REG
2694 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2695 && (regno = REGNO (XEXP (addr, 0)),
2696 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2697 || regno == ARG_POINTER_REGNUM))
2698 || (GET_CODE (addr) == REG
2699 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2700 || regno == HARD_FRAME_POINTER_REGNUM
2701 || regno == ARG_POINTER_REGNUM))
2702 || GET_CODE (addr) == ADDRESSOF
2703 || CONSTANT_ADDRESS_P (addr))
2704 return;
2706 /* If this address is not simply a register, try to fold it. This will
2707 sometimes simplify the expression. Many simplifications
2708 will not be valid, but some, usually applying the associative rule, will
2709 be valid and produce better code. */
2710 if (GET_CODE (addr) != REG)
2712 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2714 if (1
2715 #ifdef ADDRESS_COST
2716 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2717 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2718 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2719 #else
2720 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2721 #endif
2722 && validate_change (insn, loc, folded, 0))
2723 addr = folded;
2726 /* If this address is not in the hash table, we can't look for equivalences
2727 of the whole address. Also, ignore if volatile. */
2729 do_not_record = 0;
2730 hash = HASH (addr, Pmode);
2731 addr_volatile = do_not_record;
2732 do_not_record = save_do_not_record;
2733 hash_arg_in_memory = save_hash_arg_in_memory;
2735 if (addr_volatile)
2736 return;
2738 elt = lookup (addr, hash, Pmode);
2740 #ifndef ADDRESS_COST
2741 if (elt)
2743 int our_cost = elt->cost;
2745 /* Find the lowest cost below ours that works. */
2746 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2747 if (elt->cost < our_cost
2748 && (GET_CODE (elt->exp) == REG
2749 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2750 && validate_change (insn, loc,
2751 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2752 return;
2754 #else
2756 if (elt)
2758 /* We need to find the best (under the criteria documented above) entry
2759 in the class that is valid. We use the `flag' field to indicate
2760 choices that were invalid and iterate until we can't find a better
2761 one that hasn't already been tried. */
2763 for (p = elt->first_same_value; p; p = p->next_same_value)
2764 p->flag = 0;
2766 while (found_better)
2768 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2769 int best_rtx_cost = (elt->cost + 1) >> 1;
2770 struct table_elt *best_elt = elt;
2772 found_better = 0;
2773 for (p = elt->first_same_value; p; p = p->next_same_value)
2774 if (! p->flag)
2776 if ((GET_CODE (p->exp) == REG
2777 || exp_equiv_p (p->exp, p->exp, 1, 0))
2778 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2779 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2780 && (p->cost + 1) >> 1 > best_rtx_cost)))
2782 found_better = 1;
2783 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2784 best_rtx_cost = (p->cost + 1) >> 1;
2785 best_elt = p;
2789 if (found_better)
2791 if (validate_change (insn, loc,
2792 canon_reg (copy_rtx (best_elt->exp),
2793 NULL_RTX), 0))
2794 return;
2795 else
2796 best_elt->flag = 1;
2801 /* If the address is a binary operation with the first operand a register
2802 and the second a constant, do the same as above, but looking for
2803 equivalences of the register. Then try to simplify before checking for
2804 the best address to use.  This catches a few cases: the first is when we
2805 have REG+const and the register is another REG+const. We can often merge
2806 the constants and eliminate one insn and one register. It may also be
2807 that a machine has a cheap REG+REG+const. Finally, this improves the
2808 code on the Alpha for unaligned byte stores. */
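/* A sketch of the first case: for the address (plus (reg 65) (const_int 4)),
   if (reg 65) is equivalent to (plus (reg 66) (const_int 8)), the
   simplification below can produce (plus (reg 66) (const_int 12)),
   merging the two constants.  */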
2810 if (flag_expensive_optimizations
2811 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2812 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2813 && GET_CODE (XEXP (*loc, 0)) == REG
2814 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2816 rtx c = XEXP (*loc, 1);
2818 do_not_record = 0;
2819 hash = HASH (XEXP (*loc, 0), Pmode);
2820 do_not_record = save_do_not_record;
2821 hash_arg_in_memory = save_hash_arg_in_memory;
2823 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2824 if (elt == 0)
2825 return;
2827 /* We need to find the best (under the criteria documented above) entry
2828 in the class that is valid. We use the `flag' field to indicate
2829 choices that were invalid and iterate until we can't find a better
2830 one that hasn't already been tried. */
2832 for (p = elt->first_same_value; p; p = p->next_same_value)
2833 p->flag = 0;
2835 while (found_better)
2837 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2838 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2839 struct table_elt *best_elt = elt;
2840 rtx best_rtx = *loc;
2841 int count;
2843 /* This is at worst case an O(n^2) algorithm, so limit our search
2844 to the first 32 elements on the list. This avoids trouble
2845 compiling code with very long basic blocks that can easily
2846 call simplify_gen_binary so many times that we run out of
2847 memory. */
2849 found_better = 0;
2850 for (p = elt->first_same_value, count = 0;
2851 p && count < 32;
2852 p = p->next_same_value, count++)
2853 if (! p->flag
2854 && (GET_CODE (p->exp) == REG
2855 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2857 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2858 p->exp, c);
2860 if ((CSE_ADDRESS_COST (new) < best_addr_cost
2861 || (CSE_ADDRESS_COST (new) == best_addr_cost
2862 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2864 found_better = 1;
2865 best_addr_cost = CSE_ADDRESS_COST (new);
2866 best_rtx_cost = (COST (new) + 1) >> 1;
2867 best_elt = p;
2868 best_rtx = new;
2872 if (found_better)
2874 if (validate_change (insn, loc,
2875 canon_reg (copy_rtx (best_rtx),
2876 NULL_RTX), 0))
2877 return;
2878 else
2879 best_elt->flag = 1;
2883 #endif
2886 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2887 operation (EQ, NE, GT, etc.), follow it back through the hash table to
2888 find out what values are actually being compared.
2890 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2891 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2892 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2893 compared to produce cc0.
2895 The return value is the comparison operator and is either CODE itself
2896 or the code corresponding to the inverse of the comparison.  */
2898 static enum rtx_code
2899 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2900 enum rtx_code code;
2901 rtx *parg1, *parg2;
2902 enum machine_mode *pmode1, *pmode2;
2904 rtx arg1, arg2;
2906 arg1 = *parg1, arg2 = *parg2;
2908 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2910 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2912 /* Set non-zero when we find something of interest. */
2913 rtx x = 0;
2914 int reverse_code = 0;
2915 struct table_elt *p = 0;
2917 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2918 On machines with CC0, this is the only case that can occur, since
2919 fold_rtx will return the COMPARE or item being compared with zero
2920 when given CC0. */
2922 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2923 x = arg1;
2925 /* If ARG1 is a comparison operator and CODE is testing for
2926 STORE_FLAG_VALUE, get the inner arguments. */
2928 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2930 if (code == NE
2931 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2932 && code == LT && STORE_FLAG_VALUE == -1)
2933 #ifdef FLOAT_STORE_FLAG_VALUE
2934 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2935 && (REAL_VALUE_NEGATIVE
2936 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2937 #endif
2939 x = arg1;
2940 else if (code == EQ
2941 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2942 && code == GE && STORE_FLAG_VALUE == -1)
2943 #ifdef FLOAT_STORE_FLAG_VALUE
2944 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2945 && (REAL_VALUE_NEGATIVE
2946 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2947 #endif
2949 x = arg1, reverse_code = 1;
2952 /* ??? We could also check for
2954 (ne (and (eq (...) (const_int 1))) (const_int 0))
2956 and related forms, but let's wait until we see them occurring. */
2958 if (x == 0)
2959 /* Look up ARG1 in the hash table and see if it has an equivalence
2960 that lets us see what is being compared. */
2961 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
2962 GET_MODE (arg1));
2963 if (p) p = p->first_same_value;
2965 for (; p; p = p->next_same_value)
2967 enum machine_mode inner_mode = GET_MODE (p->exp);
2969 /* If the entry isn't valid, skip it. */
2970 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2971 continue;
2973 if (GET_CODE (p->exp) == COMPARE
2974 /* Another possibility is that this machine has a compare insn
2975 that includes the comparison code. In that case, ARG1 would
2976 be equivalent to a comparison operation that would set ARG1 to
2977 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2978 ORIG_CODE is the actual comparison being done; if it is an EQ,
2979 we must reverse ORIG_CODE.  On machines with a negative value
2980 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2981 || ((code == NE
2982 || (code == LT
2983 && GET_MODE_CLASS (inner_mode) == MODE_INT
2984 && (GET_MODE_BITSIZE (inner_mode)
2985 <= HOST_BITS_PER_WIDE_INT)
2986 && (STORE_FLAG_VALUE
2987 & ((HOST_WIDE_INT) 1
2988 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2989 #ifdef FLOAT_STORE_FLAG_VALUE
2990 || (code == LT
2991 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2992 && (REAL_VALUE_NEGATIVE
2993 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2994 #endif
2996 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2998 x = p->exp;
2999 break;
3001 else if ((code == EQ
3002 || (code == GE
3003 && GET_MODE_CLASS (inner_mode) == MODE_INT
3004 && (GET_MODE_BITSIZE (inner_mode)
3005 <= HOST_BITS_PER_WIDE_INT)
3006 && (STORE_FLAG_VALUE
3007 & ((HOST_WIDE_INT) 1
3008 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3009 #ifdef FLOAT_STORE_FLAG_VALUE
3010 || (code == GE
3011 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3012 && (REAL_VALUE_NEGATIVE
3013 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3014 #endif
3016 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3018 reverse_code = 1;
3019 x = p->exp;
3020 break;
3023 /* If this is fp + constant, the equivalent is a better operand since
3024 it may let us predict the value of the comparison. */
3025 else if (NONZERO_BASE_PLUS_P (p->exp))
3027 arg1 = p->exp;
3028 continue;
3032 /* If we didn't find a useful equivalence for ARG1, we are done.
3033 Otherwise, set up for the next iteration. */
3034 if (x == 0)
3035 break;
3037 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3038 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3039 code = GET_CODE (x);
3041 if (reverse_code)
3042 code = reverse_condition (code);
3045 /* Return our results. Return the modes from before fold_rtx
3046 because fold_rtx might produce a const_int, and then it's too late.  */
3047 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3048 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3050 return code;
3053 /* If X is a nontrivial arithmetic operation on an argument
3054 for which a constant value can be determined, return
3055 the result of operating on that value, as a constant.
3056 Otherwise, return X, possibly with one or more operands
3057 modified by recursive calls to this function.
3059 If X is a register whose contents are known, we do NOT
3060 return those contents here. equiv_constant is called to
3061 perform that task.
3063 INSN is the insn that we may be modifying. If it is 0, make a copy
3064 of X before modifying it. */
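/* A hypothetical example: folding (plus:SI (reg 65) (const_int 4)) when
   the quantity table records (const_int 8) as the contents of (reg 65)
   yields (const_int 12); equiv_constant supplies the operand's constant
   equivalent and the arithmetic itself is done by the simplifiers.  */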
3066 static rtx
3067 fold_rtx (x, insn)
3068 rtx x;
3069 rtx insn;
3071 register enum rtx_code code;
3072 register enum machine_mode mode;
3073 register const char *fmt;
3074 register int i;
3075 rtx new = 0;
3076 int copied = 0;
3077 int must_swap = 0;
3079 /* Folded equivalents of first two operands of X. */
3080 rtx folded_arg0;
3081 rtx folded_arg1;
3083 /* Constant equivalents of first three operands of X;
3084 0 when no such equivalent is known. */
3085 rtx const_arg0;
3086 rtx const_arg1;
3087 rtx const_arg2;
3089 /* The mode of the first operand of X. We need this for sign and zero
3090 extends. */
3091 enum machine_mode mode_arg0;
3093 if (x == 0)
3094 return x;
3096 mode = GET_MODE (x);
3097 code = GET_CODE (x);
3098 switch (code)
3100 case CONST:
3101 case CONST_INT:
3102 case CONST_DOUBLE:
3103 case SYMBOL_REF:
3104 case LABEL_REF:
3105 case REG:
3106 /* No use simplifying an EXPR_LIST
3107 since they are used only for lists of args
3108 in a function call's REG_EQUAL note. */
3109 case EXPR_LIST:
3110 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3111 want to (e.g.,) make (addressof (const_int 0)) just because
3112 the location is known to be zero. */
3113 case ADDRESSOF:
3114 return x;
3116 #ifdef HAVE_cc0
3117 case CC0:
3118 return prev_insn_cc0;
3119 #endif
3121 case PC:
3122 /* If the next insn is a CODE_LABEL followed by a jump table,
3123 PC's value is a LABEL_REF pointing to that label. That
3124 lets us fold switch statements on the Vax. */
3125 if (insn && GET_CODE (insn) == JUMP_INSN)
3127 rtx next = next_nonnote_insn (insn);
3129 if (next && GET_CODE (next) == CODE_LABEL
3130 && NEXT_INSN (next) != 0
3131 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3132 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3133 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3134 return gen_rtx_LABEL_REF (Pmode, next);
3136 break;
3138 case SUBREG:
3139 /* See if we previously assigned a constant value to this SUBREG. */
3140 if ((new = lookup_as_function (x, CONST_INT)) != 0
3141 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3142 return new;
3144 /* If this is a paradoxical SUBREG, we have no idea what value the
3145 extra bits would have. However, if the operand is equivalent
3146 to a SUBREG whose operand is the same as our mode, and all the
3147 modes are within a word, we can just use the inner operand
3148 because these SUBREGs just say how to treat the register.
3150 Similarly if we find an integer constant. */
3152 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3154 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3155 struct table_elt *elt;
3157 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3158 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3159 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3160 imode)) != 0)
3161 for (elt = elt->first_same_value;
3162 elt; elt = elt->next_same_value)
3164 if (CONSTANT_P (elt->exp)
3165 && GET_MODE (elt->exp) == VOIDmode)
3166 return elt->exp;
3168 if (GET_CODE (elt->exp) == SUBREG
3169 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3170 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3171 return copy_rtx (SUBREG_REG (elt->exp));
3174 return x;
3177 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3178 We might be able to if the SUBREG is extracting a single word in an
3179 integral mode or extracting the low part. */
3181 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3182 const_arg0 = equiv_constant (folded_arg0);
3183 if (const_arg0)
3184 folded_arg0 = const_arg0;
3186 if (folded_arg0 != SUBREG_REG (x))
3188 new = 0;
3190 if (GET_MODE_CLASS (mode) == MODE_INT
3191 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3192 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
3193 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
3194 GET_MODE (SUBREG_REG (x)));
3195 if (new == 0 && subreg_lowpart_p (x))
3196 new = gen_lowpart_if_possible (mode, folded_arg0);
3197 if (new)
3198 return new;
3201 /* If this is a narrowing SUBREG and our operand is a REG, see if
3202 we can find an equivalence for REG that is an arithmetic operation
3203 in a wider mode where both operands are paradoxical SUBREGs
3204 from objects of our result mode. In that case, we couldn't report
3205 an equivalent value for that operation, since we don't know what the
3206 extra bits will be. But we can find an equivalence for this SUBREG
3207 by folding that operation in the narrow mode.  This allows us to
3208 fold arithmetic in narrow modes when the machine only supports
3209 word-sized arithmetic.
3211 Also look for a case where we have a SUBREG whose operand is the
3212 same as our result. If both modes are smaller than a word, we
3213 are simply interpreting a register in different modes and we
3214 can use the inner value. */
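/* A hypothetical illustration: on a word-only machine, (reg:DI 65) may be
   recorded as (plus:DI (subreg:DI (reg:SI 66) 0) (subreg:DI (reg:SI 67) 0))
   where both SUBREGs are paradoxical; for (subreg:SI (reg:DI 65) 0) we can
   then try to fold (plus:SI (reg:SI 66) (reg:SI 67)) instead.  */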
3216 if (GET_CODE (folded_arg0) == REG
3217 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3218 && subreg_lowpart_p (x))
3220 struct table_elt *elt;
3222 /* We can use HASH here since we know that canon_hash won't be
3223 called. */
3224 elt = lookup (folded_arg0,
3225 HASH (folded_arg0, GET_MODE (folded_arg0)),
3226 GET_MODE (folded_arg0));
3228 if (elt)
3229 elt = elt->first_same_value;
3231 for (; elt; elt = elt->next_same_value)
3233 enum rtx_code eltcode = GET_CODE (elt->exp);
3235 /* Just check for unary and binary operations. */
3236 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3237 && GET_CODE (elt->exp) != SIGN_EXTEND
3238 && GET_CODE (elt->exp) != ZERO_EXTEND
3239 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3240 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3242 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3244 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3245 op0 = fold_rtx (op0, NULL_RTX);
3247 op0 = equiv_constant (op0);
3248 if (op0)
3249 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3250 op0, mode);
3252 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3253 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3254 && eltcode != DIV && eltcode != MOD
3255 && eltcode != UDIV && eltcode != UMOD
3256 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3257 && eltcode != ROTATE && eltcode != ROTATERT
3258 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3259 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3260 == mode))
3261 || CONSTANT_P (XEXP (elt->exp, 0)))
3262 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3263 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3264 == mode))
3265 || CONSTANT_P (XEXP (elt->exp, 1))))
3267 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3268 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3270 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3271 op0 = fold_rtx (op0, NULL_RTX);
3273 if (op0)
3274 op0 = equiv_constant (op0);
3276 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3277 op1 = fold_rtx (op1, NULL_RTX);
3279 if (op1)
3280 op1 = equiv_constant (op1);
3282 /* If we are looking for the low SImode part of
3283 (ashift:DI c (const_int 32)), it doesn't work
3284 to compute that in SImode, because a 32-bit shift
3285 in SImode is unpredictable. We know the value is 0. */
3286 if (op0 && op1
3287 && GET_CODE (elt->exp) == ASHIFT
3288 && GET_CODE (op1) == CONST_INT
3289 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3291 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3293 /* If the count fits in the inner mode's width,
3294 but exceeds the outer mode's width,
3295 the value will get truncated to 0
3296 by the subreg. */
3297 new = const0_rtx;
3298 else
3299 /* If the count exceeds even the inner mode's width,
3300 don't fold this expression. */
3301 new = 0;
3303 else if (op0 && op1)
3304 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3305 op0, op1);
3308 else if (GET_CODE (elt->exp) == SUBREG
3309 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3310 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3311 <= UNITS_PER_WORD)
3312 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3313 new = copy_rtx (SUBREG_REG (elt->exp));
3315 if (new)
3316 return new;
3320 return x;
3322 case NOT:
3323 case NEG:
3324 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3325 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3326 new = lookup_as_function (XEXP (x, 0), code);
3327 if (new)
3328 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3329 break;
3331 case MEM:
3332 /* If we are not actually processing an insn, don't try to find the
3333 best address. Not only don't we care, but we could modify the
3334 MEM in an invalid way since we have no insn to validate against. */
3335 if (insn != 0)
3336 find_best_addr (insn, &XEXP (x, 0));
3339 /* Even if we don't fold in the insn itself,
3340 we can safely do so here, in hopes of getting a constant. */
3341 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3342 rtx base = 0;
3343 HOST_WIDE_INT offset = 0;
3345 if (GET_CODE (addr) == REG
3346 && REGNO_QTY_VALID_P (REGNO (addr)))
3348 int addr_q = REG_QTY (REGNO (addr));
3349 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3351 if (GET_MODE (addr) == addr_ent->mode
3352 && addr_ent->const_rtx != NULL_RTX)
3353 addr = addr_ent->const_rtx;
3356 /* If address is constant, split it into a base and integer offset. */
3357 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3358 base = addr;
3359 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3360 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3362 base = XEXP (XEXP (addr, 0), 0);
3363 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3365 else if (GET_CODE (addr) == LO_SUM
3366 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3367 base = XEXP (addr, 1);
3368 else if (GET_CODE (addr) == ADDRESSOF)
3369 return change_address (x, VOIDmode, addr);
3371 /* If this is a constant pool reference, we can fold it into its
3372 constant to allow better value tracking. */
3373 if (base && GET_CODE (base) == SYMBOL_REF
3374 && CONSTANT_POOL_ADDRESS_P (base))
3376 rtx constant = get_pool_constant (base);
3377 enum machine_mode const_mode = get_pool_mode (base);
3378 rtx new;
3380 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3381 constant_pool_entries_cost = COST (constant);
3383 /* If we are loading the full constant, we have an equivalence. */
3384 if (offset == 0 && mode == const_mode)
3385 return constant;
3387 /* If this actually isn't a constant (weird!), we can't do
3388 anything. Otherwise, handle the two most common cases:
3389 extracting a word from a multi-word constant, and extracting
3390 the low-order bits. Other cases don't seem common enough to
3391 worry about. */
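	  /* For example, on a 32-bit target an SImode reference at offset 4
	     into a DImode pool constant is handled by operand_subword, and a
	     load of the low-order part by gen_lowpart_if_possible, as
	     below.  */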
3392 if (! CONSTANT_P (constant))
3393 return x;
3395 if (GET_MODE_CLASS (mode) == MODE_INT
3396 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3397 && offset % UNITS_PER_WORD == 0
3398 && (new = operand_subword (constant,
3399 offset / UNITS_PER_WORD,
3400 0, const_mode)) != 0)
3401 return new;
3403 if (((BYTES_BIG_ENDIAN
3404 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3405 || (! BYTES_BIG_ENDIAN && offset == 0))
3406 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3407 return new;
3410 /* If this is a reference to a label at a known position in a jump
3411 table, we also know its value. */
3412 if (base && GET_CODE (base) == LABEL_REF)
3414 rtx label = XEXP (base, 0);
3415 rtx table_insn = NEXT_INSN (label);
3417 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3418 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3420 rtx table = PATTERN (table_insn);
3422 if (offset >= 0
3423 && (offset / GET_MODE_SIZE (GET_MODE (table))
3424 < XVECLEN (table, 0)))
3425 return XVECEXP (table, 0,
3426 offset / GET_MODE_SIZE (GET_MODE (table)));
3428 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3429 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3431 rtx table = PATTERN (table_insn);
3433 if (offset >= 0
3434 && (offset / GET_MODE_SIZE (GET_MODE (table))
3435 < XVECLEN (table, 1)))
3437 offset /= GET_MODE_SIZE (GET_MODE (table));
3438 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3439 XEXP (table, 0));
3441 if (GET_MODE (table) != Pmode)
3442 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3444 /* Indicate this is a constant. This isn't a
3445 valid form of CONST, but it will only be used
3446 to fold the next insns and then discarded, so
3447 it should be safe.
3449 Note this expression must be explicitly discarded,
3450 by cse_insn, else it may end up in a REG_EQUAL note
3451 and "escape" to cause problems elsewhere. */
3452 return gen_rtx_CONST (GET_MODE (new), new);
3457 return x;
3460 case ASM_OPERANDS:
3461 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
3462 validate_change (insn, &XVECEXP (x, 3, i),
3463 fold_rtx (XVECEXP (x, 3, i), insn), 0);
3464 break;
3466 default:
3467 break;
3470 const_arg0 = 0;
3471 const_arg1 = 0;
3472 const_arg2 = 0;
3473 mode_arg0 = VOIDmode;
3475 /* Try folding our operands.
3476 Then see which ones have constant values known. */
3478 fmt = GET_RTX_FORMAT (code);
3479 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3480 if (fmt[i] == 'e')
3482 rtx arg = XEXP (x, i);
3483 rtx folded_arg = arg, const_arg = 0;
3484 enum machine_mode mode_arg = GET_MODE (arg);
3485 rtx cheap_arg, expensive_arg;
3486 rtx replacements[2];
3487 int j;
3489 /* Most arguments are cheap, so handle them specially. */
3490 switch (GET_CODE (arg))
3492 case REG:
3493 /* This is the same as calling equiv_constant; it is duplicated
3494 here for speed. */
3495 if (REGNO_QTY_VALID_P (REGNO (arg)))
3497 int arg_q = REG_QTY (REGNO (arg));
3498 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3500 if (arg_ent->const_rtx != NULL_RTX
3501 && GET_CODE (arg_ent->const_rtx) != REG
3502 && GET_CODE (arg_ent->const_rtx) != PLUS)
3503 const_arg
3504 = gen_lowpart_if_possible (GET_MODE (arg),
3505 arg_ent->const_rtx);
3507 break;
3509 case CONST:
3510 case CONST_INT:
3511 case SYMBOL_REF:
3512 case LABEL_REF:
3513 case CONST_DOUBLE:
3514 const_arg = arg;
3515 break;
3517 #ifdef HAVE_cc0
3518 case CC0:
3519 folded_arg = prev_insn_cc0;
3520 mode_arg = prev_insn_cc0_mode;
3521 const_arg = equiv_constant (folded_arg);
3522 break;
3523 #endif
3525 default:
3526 folded_arg = fold_rtx (arg, insn);
3527 const_arg = equiv_constant (folded_arg);
3530 /* For the first three operands, see if the operand
3531 is constant or equivalent to a constant. */
3532 switch (i)
3534 case 0:
3535 folded_arg0 = folded_arg;
3536 const_arg0 = const_arg;
3537 mode_arg0 = mode_arg;
3538 break;
3539 case 1:
3540 folded_arg1 = folded_arg;
3541 const_arg1 = const_arg;
3542 break;
3543 case 2:
3544 const_arg2 = const_arg;
3545 break;
3548 /* Pick the least expensive of the folded argument and an
3549 equivalent constant argument. */
3550 if (const_arg == 0 || const_arg == folded_arg
3551 || COST (const_arg) > COST (folded_arg))
3552 cheap_arg = folded_arg, expensive_arg = const_arg;
3553 else
3554 cheap_arg = const_arg, expensive_arg = folded_arg;
3556 /* Try to replace the operand with the cheapest of the two
3557 possibilities. If it doesn't work and this is either of the first
3558 two operands of a commutative operation, try swapping them.
3559 If THAT fails, try the more expensive, provided it is cheaper
3560 than what is already there. */
3562 if (cheap_arg == XEXP (x, i))
3563 continue;
3565 if (insn == 0 && ! copied)
3567 x = copy_rtx (x);
3568 copied = 1;
3571 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
3572 for (j = 0;
3573 j < 2 && replacements[j]
3574 && COST (replacements[j]) < COST (XEXP (x, i));
3575 j++)
3577 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3578 break;
3580 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
3582 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3583 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3585 if (apply_change_group ())
3587 /* Swap them back to be invalid so that this loop can
3588 continue and flag them to be swapped back later. */
3589 rtx tem;
3591 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3592 XEXP (x, 1) = tem;
3593 must_swap = 1;
3594 break;
3600 else
3602 if (fmt[i] == 'E')
3603 /* Don't try to fold inside of a vector of expressions.
3604 Doing nothing is harmless. */
3605 {;}
3608 /* If a commutative operation, place a constant integer as the second
3609 operand unless the first operand is also a constant integer. Otherwise,
3610 place any constant second unless the first operand is also a constant. */
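/* E.g., (plus:SI (const_int 4) (reg 65)) is rewritten as
   (plus:SI (reg 65) (const_int 4)), the canonical order assumed
   elsewhere in the compiler.  */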
3612 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3614 if (must_swap || (const_arg0
3615 && (const_arg1 == 0
3616 || (GET_CODE (const_arg0) == CONST_INT
3617 && GET_CODE (const_arg1) != CONST_INT))))
3619 register rtx tem = XEXP (x, 0);
3621 if (insn == 0 && ! copied)
3623 x = copy_rtx (x);
3624 copied = 1;
3627 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3628 validate_change (insn, &XEXP (x, 1), tem, 1);
3629 if (apply_change_group ())
3631 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3632 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3637 /* If X is an arithmetic operation, see if we can simplify it. */
3639 switch (GET_RTX_CLASS (code))
3641 case '1':
3643 int is_const = 0;
3645 /* We can't simplify extension ops unless we know the
3646 original mode. */
3647 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3648 && mode_arg0 == VOIDmode)
3649 break;
3651 /* If we had a CONST, strip it off and put it back later if we
3652 fold. */
3653 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3654 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3656 new = simplify_unary_operation (code, mode,
3657 const_arg0 ? const_arg0 : folded_arg0,
3658 mode_arg0);
3659 if (new != 0 && is_const)
3660 new = gen_rtx_CONST (mode, new);
3662 break;
3664 case '<':
3665 /* See what items are actually being compared and set FOLDED_ARG[01]
3666 to those values and CODE to the actual comparison code. If any are
3667 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3668 do anything if both operands are already known to be constant. */
3670 if (const_arg0 == 0 || const_arg1 == 0)
3672 struct table_elt *p0, *p1;
3673 rtx true = const_true_rtx, false = const0_rtx;
3674 enum machine_mode mode_arg1;
3676 #ifdef FLOAT_STORE_FLAG_VALUE
3677 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3679 true = (CONST_DOUBLE_FROM_REAL_VALUE
3680 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3681 false = CONST0_RTX (mode);
3683 #endif
3685 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3686 &mode_arg0, &mode_arg1);
3687 const_arg0 = equiv_constant (folded_arg0);
3688 const_arg1 = equiv_constant (folded_arg1);
3690 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3691 what kinds of things are being compared, so we can't do
3692 anything with this comparison. */
3694 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3695 break;
3697 /* If we do not now have two constants being compared, see
3698 if we can nevertheless deduce some things about the
3699 comparison. */
3700 if (const_arg0 == 0 || const_arg1 == 0)
3702 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or a
3703 non-explicit constant? These aren't zero, but we
3704 don't know their sign. */
3705 if (const_arg1 == const0_rtx
3706 && (NONZERO_BASE_PLUS_P (folded_arg0)
3707 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3708 come out as 0. */
3709 || GET_CODE (folded_arg0) == SYMBOL_REF
3710 #endif
3711 || GET_CODE (folded_arg0) == LABEL_REF
3712 || GET_CODE (folded_arg0) == CONST))
3714 if (code == EQ)
3715 return false;
3716 else if (code == NE)
3717 return true;
3720 /* See if the two operands are the same. We don't do this
3721 for IEEE floating-point, since we can't assume x == x:
3722 x might be a NaN. */
3724 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3725 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
3726 && (folded_arg0 == folded_arg1
3727 || (GET_CODE (folded_arg0) == REG
3728 && GET_CODE (folded_arg1) == REG
3729 && (REG_QTY (REGNO (folded_arg0))
3730 == REG_QTY (REGNO (folded_arg1))))
3731 || ((p0 = lookup (folded_arg0,
3732 (safe_hash (folded_arg0, mode_arg0)
3733 & HASH_MASK), mode_arg0))
3734 && (p1 = lookup (folded_arg1,
3735 (safe_hash (folded_arg1, mode_arg0)
3736 & HASH_MASK), mode_arg0))
3737 && p0->first_same_value == p1->first_same_value)))
3738 return ((code == EQ || code == LE || code == GE
3739 || code == LEU || code == GEU)
3740 ? true : false);
3742 /* If FOLDED_ARG0 is a register, see if the comparison we are
3743 doing now is either the same as one we did before or its reverse
3744 (we only check the reverse if not floating-point). */
3745 else if (GET_CODE (folded_arg0) == REG)
3747 int qty = REG_QTY (REGNO (folded_arg0));
3749 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3751 struct qty_table_elem *ent = &qty_table[qty];
3753 if ((comparison_dominates_p (ent->comparison_code, code)
3754 || (! FLOAT_MODE_P (mode_arg0)
3755 && comparison_dominates_p (ent->comparison_code,
3756 reverse_condition (code))))
3757 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3758 || (const_arg1
3759 && rtx_equal_p (ent->comparison_const,
3760 const_arg1))
3761 || (GET_CODE (folded_arg1) == REG
3762 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3763 return (comparison_dominates_p (ent->comparison_code, code)
3764 ? true : false);
3770 /* If we are comparing against zero, see if the first operand is
3771 equivalent to an IOR with a constant. If so, we may be able to
3772 determine the result of this comparison. */
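/* Illustration (hypothetical): if FOLDED_ARG0 is known equivalent to
(ior (reg 65) (const_int 4)), the value cannot be zero, so EQ against
zero is known false and NE known true; and if the IOR'd constant has
the sign bit set, the value is known negative, which also decides the
signed comparisons against zero below. */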
3774 if (const_arg1 == const0_rtx)
3776 rtx y = lookup_as_function (folded_arg0, IOR);
3777 rtx inner_const;
3779 if (y != 0
3780 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3781 && GET_CODE (inner_const) == CONST_INT
3782 && INTVAL (inner_const) != 0)
3784 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3785 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3786 && (INTVAL (inner_const)
3787 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3788 rtx true = const_true_rtx, false = const0_rtx;
3790 #ifdef FLOAT_STORE_FLAG_VALUE
3791 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3793 true = (CONST_DOUBLE_FROM_REAL_VALUE
3794 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3795 false = CONST0_RTX (mode);
3797 #endif
3799 switch (code)
3801 case EQ:
3802 return false;
3803 case NE:
3804 return true;
3805 case LT: case LE:
3806 if (has_sign)
3807 return true;
3808 break;
3809 case GT: case GE:
3810 if (has_sign)
3811 return false;
3812 break;
3813 default:
3814 break;
3819 new = simplify_relational_operation (code, mode_arg0,
3820 const_arg0 ? const_arg0 : folded_arg0,
3821 const_arg1 ? const_arg1 : folded_arg1);
3822 #ifdef FLOAT_STORE_FLAG_VALUE
3823 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3825 if (new == const0_rtx)
3826 new = CONST0_RTX (mode);
3827 else
3828 new = (CONST_DOUBLE_FROM_REAL_VALUE
3829 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3831 #endif
3832 break;
3834 case '2':
3835 case 'c':
3836 switch (code)
3838 case PLUS:
3839 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3840 with that LABEL_REF as its second operand. If so, the result is
3841 the first operand of that MINUS. This handles switches with an
3842 ADDR_DIFF_VEC table. */
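/* Illustration (hypothetical labels): given
(plus (minus (label_ref L_case) (label_ref L_table))
(label_ref L_table))
the result is just (label_ref L_case); the dispatch-table offset is
resolved once the table base is added back in. */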
3843 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3845 rtx y
3846 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3847 : lookup_as_function (folded_arg0, MINUS);
3849 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3850 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3851 return XEXP (y, 0);
3853 /* Now try for a CONST of a MINUS like the above. */
3854 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3855 : lookup_as_function (folded_arg0, CONST))) != 0
3856 && GET_CODE (XEXP (y, 0)) == MINUS
3857 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3858 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
3859 return XEXP (XEXP (y, 0), 0);
3862 /* Likewise if the operands are in the other order. */
3863 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3865 rtx y
3866 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3867 : lookup_as_function (folded_arg1, MINUS);
3869 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3870 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3871 return XEXP (y, 0);
3873 /* Now try for a CONST of a MINUS like the above. */
3874 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3875 : lookup_as_function (folded_arg1, CONST))) != 0
3876 && GET_CODE (XEXP (y, 0)) == MINUS
3877 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3878 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
3879 return XEXP (XEXP (y, 0), 0);
3882 /* If second operand is a register equivalent to a negative
3883 CONST_INT, see if we can find a register equivalent to the
3884 positive constant. Make a MINUS if so. Don't do this for
3885 a non-negative constant since we might then alternate between
3886 choosing positive and negative constants. Having the positive
3887 constant previously-used is the more common case. Be sure
3888 the resulting constant is non-negative; if const_arg1 were
3889 the smallest negative number this would overflow: depending
3890 on the mode, this would either just be the same value (and
3891 hence not save anything) or be incorrect. */
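/* Illustration (hypothetical registers): if (reg 65) is known to hold
(const_int -4) while (reg 70) is known to hold (const_int 4), then
(plus (reg 60) (reg 65)) can be rewritten as
(minus (reg 60) (reg 70)), reusing the previously seen positive
constant. */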
3892 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
3893 && INTVAL (const_arg1) < 0
3894 /* This used to test
3896 - INTVAL (const_arg1) >= 0
3898 But the Sun V5.0 compilers mis-compiled that test. So
3899 instead we test for the problematic value in a more direct
3900 manner and hope the Sun compilers get it correct. */
3901 && INTVAL (const_arg1) !=
3902 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3903 && GET_CODE (folded_arg1) == REG)
3905 rtx new_const = GEN_INT (- INTVAL (const_arg1));
3906 struct table_elt *p
3907 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
3908 mode);
3910 if (p)
3911 for (p = p->first_same_value; p; p = p->next_same_value)
3912 if (GET_CODE (p->exp) == REG)
3913 return simplify_gen_binary (MINUS, mode, folded_arg0,
3914 canon_reg (p->exp, NULL_RTX));
3916 goto from_plus;
3918 case MINUS:
3919 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3920 If so, produce (PLUS Z C2-C). */
3921 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
3923 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3924 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
3925 return fold_rtx (plus_constant (copy_rtx (y),
3926 -INTVAL (const_arg1)),
3927 NULL_RTX);
3930 /* ... fall through ... */
3932 from_plus:
3933 case SMIN: case SMAX: case UMIN: case UMAX:
3934 case IOR: case AND: case XOR:
3935 case MULT: case DIV: case UDIV:
3936 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3937 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3938 is known to be of similar form, we may be able to replace the
3939 operation with a combined operation. This may eliminate the
3940 intermediate operation if every use is simplified in this way.
3941 Note that the similar optimization done by combine.c only works
3942 if the intermediate operation's result has only one reference. */
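/* Illustration (hypothetical): if (reg 65) is known to be
(ashift (reg 66) (const_int 3)), then
(ashift (reg 65) (const_int 2)) can fold to
(ashift (reg 66) (const_int 5)); for shifts the two counts are
combined with PLUS, as selected below. */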
3944 if (GET_CODE (folded_arg0) == REG
3945 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
3947 int is_shift
3948 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3949 rtx y = lookup_as_function (folded_arg0, code);
3950 rtx inner_const;
3951 enum rtx_code associate_code;
3952 rtx new_const;
3954 if (y == 0
3955 || 0 == (inner_const
3956 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
3957 || GET_CODE (inner_const) != CONST_INT
3958 /* If we have compiled a statement like
3959 "if (x == (x & mask1))", and now are looking at
3960 "x & mask2", we will have a case where the first operand
3961 of Y is the same as our first operand. Unless we detect
3962 this case, an infinite loop will result. */
3963 || XEXP (y, 0) == folded_arg0)
3964 break;
3966 /* Don't associate these operations if they are a PLUS with the
3967 same constant and it is a power of two. These might be doable
3968 with a pre- or post-increment. Similarly for two subtracts of
3969 identical powers of two with post-decrement. */
3971 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
3972 && ((HAVE_PRE_INCREMENT
3973 && exact_log2 (INTVAL (const_arg1)) >= 0)
3974 || (HAVE_POST_INCREMENT
3975 && exact_log2 (INTVAL (const_arg1)) >= 0)
3976 || (HAVE_PRE_DECREMENT
3977 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3978 || (HAVE_POST_DECREMENT
3979 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3980 break;
3982 /* Compute the code used to compose the constants. For example,
3983 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
3985 associate_code
3986 = (code == MULT || code == DIV || code == UDIV ? MULT
3987 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
3989 new_const = simplify_binary_operation (associate_code, mode,
3990 const_arg1, inner_const);
3992 if (new_const == 0)
3993 break;
3995 /* If we are associating shift operations, don't let this
3996 produce a shift of the size of the object or larger.
3997 This could occur when we follow a sign-extend by a right
3998 shift on a machine that does a sign-extend as a pair
3999 of shifts. */
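/* Illustration: in a 32-bit mode, combining right-shift counts 24 and
16 would give 40, which is out of range. For ASHIFTRT the count is
clamped to 31 below, since arithmetic right shifts past the sign bit
all yield the same result; other shifts are simply not combined. */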
4001 if (is_shift && GET_CODE (new_const) == CONST_INT
4002 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4004 /* As an exception, we can turn an ASHIFTRT of this
4005 form into a shift of the number of bits - 1. */
4006 if (code == ASHIFTRT)
4007 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4008 else
4009 break;
4012 y = copy_rtx (XEXP (y, 0));
4014 /* If Y contains our first operand (the most common way this
4015 can happen is if Y is a MEM), we would go into an infinite
4016 loop if we tried to fold it. So don't in that case. */
4018 if (! reg_mentioned_p (folded_arg0, y))
4019 y = fold_rtx (y, insn);
4021 return simplify_gen_binary (code, mode, y, new_const);
4023 break;
4025 default:
4026 break;
4029 new = simplify_binary_operation (code, mode,
4030 const_arg0 ? const_arg0 : folded_arg0,
4031 const_arg1 ? const_arg1 : folded_arg1);
4032 break;
4034 case 'o':
4035 /* (lo_sum (high X) X) is simply X. */
4036 if (code == LO_SUM && const_arg0 != 0
4037 && GET_CODE (const_arg0) == HIGH
4038 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4039 return const_arg1;
4040 break;
4042 case '3':
4043 case 'b':
4044 new = simplify_ternary_operation (code, mode, mode_arg0,
4045 const_arg0 ? const_arg0 : folded_arg0,
4046 const_arg1 ? const_arg1 : folded_arg1,
4047 const_arg2 ? const_arg2 : XEXP (x, 2));
4048 break;
4050 case 'x':
4051 /* Always eliminate CONSTANT_P_RTX at this stage. */
4052 if (code == CONSTANT_P_RTX)
4053 return (const_arg0 ? const1_rtx : const0_rtx);
4054 break;
4057 return new ? new : x;
4060 /* Return a constant value currently equivalent to X.
4061 Return 0 if we don't know one. */
4063 static rtx
4064 equiv_constant (x)
4065 rtx x;
4067 if (GET_CODE (x) == REG
4068 && REGNO_QTY_VALID_P (REGNO (x)))
4070 int x_q = REG_QTY (REGNO (x));
4071 struct qty_table_elem *x_ent = &qty_table[x_q];
4073 if (x_ent->const_rtx)
4074 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4077 if (x == 0 || CONSTANT_P (x))
4078 return x;
4080 /* If X is a MEM, try to fold it outside the context of any insn to see if
4081 it might be equivalent to a constant. That handles the case where it
4082 is a constant-pool reference. Then try to look it up in the hash table
4083 in case it is something whose value we have seen before. */
4085 if (GET_CODE (x) == MEM)
4087 struct table_elt *elt;
4089 x = fold_rtx (x, NULL_RTX);
4090 if (CONSTANT_P (x))
4091 return x;
4093 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4094 if (elt == 0)
4095 return 0;
4097 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4098 if (elt->is_const && CONSTANT_P (elt->exp))
4099 return elt->exp;
4102 return 0;
4105 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4106 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4107 least-significant part of X.
4108 MODE specifies how big a part of X to return.
4110 If the requested operation cannot be done, 0 is returned.
4112 This is similar to gen_lowpart in emit-rtl.c. */
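/* A hypothetical use: asking for the QImode low part of an SImode MEM
yields a QImode MEM at the same address on a little-endian target, or
at a suitably adjusted offset on a big-endian one; 0 is returned when
the adjusted address is not valid. */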
4114 rtx
4115 gen_lowpart_if_possible (mode, x)
4116 enum machine_mode mode;
4117 register rtx x;
4119 rtx result = gen_lowpart_common (mode, x);
4121 if (result)
4122 return result;
4123 else if (GET_CODE (x) == MEM)
4125 /* This is the only other case we handle. */
4126 register int offset = 0;
4127 rtx new;
4129 if (WORDS_BIG_ENDIAN)
4130 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4131 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4132 if (BYTES_BIG_ENDIAN)
4133 /* Adjust the address so that the address-after-the-data is
4134 unchanged. */
4135 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4136 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4137 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
4138 if (! memory_address_p (mode, XEXP (new, 0)))
4139 return 0;
4140 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
4141 MEM_COPY_ATTRIBUTES (new, x);
4142 return new;
4144 else
4145 return 0;
4148 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4149 branch. It will be zero if not.
4151 In certain cases, this can cause us to add an equivalence. For example,
4152 if we are following the taken case of
4153 if (i == 2)
4154 we can add the fact that `i' and '2' are now equivalent.
4156 In any case, we can record that this comparison was passed. If the same
4157 comparison is seen later, we will know its value. */
4159 static void
4160 record_jump_equiv (insn, taken)
4161 rtx insn;
4162 int taken;
4164 int cond_known_true;
4165 rtx op0, op1;
4166 enum machine_mode mode, mode0, mode1;
4167 int reversed_nonequality = 0;
4168 enum rtx_code code;
4170 /* Ensure this is the right kind of insn. */
4171 if (! condjump_p (insn) || simplejump_p (insn))
4172 return;
4174 /* See if this jump condition is known true or false. */
4175 if (taken)
4176 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
4177 else
4178 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
4180 /* Get the type of comparison being done and the operands being compared.
4181 If we had to reverse a non-equality condition, record that fact so we
4182 know that it isn't valid for floating-point. */
4183 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
4184 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
4185 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
4187 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4188 if (! cond_known_true)
4190 reversed_nonequality = (code != EQ && code != NE);
4191 code = reverse_condition (code);
4193 /* Don't remember if we can't find the inverse. */
4194 if (code == UNKNOWN)
4195 return;
4198 /* The mode is the mode of the non-constant. */
4199 mode = mode0;
4200 if (mode1 != VOIDmode)
4201 mode = mode1;
4203 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4206 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4207 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4208 Make any useful entries we can with that information. Called from
4209 above function and called recursively. */
4211 static void
4212 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4213 enum rtx_code code;
4214 enum machine_mode mode;
4215 rtx op0, op1;
4216 int reversed_nonequality;
4218 unsigned op0_hash, op1_hash;
4219 int op0_in_memory, op1_in_memory;
4220 struct table_elt *op0_elt, *op1_elt;
4222 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4223 we know that they are also equal in the smaller mode (this is also
4224 true for all smaller modes whether or not there is a SUBREG, but
4225 is not worth testing for with no SUBREG). */
4227 /* Note that GET_MODE (op0) may not equal MODE. */
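/* Illustration (hypothetical): if we learn that
(subreg:DI (reg:SI 65) 0) == (reg:DI 70), then (reg:SI 65) is also
equal to the SImode low part of (reg:DI 70); the recursive call below
records that narrower equivalence. */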
4228 if (code == EQ && GET_CODE (op0) == SUBREG
4229 && (GET_MODE_SIZE (GET_MODE (op0))
4230 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4232 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4233 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4235 record_jump_cond (code, mode, SUBREG_REG (op0),
4236 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4237 reversed_nonequality);
4240 if (code == EQ && GET_CODE (op1) == SUBREG
4241 && (GET_MODE_SIZE (GET_MODE (op1))
4242 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4244 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4245 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4247 record_jump_cond (code, mode, SUBREG_REG (op1),
4248 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4249 reversed_nonequality);
4252 /* Similarly, if this is an NE comparison, and either is a SUBREG
4253 making a smaller mode, we know the whole thing is also NE. */
4255 /* Note that GET_MODE (op0) may not equal MODE;
4256 if we test MODE instead, we can get an infinite recursion
4257 alternating between two modes each wider than MODE. */
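/* Illustration (hypothetical): if (subreg:QI (reg:SI 65) 0) is known
unequal to some QImode value, then (reg:SI 65) must differ from any
SImode value with that QImode low part, since values that differ in
their low parts differ as wholes. */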
4259 if (code == NE && GET_CODE (op0) == SUBREG
4260 && subreg_lowpart_p (op0)
4261 && (GET_MODE_SIZE (GET_MODE (op0))
4262 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4264 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4265 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4267 record_jump_cond (code, mode, SUBREG_REG (op0),
4268 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4269 reversed_nonequality);
4272 if (code == NE && GET_CODE (op1) == SUBREG
4273 && subreg_lowpart_p (op1)
4274 && (GET_MODE_SIZE (GET_MODE (op1))
4275 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4277 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4278 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4280 record_jump_cond (code, mode, SUBREG_REG (op1),
4281 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4282 reversed_nonequality);
4285 /* Hash both operands. */
4287 do_not_record = 0;
4288 hash_arg_in_memory = 0;
4289 op0_hash = HASH (op0, mode);
4290 op0_in_memory = hash_arg_in_memory;
4292 if (do_not_record)
4293 return;
4295 do_not_record = 0;
4296 hash_arg_in_memory = 0;
4297 op1_hash = HASH (op1, mode);
4298 op1_in_memory = hash_arg_in_memory;
4300 if (do_not_record)
4301 return;
4303 /* Look up both operands. */
4304 op0_elt = lookup (op0, op0_hash, mode);
4305 op1_elt = lookup (op1, op1_hash, mode);
4307 /* If both operands are already equivalent or if they are not in the
4308 table but are identical, do nothing. */
4309 if ((op0_elt != 0 && op1_elt != 0
4310 && op0_elt->first_same_value == op1_elt->first_same_value)
4311 || op0 == op1 || rtx_equal_p (op0, op1))
4312 return;
4314 /* If we aren't setting two things equal, all we can do is save this
4315 comparison. Similarly if this is floating-point. In the latter
4316 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4317 If we record the equality, we might inadvertently delete code
4318 whose intent was to change -0 to +0. */
4320 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4322 struct qty_table_elem *ent;
4323 int qty;
4325 /* If we reversed a floating-point comparison, if OP0 is not a
4326 register, or if OP1 is neither a register nor a constant, we can't
4327 do anything. */
4329 if (GET_CODE (op1) != REG)
4330 op1 = equiv_constant (op1);
4332 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4333 || GET_CODE (op0) != REG || op1 == 0)
4334 return;
4336 /* Put OP0 in the hash table if it isn't already. This gives it a
4337 new quantity number. */
4338 if (op0_elt == 0)
4340 if (insert_regs (op0, NULL_PTR, 0))
4342 rehash_using_reg (op0);
4343 op0_hash = HASH (op0, mode);
4345 /* If OP0 is contained in OP1, this changes its hash code
4346 as well. Faster to rehash than to check, except
4347 for the simple case of a constant. */
4348 if (! CONSTANT_P (op1))
4349 op1_hash = HASH (op1, mode);
4352 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4353 op0_elt->in_memory = op0_in_memory;
4356 qty = REG_QTY (REGNO (op0));
4357 ent = &qty_table[qty];
4359 ent->comparison_code = code;
4360 if (GET_CODE (op1) == REG)
4362 /* Look it up again--in case op0 and op1 are the same. */
4363 op1_elt = lookup (op1, op1_hash, mode);
4365 /* Put OP1 in the hash table so it gets a new quantity number. */
4366 if (op1_elt == 0)
4368 if (insert_regs (op1, NULL_PTR, 0))
4370 rehash_using_reg (op1);
4371 op1_hash = HASH (op1, mode);
4374 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4375 op1_elt->in_memory = op1_in_memory;
4378 ent->comparison_const = NULL_RTX;
4379 ent->comparison_qty = REG_QTY (REGNO (op1));
4381 else
4383 ent->comparison_const = op1;
4384 ent->comparison_qty = -1;
4387 return;
4390 /* If either side is still missing an equivalence, make it now,
4391 then merge the equivalences. */
4393 if (op0_elt == 0)
4395 if (insert_regs (op0, NULL_PTR, 0))
4397 rehash_using_reg (op0);
4398 op0_hash = HASH (op0, mode);
4401 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4402 op0_elt->in_memory = op0_in_memory;
4405 if (op1_elt == 0)
4407 if (insert_regs (op1, NULL_PTR, 0))
4409 rehash_using_reg (op1);
4410 op1_hash = HASH (op1, mode);
4413 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4414 op1_elt->in_memory = op1_in_memory;
4417 merge_equiv_classes (op0_elt, op1_elt);
4418 last_jump_equiv_class = op0_elt;
4421 /* CSE processing for one instruction.
4422 First simplify sources and addresses of all assignments
4423 in the instruction, using previously-computed equivalent values.
4424 Then install the new sources and destinations in the table
4425 of available values.
4427 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4428 the insn. It means that INSN is inside libcall block. In this
4429 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4431 /* Data on one SET contained in the instruction. */
4433 struct set
4435 /* The SET rtx itself. */
4436 rtx rtl;
4437 /* The SET_SRC of the rtx (the original value, if it is changing). */
4438 rtx src;
4439 /* The hash-table element for the SET_SRC of the SET. */
4440 struct table_elt *src_elt;
4441 /* Hash value for the SET_SRC. */
4442 unsigned src_hash;
4443 /* Hash value for the SET_DEST. */
4444 unsigned dest_hash;
4445 /* The SET_DEST, with SUBREG, etc., stripped. */
4446 rtx inner_dest;
4447 /* Nonzero if the SET_SRC is in memory. */
4448 char src_in_memory;
4449 /* Nonzero if the SET_SRC contains something
4450 whose value cannot be predicted and understood. */
4451 char src_volatile;
4452 /* Original machine mode, in case it becomes a CONST_INT. */
4453 enum machine_mode mode;
4454 /* A constant equivalent for SET_SRC, if any. */
4455 rtx src_const;
4456 /* Original SET_SRC value used for libcall notes. */
4457 rtx orig_src;
4458 /* Hash value of constant equivalent for SET_SRC. */
4459 unsigned src_const_hash;
4460 /* Table entry for constant equivalent for SET_SRC, if any. */
4461 struct table_elt *src_const_elt;
4464 static void
4465 cse_insn (insn, libcall_insn)
4466 rtx insn;
4467 rtx libcall_insn;
4469 register rtx x = PATTERN (insn);
4470 register int i;
4471 rtx tem;
4472 register int n_sets = 0;
4474 #ifdef HAVE_cc0
4475 /* Records what this insn does to set CC0. */
4476 rtx this_insn_cc0 = 0;
4477 enum machine_mode this_insn_cc0_mode = VOIDmode;
4478 #endif
4480 rtx src_eqv = 0;
4481 struct table_elt *src_eqv_elt = 0;
4482 int src_eqv_volatile = 0;
4483 int src_eqv_in_memory = 0;
4484 unsigned src_eqv_hash = 0;
4486 struct set *sets = (struct set *) NULL_PTR;
4488 this_insn = insn;
4490 /* Find all the SETs and CLOBBERs in this instruction.
4491 Record all the SETs in the array `set' and count them.
4492 Also determine whether there is a CLOBBER that invalidates
4493 all memory references, or all references at varying addresses. */
4495 if (GET_CODE (insn) == CALL_INSN)
4497 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4498 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4499 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4502 if (GET_CODE (x) == SET)
4504 sets = (struct set *) alloca (sizeof (struct set));
4505 sets[0].rtl = x;
4507 /* Ignore SETs that are unconditional jumps.
4508 They never need cse processing, so this does not hurt.
4509 The reason is not efficiency but rather
4510 so that we can test at the end for instructions
4511 that have been simplified to unconditional jumps
4512 and not be misled by unchanged instructions
4513 that were unconditional jumps to begin with. */
4514 if (SET_DEST (x) == pc_rtx
4515 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4518 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4519 The hard function value register is used only once, to copy to
4520 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4521 Ensure we invalidate the destination register. On the 80386 no
4522 other code would invalidate it since it is a fixed_reg.
4523 We need not check the return of apply_change_group; see canon_reg. */
4525 else if (GET_CODE (SET_SRC (x)) == CALL)
4527 canon_reg (SET_SRC (x), insn);
4528 apply_change_group ();
4529 fold_rtx (SET_SRC (x), insn);
4530 invalidate (SET_DEST (x), VOIDmode);
4532 else
4533 n_sets = 1;
4535 else if (GET_CODE (x) == PARALLEL)
4537 register int lim = XVECLEN (x, 0);
4539 sets = (struct set *) alloca (lim * sizeof (struct set));
4541 /* Find all regs explicitly clobbered in this insn,
4542 and ensure they are not replaced with any other regs
4543 elsewhere in this insn.
4544 When a reg that is clobbered is also used for input,
4545 we should presume that that is for a reason,
4546 and we should not substitute some other register
4547 which is not supposed to be clobbered.
4548 Therefore, this loop cannot be merged into the one below
4549 because a CALL may precede a CLOBBER and refer to the
4550 value clobbered. We must not let a canonicalization do
4551 anything in that case. */
4552 for (i = 0; i < lim; i++)
4554 register rtx y = XVECEXP (x, 0, i);
4555 if (GET_CODE (y) == CLOBBER)
4557 rtx clobbered = XEXP (y, 0);
4559 if (GET_CODE (clobbered) == REG
4560 || GET_CODE (clobbered) == SUBREG)
4561 invalidate (clobbered, VOIDmode);
4562 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4563 || GET_CODE (clobbered) == ZERO_EXTRACT)
4564 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4568 for (i = 0; i < lim; i++)
4570 register rtx y = XVECEXP (x, 0, i);
4571 if (GET_CODE (y) == SET)
4573 /* As above, we ignore unconditional jumps and call-insns and
4574 ignore the result of apply_change_group. */
4575 if (GET_CODE (SET_SRC (y)) == CALL)
4577 canon_reg (SET_SRC (y), insn);
4578 apply_change_group ();
4579 fold_rtx (SET_SRC (y), insn);
4580 invalidate (SET_DEST (y), VOIDmode);
4582 else if (SET_DEST (y) == pc_rtx
4583 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4585 else
4586 sets[n_sets++].rtl = y;
4588 else if (GET_CODE (y) == CLOBBER)
4590 /* If we clobber memory, canon the address.
4591 This does nothing when a register is clobbered
4592 because we have already invalidated the reg. */
4593 if (GET_CODE (XEXP (y, 0)) == MEM)
4594 canon_reg (XEXP (y, 0), NULL_RTX);
4596 else if (GET_CODE (y) == USE
4597 && ! (GET_CODE (XEXP (y, 0)) == REG
4598 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4599 canon_reg (y, NULL_RTX);
4600 else if (GET_CODE (y) == CALL)
4602 /* The result of apply_change_group can be ignored; see
4603 canon_reg. */
4604 canon_reg (y, insn);
4605 apply_change_group ();
4606 fold_rtx (y, insn);
4610 else if (GET_CODE (x) == CLOBBER)
4612 if (GET_CODE (XEXP (x, 0)) == MEM)
4613 canon_reg (XEXP (x, 0), NULL_RTX);
4616 /* Canonicalize a USE of a pseudo register or memory location. */
4617 else if (GET_CODE (x) == USE
4618 && ! (GET_CODE (XEXP (x, 0)) == REG
4619 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4620 canon_reg (XEXP (x, 0), NULL_RTX);
4621 else if (GET_CODE (x) == CALL)
4623 /* The result of apply_change_group can be ignored; see canon_reg. */
4624 canon_reg (x, insn);
4625 apply_change_group ();
4626 fold_rtx (x, insn);
4629 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4630 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4631 is handled specially for this case, and if it isn't set, then there will
4632 be no equivalence for the destination. */
4633 if (n_sets == 1 && REG_NOTES (insn) != 0
4634 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4635 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4636 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4637 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4639 /* Canonicalize sources and addresses of destinations.
4640 We do this in a separate pass to avoid problems when a MATCH_DUP is
4641 present in the insn pattern. In that case, we want to ensure that
4642 we don't break the duplicate nature of the pattern. So we will replace
4643 both operands at the same time. Otherwise, we would fail to find an
4644 equivalent substitution in the loop calling validate_change below.
4646 We used to suppress canonicalization of DEST if it appears in SRC,
4647 but we don't do this any more. */
4649 for (i = 0; i < n_sets; i++)
4651 rtx dest = SET_DEST (sets[i].rtl);
4652 rtx src = SET_SRC (sets[i].rtl);
4653 rtx new = canon_reg (src, insn);
4654 int insn_code;
4656 sets[i].orig_src = src;
4657 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4658 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4659 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4660 || (insn_code = recog_memoized (insn)) < 0
4661 || insn_data[insn_code].n_dups > 0)
4662 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4663 else
4664 SET_SRC (sets[i].rtl) = new;
4666 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4668 validate_change (insn, &XEXP (dest, 1),
4669 canon_reg (XEXP (dest, 1), insn), 1);
4670 validate_change (insn, &XEXP (dest, 2),
4671 canon_reg (XEXP (dest, 2), insn), 1);
4674 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4675 || GET_CODE (dest) == ZERO_EXTRACT
4676 || GET_CODE (dest) == SIGN_EXTRACT)
4677 dest = XEXP (dest, 0);
4679 if (GET_CODE (dest) == MEM)
4680 canon_reg (dest, insn);
4683 /* Now that we have done all the replacements, we can apply the change
4684 group and see if they all work. Note that this will cause some
4685 canonicalizations that would have worked individually not to be applied
4686 because some other canonicalization didn't work, but this should not
4687 occur often.
4689 The result of apply_change_group can be ignored; see canon_reg. */
4691 apply_change_group ();
4693 /* Set sets[i].src_elt to the class each source belongs to.
4694 Detect assignments from or to volatile things
4695 and set sets[i] to zero so they will be ignored
4696 in the rest of this function.
4698 Nothing in this loop changes the hash table or the register chains. */
4700 for (i = 0; i < n_sets; i++)
4702 register rtx src, dest;
4703 register rtx src_folded;
4704 register struct table_elt *elt = 0, *p;
4705 enum machine_mode mode;
4706 rtx src_eqv_here;
4707 rtx src_const = 0;
4708 rtx src_related = 0;
4709 struct table_elt *src_const_elt = 0;
4710 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
4711 int src_related_cost = 10000, src_elt_cost = 10000;
4712 /* Set non-zero if we need to call force_const_mem on the
4713 contents of src_folded before using it. */
4714 int src_folded_force_flag = 0;
4716 dest = SET_DEST (sets[i].rtl);
4717 src = SET_SRC (sets[i].rtl);
4719 /* If SRC is a constant that has no machine mode,
4720 hash it with the destination's machine mode.
4721 This way we can keep different modes separate. */
4723 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4724 sets[i].mode = mode;
4726 if (src_eqv)
4728 enum machine_mode eqvmode = mode;
4729 if (GET_CODE (dest) == STRICT_LOW_PART)
4730 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4731 do_not_record = 0;
4732 hash_arg_in_memory = 0;
4733 src_eqv = fold_rtx (src_eqv, insn);
4734 src_eqv_hash = HASH (src_eqv, eqvmode);
4736 /* Find the equivalence class for the equivalent expression. */
4738 if (!do_not_record)
4739 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4741 src_eqv_volatile = do_not_record;
4742 src_eqv_in_memory = hash_arg_in_memory;
4745 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4746 value of the INNER register, not the destination. So it is not
4747 a valid substitution for the source. But save it for later. */
4748 if (GET_CODE (dest) == STRICT_LOW_PART)
4749 src_eqv_here = 0;
4750 else
4751 src_eqv_here = src_eqv;
4753 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4754 simplified result, which may not necessarily be valid. */
4755 src_folded = fold_rtx (src, insn);
4757 #if 0
4758 /* ??? This caused bad code to be generated for the m68k port with -O2.
4759 Suppose src is (CONST_INT -1), and that after truncation src_folded
4760 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4761 At the end we will add src and src_const to the same equivalence
4762 class. We now have 3 and -1 on the same equivalence class. This
4763 causes later instructions to be mis-optimized. */
4764 /* If storing a constant in a bitfield, pre-truncate the constant
4765 so we will be able to record it later. */
4766 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4767 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4769 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4771 if (GET_CODE (src) == CONST_INT
4772 && GET_CODE (width) == CONST_INT
4773 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4774 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4775 src_folded
4776 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4777 << INTVAL (width)) - 1));
4779 #endif
4781 /* Compute SRC's hash code, and also notice if it
4782 should not be recorded at all. In that case,
4783 prevent any further processing of this assignment. */
4784 do_not_record = 0;
4785 hash_arg_in_memory = 0;
4787 sets[i].src = src;
4788 sets[i].src_hash = HASH (src, mode);
4789 sets[i].src_volatile = do_not_record;
4790 sets[i].src_in_memory = hash_arg_in_memory;
4792 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4793 a pseudo that is set more than once, do not record SRC. Using
4794 SRC as a replacement for anything else will be incorrect in that
4795 situation. Note that this usually occurs only for stack slots,
4796 in which case all the RTL would be referring to SRC, so we don't
4797 lose any optimization opportunities by not having SRC in the
4798 hash table. */
4800 if (GET_CODE (src) == MEM
4801 && find_reg_note (insn, REG_EQUIV, src) != 0
4802 && GET_CODE (dest) == REG
4803 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
4804 && REG_N_SETS (REGNO (dest)) != 1)
4805 sets[i].src_volatile = 1;
4807 #if 0
4808 /* It is no longer clear why we used to do this, but it doesn't
4809 appear to still be needed. So let's try without it since this
4810 code hurts cse'ing widened ops. */
4811 /* If source is a perverse subreg (such as QI treated as an SI),
4812 treat it as volatile. It may do the work of an SI in one context
4813 where the extra bits are not being used, but cannot replace an SI
4814 in general. */
4815 if (GET_CODE (src) == SUBREG
4816 && (GET_MODE_SIZE (GET_MODE (src))
4817 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4818 sets[i].src_volatile = 1;
4819 #endif
4821 /* Locate all possible equivalent forms for SRC. Try to replace
4822 SRC in the insn with each cheaper equivalent.
4824 We have the following types of equivalents: SRC itself, a folded
4825 version, a value given in a REG_EQUAL note, or a value related
4826 to a constant.
4828 Each of these equivalents may be part of an additional class
4829 of equivalents (if more than one is in the table, they must be in
4830 the same class; we check for this).
4832 If the source is volatile, we don't do any table lookups.
4834 We note any constant equivalent for possible later use in a
4835 REG_NOTE. */
4837 if (!sets[i].src_volatile)
4838 elt = lookup (src, sets[i].src_hash, mode);
4840 sets[i].src_elt = elt;
4842 if (elt && src_eqv_here && src_eqv_elt)
4844 if (elt->first_same_value != src_eqv_elt->first_same_value)
4846 /* The REG_EQUAL is indicating that two formerly distinct
4847 classes are now equivalent. So merge them. */
4848 merge_equiv_classes (elt, src_eqv_elt);
4849 src_eqv_hash = HASH (src_eqv, elt->mode);
4850 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4853 src_eqv_here = 0;
4856 else if (src_eqv_elt)
4857 elt = src_eqv_elt;
4859 /* Try to find a constant somewhere and record it in `src_const'.
4860 Record its table element, if any, in `src_const_elt'. Look in
4861 any known equivalences first. (If the constant is not in the
4862 table, also set `sets[i].src_const_hash'). */
4863 if (elt)
4864 for (p = elt->first_same_value; p; p = p->next_same_value)
4865 if (p->is_const)
4867 src_const = p->exp;
4868 src_const_elt = elt;
4869 break;
4872 if (src_const == 0
4873 && (CONSTANT_P (src_folded)
4874 /* Consider (minus (label_ref L1) (label_ref L2)) as
4875 "constant" here so we will record it. This allows us
4876 to fold switch statements when an ADDR_DIFF_VEC is used. */
4877 || (GET_CODE (src_folded) == MINUS
4878 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4879 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4880 src_const = src_folded, src_const_elt = elt;
4881 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4882 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4884 /* If we don't know if the constant is in the table, get its
4885 hash code and look it up. */
4886 if (src_const && src_const_elt == 0)
4888 sets[i].src_const_hash = HASH (src_const, mode);
4889 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4892 sets[i].src_const = src_const;
4893 sets[i].src_const_elt = src_const_elt;
4895 /* If the constant and our source are both in the table, mark them as
4896 equivalent. Otherwise, if a constant is in the table but the source
4897 isn't, set ELT to it. */
4898 if (src_const_elt && elt
4899 && src_const_elt->first_same_value != elt->first_same_value)
4900 merge_equiv_classes (elt, src_const_elt);
4901 else if (src_const_elt && elt == 0)
4902 elt = src_const_elt;
4904 /* See if there is a register linearly related to a constant
4905 equivalent of SRC. */
4906 if (src_const
4907 && (GET_CODE (src_const) == CONST
4908 || (src_const_elt && src_const_elt->related_value != 0)))
4910 src_related = use_related_value (src_const, src_const_elt);
4911 if (src_related)
4913 struct table_elt *src_related_elt
4914 = lookup (src_related, HASH (src_related, mode), mode);
4915 if (src_related_elt && elt)
4917 if (elt->first_same_value
4918 != src_related_elt->first_same_value)
4919 /* This can occur when we previously saw a CONST
4920 involving a SYMBOL_REF and then see the SYMBOL_REF
4921 twice. Merge the involved classes. */
4922 merge_equiv_classes (elt, src_related_elt);
4924 src_related = 0;
4925 src_related_elt = 0;
4927 else if (src_related_elt && elt == 0)
4928 elt = src_related_elt;
4932 /* See if we have a CONST_INT that is already in a register in a
4933 wider mode. */
4935 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
4936 && GET_MODE_CLASS (mode) == MODE_INT
4937 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4939 enum machine_mode wider_mode;
4941 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4942 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4943 && src_related == 0;
4944 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4946 struct table_elt *const_elt
4947 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4949 if (const_elt == 0)
4950 continue;
4952 for (const_elt = const_elt->first_same_value;
4953 const_elt; const_elt = const_elt->next_same_value)
4954 if (GET_CODE (const_elt->exp) == REG)
4956 src_related = gen_lowpart_if_possible (mode,
4957 const_elt->exp);
4958 break;
4963 /* Another possibility is that we have an AND with a constant in
4964 a mode narrower than a word. If so, it might have been generated
4965 as part of an "if" which would narrow the AND. If we already
4966 have done the AND in a wider mode, we can use a SUBREG of that
4967 value. */
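/* Illustration (hypothetical): if this insn computes
(and:HI (reg:HI 70) (const_int 3)) and an SImode AND of the same value
with the same mask already lives in (reg:SI 80), then
(subreg:HI (reg:SI 80) 0) can be used instead of redoing the AND; the
mask makes the discarded upper bits irrelevant. */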
4969 if (flag_expensive_optimizations && ! src_related
4970 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
4971 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4973 enum machine_mode tmode;
4974 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4976 for (tmode = GET_MODE_WIDER_MODE (mode);
4977 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4978 tmode = GET_MODE_WIDER_MODE (tmode))
4980 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
4981 struct table_elt *larger_elt;
4983 if (inner)
4985 PUT_MODE (new_and, tmode);
4986 XEXP (new_and, 0) = inner;
4987 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4988 if (larger_elt == 0)
4989 continue;
4991 for (larger_elt = larger_elt->first_same_value;
4992 larger_elt; larger_elt = larger_elt->next_same_value)
4993 if (GET_CODE (larger_elt->exp) == REG)
4995 src_related
4996 = gen_lowpart_if_possible (mode, larger_elt->exp);
4997 break;
5000 if (src_related)
5001 break;
5006 #ifdef LOAD_EXTEND_OP
5007 /* See if a MEM has already been loaded with a widening operation;
5008 if it has, we can use a subreg of that. Many CISC machines
5009 also have such operations, but this is only likely to be
5010 beneficial on these machines. */
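/* Illustration (hypothetical): on a target whose QImode loads always
zero-extend, if (zero_extend:SI (mem:QI addr)) was loaded earlier into
(reg:SI 80), this QImode MEM can be replaced by
(subreg:QI (reg:SI 80) 0). */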
5012 if (flag_expensive_optimizations && src_related == 0
5013 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5014 && GET_MODE_CLASS (mode) == MODE_INT
5015 && GET_CODE (src) == MEM && ! do_not_record
5016 && LOAD_EXTEND_OP (mode) != NIL)
5018 enum machine_mode tmode;
5020 /* Set what we are trying to extend and the operation it might
5021 have been extended with. */
5022 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5023 XEXP (memory_extend_rtx, 0) = src;
5025 for (tmode = GET_MODE_WIDER_MODE (mode);
5026 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5027 tmode = GET_MODE_WIDER_MODE (tmode))
5029 struct table_elt *larger_elt;
5031 PUT_MODE (memory_extend_rtx, tmode);
5032 larger_elt = lookup (memory_extend_rtx,
5033 HASH (memory_extend_rtx, tmode), tmode);
5034 if (larger_elt == 0)
5035 continue;
5037 for (larger_elt = larger_elt->first_same_value;
5038 larger_elt; larger_elt = larger_elt->next_same_value)
5039 if (GET_CODE (larger_elt->exp) == REG)
5041 src_related = gen_lowpart_if_possible (mode,
5042 larger_elt->exp);
5043 break;
5046 if (src_related)
5047 break;
5050 #endif /* LOAD_EXTEND_OP */
5052 if (src == src_folded)
5053 src_folded = 0;
5055 /* At this point, ELT, if non-zero, points to a class of expressions
5056 equivalent to the source of this SET; and SRC, SRC_EQV, SRC_FOLDED,
5057 and SRC_RELATED, if non-zero, each contain additional equivalent
5058 expressions. Prune these latter expressions by deleting expressions
5059 already in the equivalence class.
5061 Check for an equivalent identical to the destination. If found,
5062 this is the preferred equivalent since it will likely lead to
5063 elimination of the insn. Indicate this by placing it in
5064 `src_related'. */
5066 if (elt) elt = elt->first_same_value;
5067 for (p = elt; p; p = p->next_same_value)
5069 enum rtx_code code = GET_CODE (p->exp);
5071 /* If the expression is not valid, ignore it. Then we do not
5072 have to check for validity below. In most cases, we can use
5073 `rtx_equal_p', since canonicalization has already been done. */
5074 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5075 continue;
5077 /* Also skip paradoxical subregs, unless that's what we're
5078 looking for. */
5079 if (code == SUBREG
5080 && (GET_MODE_SIZE (GET_MODE (p->exp))
5081 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5082 && ! (src != 0
5083 && GET_CODE (src) == SUBREG
5084 && GET_MODE (src) == GET_MODE (p->exp)
5085 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5086 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5087 continue;
5089 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5090 src = 0;
5091 else if (src_folded && GET_CODE (src_folded) == code
5092 && rtx_equal_p (src_folded, p->exp))
5093 src_folded = 0;
5094 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5095 && rtx_equal_p (src_eqv_here, p->exp))
5096 src_eqv_here = 0;
5097 else if (src_related && GET_CODE (src_related) == code
5098 && rtx_equal_p (src_related, p->exp))
5099 src_related = 0;
5101 /* If this is the same as the destination of the insn, we want
5102 to prefer it. Copy it to src_related. The code below will
5103 then give it a negative cost. */
5104 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5105 src_related = dest;
5109 /* Find the cheapest valid equivalent, trying all the available
5110 possibilities. Prefer items not in the hash table to ones
5111 that are when they are equal cost. Note that we can never
5112 worsen an insn as the current contents will also succeed.
5113 If we find an equivalent identical to the destination, use it as best,
5114 since this insn will probably be eliminated in that case. */
5115 if (src)
5117 if (rtx_equal_p (src, dest))
5118 src_cost = -1;
5119 else
5120 src_cost = COST (src);
5123 if (src_eqv_here)
5125 if (rtx_equal_p (src_eqv_here, dest))
5126 src_eqv_cost = -1;
5127 else
5128 src_eqv_cost = COST (src_eqv_here);
5131 if (src_folded)
5133 if (rtx_equal_p (src_folded, dest))
5134 src_folded_cost = -1;
5135 else
5136 src_folded_cost = COST (src_folded);
5139 if (src_related)
5141 if (rtx_equal_p (src_related, dest))
5142 src_related_cost = -1;
5143 else
5144 src_related_cost = COST (src_related);
5147 /* If this was an indirect jump insn, a known label will really be
5148 cheaper even though it looks more expensive. */
5149 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5150 src_folded = src_const, src_folded_cost = -1;
5152 /* Terminate loop when replacement made. This must terminate since
5153 the current contents will be tested and will always be valid. */
5154 while (1)
5156 rtx trial;
5158 /* Skip invalid entries. */
5159 while (elt && GET_CODE (elt->exp) != REG
5160 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5161 elt = elt->next_same_value;
5163 /* A paradoxical subreg would be bad here: it'll be the right
5164 size, but later may be adjusted so that the upper bits aren't
5165 what we want. So reject it. */
5166 if (elt != 0
5167 && GET_CODE (elt->exp) == SUBREG
5168 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5169 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5170 /* It is okay, though, if the rtx we're trying to match
5171 will ignore any of the bits we can't predict. */
5172 && ! (src != 0
5173 && GET_CODE (src) == SUBREG
5174 && GET_MODE (src) == GET_MODE (elt->exp)
5175 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5176 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5178 elt = elt->next_same_value;
5179 continue;
5182 if (elt) src_elt_cost = elt->cost;
5184 /* Find cheapest and skip it for the next time. For items
5185 of equal cost, use this order:
5186 src_folded, src, src_eqv, src_related and hash table entry. */
5187 if (src_folded_cost <= src_cost
5188 && src_folded_cost <= src_eqv_cost
5189 && src_folded_cost <= src_related_cost
5190 && src_folded_cost <= src_elt_cost)
5192 trial = src_folded, src_folded_cost = 10000;
5193 if (src_folded_force_flag)
5194 trial = force_const_mem (mode, trial);
5196 else if (src_cost <= src_eqv_cost
5197 && src_cost <= src_related_cost
5198 && src_cost <= src_elt_cost)
5199 trial = src, src_cost = 10000;
5200 else if (src_eqv_cost <= src_related_cost
5201 && src_eqv_cost <= src_elt_cost)
5202 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
5203 else if (src_related_cost <= src_elt_cost)
5204 trial = copy_rtx (src_related), src_related_cost = 10000;
5205 else
5207 trial = copy_rtx (elt->exp);
5208 elt = elt->next_same_value;
5209 src_elt_cost = 10000;
5212 /* We don't normally have an insn matching (set (pc) (pc)), so
5213 check for this separately here. We will delete such an
5214 insn below.
5216 Tablejump insns contain a USE of the table, so simply replacing
5217 the operand with the constant won't match. This is simply an
5218 unconditional branch, however, and is therefore valid. Just
5219 insert the substitution here and we will delete and re-emit
5220 the insn later. */
5222 if (n_sets == 1 && dest == pc_rtx
5223 && (trial == pc_rtx
5224 || (GET_CODE (trial) == LABEL_REF
5225 && ! condjump_p (insn))))
5227 if (trial == pc_rtx)
5229 SET_SRC (sets[i].rtl) = trial;
5230 cse_jumps_altered = 1;
5231 break;
5234 PATTERN (insn) = gen_jump (XEXP (trial, 0));
5235 INSN_CODE (insn) = -1;
5236 cse_jumps_altered = 1;
5237 break;
5240 /* Look for a substitution that makes a valid insn. */
5241 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5243 /* If we just made a substitution inside a libcall, then we
5244 need to make the same substitution in any notes attached
5245 to the RETVAL insn. */
5246 if (libcall_insn
5247 && (GET_CODE (sets[i].orig_src) == REG
5248 || GET_CODE (sets[i].orig_src) == SUBREG
5249 || GET_CODE (sets[i].orig_src) == MEM))
5250 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5251 canon_reg (SET_SRC (sets[i].rtl), insn));
5253 /* The result of apply_change_group can be ignored; see
5254 canon_reg. */
5256 validate_change (insn, &SET_SRC (sets[i].rtl),
5257 canon_reg (SET_SRC (sets[i].rtl), insn),
5258 1);
5259 apply_change_group ();
5260 break;
5263 /* If we previously found constant pool entries for
5264 constants and this is a constant, try making a
5265 pool entry. Put it in src_folded unless we have already done
5266 this, since that is where it likely came from. */
5268 else if (constant_pool_entries_cost
5269 && CONSTANT_P (trial)
5270 && ! (GET_CODE (trial) == CONST
5271 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5272 && (src_folded == 0
5273 || (GET_CODE (src_folded) != MEM
5274 && ! src_folded_force_flag))
5275 && GET_MODE_CLASS (mode) != MODE_CC
5276 && mode != VOIDmode)
5278 src_folded_force_flag = 1;
5279 src_folded = trial;
5280 src_folded_cost = constant_pool_entries_cost;
5284 src = SET_SRC (sets[i].rtl);
5286 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5287 However, there is an important exception: If both are registers
5288 that are not the head of their equivalence class, replace SET_SRC
5289 with the head of the class. If we do not do this, we will have
5290 both registers live over a portion of the basic block. This way,
5291 their lifetimes will likely abut instead of overlapping. */
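/* Illustration (hypothetical registers): if the insn would become
(set (reg 66) (reg 66)) but the equivalence class of reg 66 is headed
by (reg 64), the source is replaced so the insn reads
(set (reg 66) (reg 64)); reg 64's lifetime can then end where
reg 66's begins rather than the two overlapping. */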
5292 if (GET_CODE (dest) == REG
5293 && REGNO_QTY_VALID_P (REGNO (dest)))
5295 int dest_q = REG_QTY (REGNO (dest));
5296 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5298 if (dest_ent->mode == GET_MODE (dest)
5299 && dest_ent->first_reg != REGNO (dest)
5300 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5301 /* Don't do this if the original insn had a hard reg as
5302 SET_SRC or SET_DEST. */
5303 && (GET_CODE (sets[i].src) != REG
5304 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5305 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5306 /* We can't call canon_reg here because it won't do anything if
5307 SRC is a hard register. */
5309 int src_q = REG_QTY (REGNO (src));
5310 struct qty_table_elem *src_ent = &qty_table[src_q];
5311 int first = src_ent->first_reg;
5312 rtx new_src
5313 = (first >= FIRST_PSEUDO_REGISTER
5314 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5316 /* We must use validate-change even for this, because this
5317 might be a special no-op instruction, suitable only to
5318 tag notes onto. */
5319 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5321 src = new_src;
5322 /* If we had a constant that is cheaper than what we are now
5323 setting SRC to, use that constant. We ignored it when we
5324 thought we could make this into a no-op. */
5325 if (src_const && COST (src_const) < COST (src)
5326 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
5327 0))
5328 src = src_const;
5333 /* If we made a change, recompute SRC values. */
5334 if (src != sets[i].src)
5336 cse_altered = 1;
5337 do_not_record = 0;
5338 hash_arg_in_memory = 0;
5339 sets[i].src = src;
5340 sets[i].src_hash = HASH (src, mode);
5341 sets[i].src_volatile = do_not_record;
5342 sets[i].src_in_memory = hash_arg_in_memory;
5343 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5346 /* If this is a single SET, we are setting a register, and we have an
5347 equivalent constant, we want to add a REG_NOTE. We don't want
5348 to write a REG_EQUAL note for a constant pseudo since verifying that
5349 that pseudo hasn't been eliminated is a pain. Such a note also
5350 won't help anything.
5352 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5353 which can be created for a reference to a compile time computable
5354 entry in a jump table. */
5356 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5357 && GET_CODE (src_const) != REG
5358 && ! (GET_CODE (src_const) == CONST
5359 && GET_CODE (XEXP (src_const, 0)) == MINUS
5360 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5361 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5363 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5365 /* Make sure that the rtx is not shared with any other insn. */
5366 src_const = copy_rtx (src_const);
5368 /* Record the actual constant value in a REG_EQUAL note, making
5369 a new one if one does not already exist. */
5370 if (tem)
5371 XEXP (tem, 0) = src_const;
5372 else
5373 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
5374 src_const, REG_NOTES (insn));
5376 /* If storing a constant value in a register that
5377 previously held the constant value 0,
5378 record this fact with a REG_WAS_0 note on this insn.
5380 Note that the *register* is required to have previously held 0,
5381 not just any register in the quantity, and we must point to the
5382 insn that set that register to zero.
5384 Rather than track each register individually, we just see if
5385 the last set for this quantity was for this register. */
5387 if (REGNO_QTY_VALID_P (REGNO (dest)))
5389 int dest_q = REG_QTY (REGNO (dest));
5390 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5392 if (dest_ent->const_rtx == const0_rtx)
5394 /* See if we previously had a REG_WAS_0 note. */
5395 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5396 rtx const_insn = dest_ent->const_insn;
5398 if ((tem = single_set (const_insn)) != 0
5399 && rtx_equal_p (SET_DEST (tem), dest))
5401 if (note)
5402 XEXP (note, 0) = const_insn;
5403 else
5404 REG_NOTES (insn)
5405 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5406 REG_NOTES (insn));
5412 /* Now deal with the destination. */
5413 do_not_record = 0;
5415 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5416 to the MEM or REG within it. */
5417 while (GET_CODE (dest) == SIGN_EXTRACT
5418 || GET_CODE (dest) == ZERO_EXTRACT
5419 || GET_CODE (dest) == SUBREG
5420 || GET_CODE (dest) == STRICT_LOW_PART)
5421 dest = XEXP (dest, 0);
5423 sets[i].inner_dest = dest;
5425 if (GET_CODE (dest) == MEM)
5427 #ifdef PUSH_ROUNDING
5428 /* Stack pushes invalidate the stack pointer. */
5429 rtx addr = XEXP (dest, 0);
5430 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
5431 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
5432 && XEXP (addr, 0) == stack_pointer_rtx)
5433 invalidate (stack_pointer_rtx, Pmode);
5434 #endif
5435 dest = fold_rtx (dest, insn);
5438 /* Compute the hash code of the destination now,
5439 before the effects of this instruction are recorded,
5440 since the register values used in the address computation
5441 are those before this instruction. */
5442 sets[i].dest_hash = HASH (dest, mode);
5444 /* Don't enter a bit-field in the hash table
5445 because the value in it after the store
5446 may not equal what was stored, due to truncation. */
5448 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5449 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5451 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5453 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5454 && GET_CODE (width) == CONST_INT
5455 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5456 && ! (INTVAL (src_const)
5457 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5458 /* Exception: if the value is constant,
5459 and it won't be truncated, record it. */
5461 else
5463 /* This is chosen so that the destination will be invalidated
5464 but no new value will be recorded.
5465 We must invalidate because sometimes constant
5466 values can be recorded for bitfields. */
5467 sets[i].src_elt = 0;
5468 sets[i].src_volatile = 1;
5469 src_eqv = 0;
5470 src_eqv_elt = 0;
5474 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5475 the insn. */
5476 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5478 /* One less use of the label this insn used to jump to. */
5479 if (JUMP_LABEL (insn) != 0)
5480 --LABEL_NUSES (JUMP_LABEL (insn));
5481 PUT_CODE (insn, NOTE);
5482 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5483 NOTE_SOURCE_FILE (insn) = 0;
5484 cse_jumps_altered = 1;
5485 /* No more processing for this set. */
5486 sets[i].rtl = 0;
5489 /* If this SET is now setting PC to a label, we know it used to
5490 be a conditional or computed branch. So we see if we can follow
5491 it. If it was a computed branch, delete it and re-emit. */
5492 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5494 /* If this is not in the format for a simple branch and
5495 we are the only SET in it, re-emit it. */
5496 if (! simplejump_p (insn) && n_sets == 1)
5498 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5499 JUMP_LABEL (new) = XEXP (src, 0);
5500 LABEL_NUSES (XEXP (src, 0))++;
5501 insn = new;
5503 else
5504 /* Otherwise, force rerecognition, since it probably had
5505 a different pattern before.
5506 This shouldn't really be necessary, since whatever
5507 changed the source value above should have done this.
5508 Until the right place is found, might as well do this here. */
5509 INSN_CODE (insn) = -1;
5511 never_reached_warning (insn);
5513 /* Now emit a BARRIER after the unconditional jump. Do not bother
5514 deleting any unreachable code, let jump/flow do that. */
5515 if (NEXT_INSN (insn) != 0
5516 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5517 emit_barrier_after (insn);
5519 cse_jumps_altered = 1;
5520 sets[i].rtl = 0;
5523 /* If destination is volatile, invalidate it and then do no further
5524 processing for this assignment. */
5526 else if (do_not_record)
5528 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5529 || GET_CODE (dest) == MEM)
5530 invalidate (dest, VOIDmode);
5531 else if (GET_CODE (dest) == STRICT_LOW_PART
5532 || GET_CODE (dest) == ZERO_EXTRACT)
5533 invalidate (XEXP (dest, 0), GET_MODE (dest));
5534 sets[i].rtl = 0;
5537 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5538 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5540 #ifdef HAVE_cc0
5541 /* If setting CC0, record what it was set to, or a constant, if it
5542 is equivalent to a constant. If it is being set to a floating-point
5543 value, make a COMPARE with the appropriate constant of 0. If we
5544 don't do this, later code can interpret this as a test against
5545 const0_rtx, which can cause problems if we try to put it into an
5546 insn as a floating-point operand. */
5547 if (dest == cc0_rtx)
5549 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5550 this_insn_cc0_mode = mode;
5551 if (FLOAT_MODE_P (mode))
5552 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5553 CONST0_RTX (mode));
5555 #endif
5558 /* Now enter all non-volatile source expressions in the hash table
5559 if they are not already present.
5560 Record their equivalence classes in src_elt.
5561 This way we can insert the corresponding destinations into
5562 the same classes even if the actual sources are no longer in them
5563 (having been invalidated). */
5565 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5566 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5568 register struct table_elt *elt;
5569 register struct table_elt *classp = sets[0].src_elt;
5570 rtx dest = SET_DEST (sets[0].rtl);
5571 enum machine_mode eqvmode = GET_MODE (dest);
5573 if (GET_CODE (dest) == STRICT_LOW_PART)
5575 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5576 classp = 0;
5578 if (insert_regs (src_eqv, classp, 0))
5580 rehash_using_reg (src_eqv);
5581 src_eqv_hash = HASH (src_eqv, eqvmode);
5583 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5584 elt->in_memory = src_eqv_in_memory;
5585 src_eqv_elt = elt;
5587 /* Check to see if src_eqv_elt is the same as a set source which
5588 does not yet have an elt, and if so set the elt of the set source
5589 to src_eqv_elt. */
5590 for (i = 0; i < n_sets; i++)
5591 if (sets[i].rtl && sets[i].src_elt == 0
5592 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5593 sets[i].src_elt = src_eqv_elt;
5596 for (i = 0; i < n_sets; i++)
5597 if (sets[i].rtl && ! sets[i].src_volatile
5598 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5600 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5602 /* REG_EQUAL in setting a STRICT_LOW_PART
5603 gives an equivalent for the entire destination register,
5604 not just for the subreg being stored in now.
5605 This is a more interesting equivalence, so we arrange later
5606 to treat the entire reg as the destination. */
5607 sets[i].src_elt = src_eqv_elt;
5608 sets[i].src_hash = src_eqv_hash;
5610 else
5612 /* Insert source and constant equivalent into hash table, if not
5613 already present. */
5614 register struct table_elt *classp = src_eqv_elt;
5615 register rtx src = sets[i].src;
5616 register rtx dest = SET_DEST (sets[i].rtl);
5617 enum machine_mode mode
5618 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5620 if (sets[i].src_elt == 0)
5622 /* Don't put a hard register source into the table if this is
5623 the last insn of a libcall. In this case, we only need
5624 to put src_eqv_elt in src_elt. */
5625 if (GET_CODE (src) != REG
5626 || REGNO (src) >= FIRST_PSEUDO_REGISTER
5627 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5629 register struct table_elt *elt;
5631 /* Note that these insert_regs calls cannot remove
5632 any of the src_elt's, because they would have failed to
5633 match if not still valid. */
5634 if (insert_regs (src, classp, 0))
5636 rehash_using_reg (src);
5637 sets[i].src_hash = HASH (src, mode);
5639 elt = insert (src, classp, sets[i].src_hash, mode);
5640 elt->in_memory = sets[i].src_in_memory;
5641 sets[i].src_elt = classp = elt;
5643 else
5644 sets[i].src_elt = classp;
5646 if (sets[i].src_const && sets[i].src_const_elt == 0
5647 && src != sets[i].src_const
5648 && ! rtx_equal_p (sets[i].src_const, src))
5649 sets[i].src_elt = insert (sets[i].src_const, classp,
5650 sets[i].src_const_hash, mode);
5653 else if (sets[i].src_elt == 0)
5654 /* If we did not insert the source into the hash table (e.g., it was
5655 volatile), note the equivalence class for the REG_EQUAL value, if any,
5656 so that the destination goes into that class. */
5657 sets[i].src_elt = src_eqv_elt;
5659 invalidate_from_clobbers (x);
5661 /* Some registers are invalidated by subroutine calls. Memory is
5662 invalidated by non-constant calls. */
5664 if (GET_CODE (insn) == CALL_INSN)
5666 if (! CONST_CALL_P (insn))
5667 invalidate_memory ();
5668 invalidate_for_call ();
5671 /* Now invalidate everything set by this instruction.
5672 If a SUBREG or other funny destination is being set,
5673 sets[i].rtl is still nonzero, so here we invalidate the reg
5674 a part of which is being set. */
5676 for (i = 0; i < n_sets; i++)
5677 if (sets[i].rtl)
5679 /* We can't use the inner dest, because the mode associated with
5680 a ZERO_EXTRACT is significant. */
5681 register rtx dest = SET_DEST (sets[i].rtl);
5683 /* Needed for registers to remove the register from its
5684 previous quantity's chain.
5685 Needed for memory if this is a nonvarying address, unless
5686 we have just done an invalidate_memory that covers even those. */
5687 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5688 || GET_CODE (dest) == MEM)
5689 invalidate (dest, VOIDmode);
5690 else if (GET_CODE (dest) == STRICT_LOW_PART
5691 || GET_CODE (dest) == ZERO_EXTRACT)
5692 invalidate (XEXP (dest, 0), GET_MODE (dest));
5695 /* A volatile ASM invalidates everything. */
5696 if (GET_CODE (insn) == INSN
5697 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5698 && MEM_VOLATILE_P (PATTERN (insn)))
5699 flush_hash_table ();
5701 /* Make sure registers mentioned in destinations
5702 are safe for use in an expression to be inserted.
5703 This removes from the hash table
5704 any invalid entry that refers to one of these registers.
5706 We don't care about the return value from mention_regs because
5707 we are going to hash the SET_DEST values unconditionally. */
5709 for (i = 0; i < n_sets; i++)
5711 if (sets[i].rtl)
5713 rtx x = SET_DEST (sets[i].rtl);
5715 if (GET_CODE (x) != REG)
5716 mention_regs (x);
5717 else
5719 /* We used to rely on all references to a register becoming
5720 inaccessible when a register changes to a new quantity,
5721 since that changes the hash code. However, that is not
5722 safe, since after HASH_SIZE new quantities we get a
5723 hash 'collision' of a register with its own invalid
5724 entries. And since SUBREGs have been changed not to
5725 change their hash code with the hash code of the register,
5726 it wouldn't work any longer at all. So we have to check
5727 for any invalid references lying around now.
5728 This code is similar to the REG case in mention_regs,
5729 but it knows that reg_tick has been incremented, and
5730 it leaves reg_in_table as -1. */
5731 unsigned int regno = REGNO (x);
5732 unsigned int endregno
5733 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5734 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
5735 unsigned int i;
5737 for (i = regno; i < endregno; i++)
5739 if (REG_IN_TABLE (i) >= 0)
5741 remove_invalid_refs (i);
5742 REG_IN_TABLE (i) = -1;
5749 /* We may have just removed some of the src_elt's from the hash table.
5750 So replace each one with the current head of the same class. */
5752 for (i = 0; i < n_sets; i++)
5753 if (sets[i].rtl)
5755 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5756 /* If elt was removed, find current head of same class,
5757 or 0 if nothing remains of that class. */
5759 register struct table_elt *elt = sets[i].src_elt;
5761 while (elt && elt->prev_same_value)
5762 elt = elt->prev_same_value;
5764 while (elt && elt->first_same_value == 0)
5765 elt = elt->next_same_value;
5766 sets[i].src_elt = elt ? elt->first_same_value : 0;
5770 /* Now insert the destinations into their equivalence classes. */
5772 for (i = 0; i < n_sets; i++)
5773 if (sets[i].rtl)
5775 register rtx dest = SET_DEST (sets[i].rtl);
5776 rtx inner_dest = sets[i].inner_dest;
5777 register struct table_elt *elt;
5779 /* Don't record value if we are not supposed to risk allocating
5780 floating-point values in registers that might be wider than
5781 memory. */
5782 if ((flag_float_store
5783 && GET_CODE (dest) == MEM
5784 && FLOAT_MODE_P (GET_MODE (dest)))
5785 /* Don't record BLKmode values, because we don't know their
5786 size, and can't be sure that other BLKmode values
5787 have the same or smaller size. */
5788 || GET_MODE (dest) == BLKmode
5789 /* Don't record values of destinations set inside a libcall block
5790 since we might delete the libcall. Things should have been set
5791 up so we won't want to reuse such a value, but we play it safe
5792 here. */
5793 || libcall_insn
5794 /* If we didn't put a REG_EQUAL value or a source into the hash
5795 table, there is no point in recording DEST. */
5796 || sets[i].src_elt == 0
5797 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5798 or SIGN_EXTEND, don't record DEST since it can cause
5799 some tracking to be wrong.
5801 ??? Think about this more later. */
5802 || (GET_CODE (dest) == SUBREG
5803 && (GET_MODE_SIZE (GET_MODE (dest))
5804 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5805 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5806 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5807 continue;
5809 /* STRICT_LOW_PART isn't part of the value BEING set,
5810 and neither is the SUBREG inside it.
5811 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5812 if (GET_CODE (dest) == STRICT_LOW_PART)
5813 dest = SUBREG_REG (XEXP (dest, 0));
5815 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5816 /* Registers must also be inserted into chains for quantities. */
5817 if (insert_regs (dest, sets[i].src_elt, 1))
5819 /* If `insert_regs' changes something, the hash code must be
5820 recalculated. */
5821 rehash_using_reg (dest);
5822 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5825 if (GET_CODE (inner_dest) == MEM
5826 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
5827 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
5828 that (MEM (ADDRESSOF (X))) is equivalent to Y.
5829 Consider the case in which the address of the MEM is
5830 passed to a function, which alters the MEM. Then, if we
5831 later use Y instead of the MEM we'll miss the update. */
5832 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
5833 else
5834 elt = insert (dest, sets[i].src_elt,
5835 sets[i].dest_hash, GET_MODE (dest));
5837 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
5838 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
5839 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
5840 0))));
5842 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5843 narrower than M2, and both M1 and M2 are the same number of words,
5844 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5845 make that equivalence as well.
5847 However, BAR may have equivalences for which gen_lowpart_if_possible
5848 will produce a simpler value than gen_lowpart_if_possible applied to
5849 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5850 BAR's equivalences. If we don't get a simplified form, make
5851 the SUBREG. It will not be used in an equivalence, but will
5852 cause two similar assignments to be detected.
5854 Note the loop below will find SUBREG_REG (DEST) since we have
5855 already entered SRC and DEST of the SET in the table. */
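/* An illustrative sketch (register numbers, and a 4-byte UNITS_PER_WORD,
   are invented for the example): given
       (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))
   both SI and HI occupy one word and SI is at least as wide as HI, so
   the loop below may also record the equivalence
       (reg:HI 100) == (subreg:HI (reg:SI 101) 0).  */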
5857 if (GET_CODE (dest) == SUBREG
5858 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5859 / UNITS_PER_WORD)
5860 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5861 && (GET_MODE_SIZE (GET_MODE (dest))
5862 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5863 && sets[i].src_elt != 0)
5865 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5866 struct table_elt *elt, *classp = 0;
5868 for (elt = sets[i].src_elt->first_same_value; elt;
5869 elt = elt->next_same_value)
5871 rtx new_src = 0;
5872 unsigned src_hash;
5873 struct table_elt *src_elt;
5875 /* Ignore invalid entries. */
5876 if (GET_CODE (elt->exp) != REG
5877 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5878 continue;
5880 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
5881 if (new_src == 0)
5882 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
5884 src_hash = HASH (new_src, new_mode);
5885 src_elt = lookup (new_src, src_hash, new_mode);
5887 /* Put the new source in the hash table if it isn't
5888 already there. */
5889 if (src_elt == 0)
5891 if (insert_regs (new_src, classp, 0))
5893 rehash_using_reg (new_src);
5894 src_hash = HASH (new_src, new_mode);
5896 src_elt = insert (new_src, classp, src_hash, new_mode);
5897 src_elt->in_memory = elt->in_memory;
5899 else if (classp && classp != src_elt->first_same_value)
5900 /* Show that two things that we've seen before are
5901 actually the same. */
5902 merge_equiv_classes (src_elt, classp);
5904 classp = src_elt->first_same_value;
5905 /* Ignore invalid entries. */
5906 while (classp
5907 && GET_CODE (classp->exp) != REG
5908 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
5909 classp = classp->next_same_value;
5914 /* Special handling for (set REG0 REG1)
5915 where REG0 is the "cheapest", cheaper than REG1.
5916 After cse, REG1 will probably not be used in the sequel,
5917 so (if easily done) change this insn to (set REG1 REG0) and
5918 replace REG1 with REG0 in the previous insn that computed their value.
5919 Then REG1 will become a dead store and won't cloud the situation
5920 for later optimizations.
5922 Do not make this change if REG1 is a hard register, because it will
5923 then be used in the sequel and we may be changing a two-operand insn
5924 into a three-operand insn.
5926 Also do not do this if we are operating on a copy of INSN.
5928 Also don't do this if INSN ends a libcall; this would cause an unrelated
5929 register to be set in the middle of a libcall, and we then get bad code
5930 if the libcall is deleted. */
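/* A sketch of the intended rewrite (pseudo register numbers are
   invented for illustration):
       (set (reg 101) (expr))          (set (reg 100) (expr))
       (set (reg 100) (reg 101))  =>   (set (reg 101) (reg 100))
   after which the second insn is a likely-dead store of reg 101.  */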
5932 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
5933 && NEXT_INSN (PREV_INSN (insn)) == insn
5934 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
5935 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5936 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5938 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5939 struct qty_table_elem *src_ent = &qty_table[src_q];
5941 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5942 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5944 rtx prev = PREV_INSN (insn);
5945 while (prev && GET_CODE (prev) == NOTE)
5946 prev = PREV_INSN (prev);
5948 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
5949 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
5951 rtx dest = SET_DEST (sets[0].rtl);
5952 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
5954 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
5955 validate_change (insn, & SET_DEST (sets[0].rtl),
5956 SET_SRC (sets[0].rtl), 1);
5957 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
5958 apply_change_group ();
5960 /* If REG1 was equivalent to a constant, REG0 is not. */
5961 if (note)
5962 PUT_REG_NOTE_KIND (note, REG_EQUAL);
5964 /* If there was a REG_WAS_0 note on PREV, remove it. Move
5965 any REG_WAS_0 note on INSN to PREV. */
5966 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
5967 if (note)
5968 remove_note (prev, note);
5970 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5971 if (note)
5973 remove_note (insn, note);
5974 XEXP (note, 1) = REG_NOTES (prev);
5975 REG_NOTES (prev) = note;
5978 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
5979 then we must delete it, because the value in REG0 has changed. */
5980 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5981 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
5982 remove_note (insn, note);
5987 /* If this is a conditional jump insn, record any known equivalences due to
5988 the condition being tested. */
5990 last_jump_equiv_class = 0;
5991 if (GET_CODE (insn) == JUMP_INSN
5992 && n_sets == 1 && GET_CODE (x) == SET
5993 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
5994 record_jump_equiv (insn, 0);
5996 #ifdef HAVE_cc0
5997 /* If the previous insn set CC0 and this insn no longer references CC0,
5998 delete the previous insn. Here we use the fact that nothing expects CC0
5999 to be valid over an insn, which is true until the final pass. */
6000 if (prev_insn && GET_CODE (prev_insn) == INSN
6001 && (tem = single_set (prev_insn)) != 0
6002 && SET_DEST (tem) == cc0_rtx
6003 && ! reg_mentioned_p (cc0_rtx, x))
6005 PUT_CODE (prev_insn, NOTE);
6006 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
6007 NOTE_SOURCE_FILE (prev_insn) = 0;
6010 prev_insn_cc0 = this_insn_cc0;
6011 prev_insn_cc0_mode = this_insn_cc0_mode;
6012 #endif
6014 prev_insn = insn;
6017 /* Remove from the hash table all expressions that reference memory. */
6019 static void
6020 invalidate_memory ()
6022 register int i;
6023 register struct table_elt *p, *next;
6025 for (i = 0; i < HASH_SIZE; i++)
6026 for (p = table[i]; p; p = next)
6028 next = p->next_same_hash;
6029 if (p->in_memory)
6030 remove_from_table (p, i);
6034 /* If ADDR is an address that implicitly affects the stack pointer, return
6035 1 and update the register tables to show the effect. Else, return 0. */
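/* For example (a sketch; the exact RTL is target-dependent), ADDR for
   a stack push might be (pre_dec:SI (reg:SI SP)), where SP stands for
   STACK_POINTER_REGNUM: we bump REG_TICK for the stack pointer and
   return 1.  */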
6037 static int
6038 addr_affects_sp_p (addr)
6039 register rtx addr;
6041 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
6042 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
6043 && GET_CODE (XEXP (addr, 0)) == REG
6044 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6046 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6047 REG_TICK (STACK_POINTER_REGNUM)++;
6049 /* This should be *very* rare. */
6050 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6051 invalidate (stack_pointer_rtx, VOIDmode);
6053 return 1;
6056 return 0;
6059 /* Perform invalidation on the basis of everything about an insn
6060 except for invalidating the actual places that are SET in it.
6061 This includes the places CLOBBERed, and anything that might
6062 alias with something that is SET or CLOBBERed.
6064 X is the pattern of the insn. */
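/* E.g. for a pattern such as (a sketch with invented operands)
       (parallel [(set (reg 102) (plus (reg 103) (reg 104)))
                  (clobber (reg 105))])
   only the CLOBBER of reg 105 is handled here; the SET itself is
   invalidated later by cse_insn.  */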
6066 static void
6067 invalidate_from_clobbers (x)
6068 rtx x;
6070 if (GET_CODE (x) == CLOBBER)
6072 rtx ref = XEXP (x, 0);
6073 if (ref)
6075 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6076 || GET_CODE (ref) == MEM)
6077 invalidate (ref, VOIDmode);
6078 else if (GET_CODE (ref) == STRICT_LOW_PART
6079 || GET_CODE (ref) == ZERO_EXTRACT)
6080 invalidate (XEXP (ref, 0), GET_MODE (ref));
6083 else if (GET_CODE (x) == PARALLEL)
6085 register int i;
6086 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6088 register rtx y = XVECEXP (x, 0, i);
6089 if (GET_CODE (y) == CLOBBER)
6091 rtx ref = XEXP (y, 0);
6092 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6093 || GET_CODE (ref) == MEM)
6094 invalidate (ref, VOIDmode);
6095 else if (GET_CODE (ref) == STRICT_LOW_PART
6096 || GET_CODE (ref) == ZERO_EXTRACT)
6097 invalidate (XEXP (ref, 0), GET_MODE (ref));
6103 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6104 and replace any registers in them with either an equivalent constant
6105 or the canonical form of the register. If we are inside an address,
6106 only do this if the address remains valid.
6108 OBJECT is 0 except when within a MEM in which case it is the MEM.
6110 Return the replacement for X. */
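/* For instance (hypothetical): if (reg:SI 106) is currently known to
   equal (const_int 4), a REG_EQUAL note mentioning (reg:SI 106) is
   rewritten to use (const_int 4); failing that, the register is
   replaced by the canonical register of its quantity.  */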
6112 static rtx
6113 cse_process_notes (x, object)
6114 rtx x;
6115 rtx object;
6117 enum rtx_code code = GET_CODE (x);
6118 const char *fmt = GET_RTX_FORMAT (code);
6119 int i;
6121 switch (code)
6123 case CONST_INT:
6124 case CONST:
6125 case SYMBOL_REF:
6126 case LABEL_REF:
6127 case CONST_DOUBLE:
6128 case PC:
6129 case CC0:
6130 case LO_SUM:
6131 return x;
6133 case MEM:
6134 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
6135 return x;
6137 case EXPR_LIST:
6138 case INSN_LIST:
6139 if (REG_NOTE_KIND (x) == REG_EQUAL)
6140 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6141 if (XEXP (x, 1))
6142 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6143 return x;
6145 case SIGN_EXTEND:
6146 case ZERO_EXTEND:
6147 case SUBREG:
6149 rtx new = cse_process_notes (XEXP (x, 0), object);
6150 /* We don't substitute VOIDmode constants into these rtx,
6151 since they would impede folding. */
6152 if (GET_MODE (new) != VOIDmode)
6153 validate_change (object, &XEXP (x, 0), new, 0);
6154 return x;
6157 case REG:
6158 i = REG_QTY (REGNO (x));
6160 /* Return a constant or a constant register. */
6161 if (REGNO_QTY_VALID_P (REGNO (x)))
6163 struct qty_table_elem *ent = &qty_table[i];
6165 if (ent->const_rtx != NULL_RTX
6166 && (CONSTANT_P (ent->const_rtx)
6167 || GET_CODE (ent->const_rtx) == REG))
6169 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6170 if (new)
6171 return new;
6175 /* Otherwise, canonicalize this register. */
6176 return canon_reg (x, NULL_RTX);
6178 default:
6179 break;
6182 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6183 if (fmt[i] == 'e')
6184 validate_change (object, &XEXP (x, i),
6185 cse_process_notes (XEXP (x, i), object), 0);
6187 return x;
6190 /* Find common subexpressions between the end test of a loop and the beginning
6191 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6193 Often we have a loop where an expression in the exit test is used
6194 in the body of the loop. For example "while (*p) *q++ = *p++;".
6195 Because of the way we duplicate the loop exit test in front of the loop,
6196 however, we don't detect that common subexpression. This will be caught
6197 when global cse is implemented, but this is a quite common case.
6199 This function handles the most common cases of these common expressions.
6200 It is called after we have processed the basic block ending with the
6201 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6202 jumps to a label used only once. */
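/* A rough sketch of the insn layout this function expects:
       ... duplicated exit test ...
       NOTE_INSN_LOOP_BEG
       LOOP_START (a CODE_LABEL)
       ... loop body, exit test, conditional jump to LOOP_START ...
       NOTE_INSN_LOOP_END  */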
6204 static void
6205 cse_around_loop (loop_start)
6206 rtx loop_start;
6208 rtx insn;
6209 int i;
6210 struct table_elt *p;
6212 /* If the jump at the end of the loop doesn't go to the start, we don't
6213 do anything. */
6214 for (insn = PREV_INSN (loop_start);
6215 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6216 insn = PREV_INSN (insn))
6219 if (insn == 0
6220 || GET_CODE (insn) != NOTE
6221 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6222 return;
6224 /* If the last insn of the loop (the end test) was an NE comparison,
6225 we will interpret it as an EQ comparison, since we fell through
6226 the loop. Any equivalences resulting from that comparison are
6227 therefore not valid and must be invalidated. */
6228 if (last_jump_equiv_class)
6229 for (p = last_jump_equiv_class->first_same_value; p;
6230 p = p->next_same_value)
6232 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6233 || (GET_CODE (p->exp) == SUBREG
6234 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6235 invalidate (p->exp, VOIDmode);
6236 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6237 || GET_CODE (p->exp) == ZERO_EXTRACT)
6238 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6241 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6242 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6244 The only thing we do with SET_DEST is invalidate entries, so we
6245 can safely process each SET in order. It is slightly less efficient
6246 to do so, but we only want to handle the most common cases.
6248 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6249 These pseudos won't have valid entries in any of the tables indexed
6250 by register number, such as reg_qty. We avoid out-of-range array
6251 accesses by not processing any instructions created after cse started. */
6253 for (insn = NEXT_INSN (loop_start);
6254 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6255 && INSN_UID (insn) < max_insn_uid
6256 && ! (GET_CODE (insn) == NOTE
6257 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6258 insn = NEXT_INSN (insn))
6260 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6261 && (GET_CODE (PATTERN (insn)) == SET
6262 || GET_CODE (PATTERN (insn)) == CLOBBER))
6263 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6264 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6265 && GET_CODE (PATTERN (insn)) == PARALLEL)
6266 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6267 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6268 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6269 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6270 loop_start);
6274 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6275 since they are done elsewhere. This function is called via note_stores. */
6277 static void
6278 invalidate_skipped_set (dest, set, data)
6279 rtx set;
6280 rtx dest;
6281 void *data ATTRIBUTE_UNUSED;
6283 enum rtx_code code = GET_CODE (dest);
6285 if (code == MEM
6286 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6287 /* There are times when an address can appear varying and be a PLUS
6288 during this scan when it would be a fixed address were we to know
6289 the proper equivalences. So invalidate all memory if there is
6290 a BLKmode or nonscalar memory reference or a reference to a
6291 variable address. */
6292 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6293 || cse_rtx_varies_p (XEXP (dest, 0))))
6295 invalidate_memory ();
6296 return;
6299 if (GET_CODE (set) == CLOBBER
6300 #ifdef HAVE_cc0
6301 || dest == cc0_rtx
6302 #endif
6303 || dest == pc_rtx)
6304 return;
6306 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6307 invalidate (XEXP (dest, 0), GET_MODE (dest));
6308 else if (code == REG || code == SUBREG || code == MEM)
6309 invalidate (dest, VOIDmode);
6312 /* Invalidate all insns from START up to the end of the function or the
6313 next label. This is called when we wish to CSE around a block that is
6314 conditionally executed. */
6316 static void
6317 invalidate_skipped_block (start)
6318 rtx start;
6320 rtx insn;
6322 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6323 insn = NEXT_INSN (insn))
6325 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6326 continue;
6328 if (GET_CODE (insn) == CALL_INSN)
6330 if (! CONST_CALL_P (insn))
6331 invalidate_memory ();
6332 invalidate_for_call ();
6335 invalidate_from_clobbers (PATTERN (insn));
6336 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6340 /* If modifying X will modify the value in *DATA (which is really an
6341 `rtx *'), indicate that fact by setting the pointed to value to
6342 NULL_RTX. */
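/* E.g. (invented operands): if *DATA is (plus (reg 107) (const_int 8))
   and X is (reg 107), modifying X may change the tracked value, so
   *DATA is zapped to NULL_RTX.  */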
6344 static void
6345 cse_check_loop_start (x, set, data)
6346 rtx x;
6347 rtx set ATTRIBUTE_UNUSED;
6348 void *data;
6350 rtx *cse_check_loop_start_value = (rtx *) data;
6352 if (*cse_check_loop_start_value == NULL_RTX
6353 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6354 return;
6356 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6357 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6358 *cse_check_loop_start_value = NULL_RTX;
6361 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6362 a loop that starts with the label at LOOP_START.
6364 If X is a SET, we see if its SET_SRC is currently in our hash table.
6365 If so, we see if it has a value equal to some register used only in the
6366 loop exit code (as marked by jump.c).
6368 If those two conditions are true, we search backwards from the start of
6369 the loop to see if that same value was loaded into a register that still
6370 retains its value at the start of the loop.
6372 If so, we insert an insn after the load to copy the destination of that
6373 load into the equivalent register and (try to) replace our SET_SRC with that
6374 register.
6376 In any event, we invalidate whatever this SET or CLOBBER modifies. */
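/* A sketch of the rewrite attempted here (register numbers invented,
   and assuming nothing between P and the loop start clobbers anything
   EXPR references):
       before loop:  P: (set (reg 108) EXPR)
       in loop:         (set (reg 109) EXPR)   ; EXPR == loop-test reg 110
   becomes
       before loop:  P: (set (reg 108) EXPR)
                        (set (reg 110) (reg 108))   ; copy inserted after P
       in loop:         (set (reg 109) (reg 110))  */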
6378 static void
6379 cse_set_around_loop (x, insn, loop_start)
6380 rtx x;
6381 rtx insn;
6382 rtx loop_start;
6384 struct table_elt *src_elt;
6386 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6387 are setting PC or CC0 or whose SET_SRC is already a register. */
6388 if (GET_CODE (x) == SET
6389 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6390 && GET_CODE (SET_SRC (x)) != REG)
6392 src_elt = lookup (SET_SRC (x),
6393 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6394 GET_MODE (SET_DEST (x)));
6396 if (src_elt)
6397 for (src_elt = src_elt->first_same_value; src_elt;
6398 src_elt = src_elt->next_same_value)
6399 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6400 && COST (src_elt->exp) < COST (SET_SRC (x)))
6402 rtx p, set;
6404 /* Look for an insn in front of LOOP_START that sets
6405 something in the desired mode to SET_SRC (x) before we hit
6406 a label or CALL_INSN. */
6408 for (p = prev_nonnote_insn (loop_start);
6409 p && GET_CODE (p) != CALL_INSN
6410 && GET_CODE (p) != CODE_LABEL;
6411 p = prev_nonnote_insn (p))
6412 if ((set = single_set (p)) != 0
6413 && GET_CODE (SET_DEST (set)) == REG
6414 && GET_MODE (SET_DEST (set)) == src_elt->mode
6415 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6417 /* We now have to ensure that nothing between P
6418 and LOOP_START modified anything referenced in
6419 SET_SRC (x). We know that nothing within the loop
6420 can modify it, or we would have invalidated it in
6421 the hash table. */
6422 rtx q;
6423 rtx cse_check_loop_start_value = SET_SRC (x);
6424 for (q = p; q != loop_start; q = NEXT_INSN (q))
6425 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
6426 note_stores (PATTERN (q),
6427 cse_check_loop_start,
6428 &cse_check_loop_start_value);
6430 /* If nothing was changed and we can replace our
6431 SET_SRC, add an insn after P to copy its destination
6432 to what we will be replacing SET_SRC with. */
6433 if (cse_check_loop_start_value
6434 && validate_change (insn, &SET_SRC (x),
6435 src_elt->exp, 0))
6437 /* If this creates new pseudos, this is unsafe,
6438 because the regno of a new pseudo is unsuitable
6439 to index into reg_qty when cse_insn processes
6440 the new insn. Therefore, if a new pseudo was
6441 created, discard this optimization. */
6442 int nregs = max_reg_num ();
6443 rtx move
6444 = gen_move_insn (src_elt->exp, SET_DEST (set));
6445 if (nregs != max_reg_num ())
6447 if (! validate_change (insn, &SET_SRC (x),
6448 SET_SRC (set), 0))
6449 abort ();
6451 else
6452 emit_insn_after (move, p);
6454 break;
6459 /* Deal with the destination of X affecting the stack pointer. */
6460 addr_affects_sp_p (SET_DEST (x));
6462 /* See comment on similar code in cse_insn for explanation of these
6463 tests. */
6464 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6465 || GET_CODE (SET_DEST (x)) == MEM)
6466 invalidate (SET_DEST (x), VOIDmode);
6467 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6468 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6469 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6472 /* Find the end of INSN's basic block and return its range,
6473 the total number of SETs in all the insns of the block, the last insn of the
6474 block, and the branch path.
6476 The branch path indicates which branches should be followed. If a non-zero
6477 path size is specified, the block should be rescanned and a different set
6478 of branches will be taken. The branch path is only used if
6479 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6481 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6482 used to describe the block. It is filled in with the information about
6483 the current block. The incoming structure's branch path, if any, is used
6484 to construct the output branch path. */
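/* A sketch of a branch path over two conditional jumps (hypothetical):
       data->path[0] = { branch = jump A, status = TAKEN }
       data->path[1] = { branch = jump B, status = AROUND }
   On a rescan, trailing NOT_TAKEN entries are dropped and the last
   remaining entry is flipped to NOT_TAKEN, so a different set of
   branches gets explored.  */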
6486 void
6487 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6488 rtx insn;
6489 struct cse_basic_block_data *data;
6490 int follow_jumps;
6491 int after_loop;
6492 int skip_blocks;
6494 rtx p = insn, q;
6495 int nsets = 0;
6496 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6497 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
6498 int path_size = data->path_size;
6499 int path_entry = 0;
6500 int i;
6502 /* Update the previous branch path, if any. If the last branch was
6503 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6504 shorten the path by one and look at the previous branch. We know that
6505 at least one branch must have been taken if PATH_SIZE is non-zero. */
6506 while (path_size > 0)
6508 if (data->path[path_size - 1].status != NOT_TAKEN)
6510 data->path[path_size - 1].status = NOT_TAKEN;
6511 break;
6513 else
6514 path_size--;
6517 /* If the first instruction is marked with QImode, that means we've
6518 already processed this block. Our caller will look at DATA->LAST
6519 to figure out where to go next. We want to return the next block
6520 in the instruction stream, not some branched-to block somewhere
6521 else. We accomplish this by pretending our caller forbade us to
6522 follow jumps or skip blocks. */
6523 if (GET_MODE (insn) == QImode)
6524 follow_jumps = skip_blocks = 0;
6526 /* Scan to end of this basic block. */
6527 while (p && GET_CODE (p) != CODE_LABEL)
6529 /* Don't cse out the end of a loop. This makes a difference
6530 only for the unusual loops that always execute at least once;
6531 all other loops have labels there so we will stop in any case.
6532 Cse'ing out the end of the loop is dangerous because it
6533 might cause an invariant expression inside the loop
6534 to be reused after the end of the loop. This would make it
6535 hard to move the expression out of the loop in loop.c,
6536 especially if it is one of several equivalent expressions
6537 and loop.c would like to eliminate it.
6539 If we are running after loop.c has finished, we can ignore
6540 the NOTE_INSN_LOOP_END. */
6542 if (! after_loop && GET_CODE (p) == NOTE
6543 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6544 break;
6546 /* Don't cse over a call to setjmp; on some machines (e.g. the VAX)
6547 the regs restored by the longjmp come from
6548 a later time than the setjmp. */
6549 if (GET_CODE (p) == NOTE
6550 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
6551 break;
6553 /* A PARALLEL can have lots of SETs in it,
6554 especially if it is really an ASM_OPERANDS. */
6555 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6556 && GET_CODE (PATTERN (p)) == PARALLEL)
6557 nsets += XVECLEN (PATTERN (p), 0);
6558 else if (GET_CODE (p) != NOTE)
6559 nsets += 1;
6561 /* Ignore insns made by CSE; they cannot affect the boundaries of
6562 the basic block. */
6564 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6565 high_cuid = INSN_CUID (p);
6566 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6567 low_cuid = INSN_CUID (p);
6569 /* See if this insn is in our branch path. If it is and we are to
6570 take it, do so. */
6571 if (path_entry < path_size && data->path[path_entry].branch == p)
6573 if (data->path[path_entry].status != NOT_TAKEN)
6574 p = JUMP_LABEL (p);
6576 /* Point to next entry in path, if any. */
6577 path_entry++;
6580 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6581 was specified, we haven't reached our maximum path length, there are
6582 insns following the target of the jump, this is the only use of the
6583 jump label, and the target label is preceded by a BARRIER.
6585 Alternatively, we can follow the jump if it branches around a
6586 block of code and there are no other branches into the block.
6587 In this case invalidate_skipped_block will be called to invalidate any
6588 registers set in the block when following the jump. */
6590 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6591 && GET_CODE (p) == JUMP_INSN
6592 && GET_CODE (PATTERN (p)) == SET
6593 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6594 && JUMP_LABEL (p) != 0
6595 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6596 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6598 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6599 if ((GET_CODE (q) != NOTE
6600 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6601 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
6602 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6603 break;
6605 /* If we ran into a BARRIER, this code is an extension of the
6606 basic block when the branch is taken. */
6607 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6609 /* Don't allow ourselves to keep walking around an
6610 always-executed loop. */
6611 if (next_real_insn (q) == next)
6613 p = NEXT_INSN (p);
6614 continue;
6617 /* Similarly, don't put a branch in our path more than once. */
6618 for (i = 0; i < path_entry; i++)
6619 if (data->path[i].branch == p)
6620 break;
6622 if (i != path_entry)
6623 break;
6625 data->path[path_entry].branch = p;
6626 data->path[path_entry++].status = TAKEN;
6628 /* This branch now ends our path. It was possible that we
6629 didn't see this branch the last time around (when the
6630 insn in front of the target was a JUMP_INSN that was
6631 turned into a no-op). */
6632 path_size = path_entry;
6634 p = JUMP_LABEL (p);
6635 /* Mark block so we won't scan it again later. */
6636 PUT_MODE (NEXT_INSN (p), QImode);
6638 /* Detect a branch around a block of code. */
6639 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6641 register rtx tmp;
6643 if (next_real_insn (q) == next)
6645 p = NEXT_INSN (p);
6646 continue;
6649 for (i = 0; i < path_entry; i++)
6650 if (data->path[i].branch == p)
6651 break;
6653 if (i != path_entry)
6654 break;
6656 /* This is no_labels_between_p (p, q) with an added check for
6657 reaching the end of a function (in case Q precedes P). */
6658 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6659 if (GET_CODE (tmp) == CODE_LABEL)
6660 break;
6662 if (tmp == q)
6664 data->path[path_entry].branch = p;
6665 data->path[path_entry++].status = AROUND;
6667 path_size = path_entry;
6669 p = JUMP_LABEL (p);
6670 /* Mark block so we won't scan it again later. */
6671 PUT_MODE (NEXT_INSN (p), QImode);
6675 p = NEXT_INSN (p);
6678 data->low_cuid = low_cuid;
6679 data->high_cuid = high_cuid;
6680 data->nsets = nsets;
6681 data->last = p;
6683 /* If all jumps in the path are not taken, set our path length to zero
6684 so a rescan won't be done. */
6685 for (i = path_size - 1; i >= 0; i--)
6686 if (data->path[i].status != NOT_TAKEN)
6687 break;
6689 if (i == -1)
6690 data->path_size = 0;
6691 else
6692 data->path_size = path_size;
6694 /* End the current branch path. */
6695 data->path[path_size].branch = 0;
6698 /* Perform cse on the instructions of a function.
6699 F is the first instruction.
6700 NREGS is one plus the highest pseudo-reg number used in the function.
6702 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6703 (only if -frerun-cse-after-loop).
6705 Returns 1 if jump_optimize should be redone due to simplifications
6706 in conditional jump instructions. */
6708 int
6709 cse_main (f, nregs, after_loop, file)
6710 rtx f;
6711 int nregs;
6712 int after_loop;
6713 FILE *file;
6715 struct cse_basic_block_data val;
6716 register rtx insn = f;
6717 register int i;
6719 cse_jumps_altered = 0;
6720 recorded_label_ref = 0;
6721 constant_pool_entries_cost = 0;
6722 val.path_size = 0;
6724 init_recog ();
6725 init_alias_analysis ();
6727 max_reg = nregs;
6729 max_insn_uid = get_max_uid ();
6731 reg_eqv_table = (struct reg_eqv_elem *)
6732 xmalloc (nregs * sizeof (struct reg_eqv_elem));
6734 #ifdef LOAD_EXTEND_OP
6736 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6737 and change the code and mode as appropriate. */
6738 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
6739 #endif
6741 /* Discard all the free elements of the previous function
6742 since they are allocated in the temporary obstack. */
6743 bzero ((char *) table, sizeof table);
6744 free_element_chain = 0;
6745 n_elements_made = 0;
6747 /* Find the largest uid. */
6749 max_uid = get_max_uid ();
6750 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
6752 /* Compute the mapping from uids to cuids.
6753 CUIDs are numbers assigned to insns, like uids,
6754 except that cuids increase monotonically through the code.
6755 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6756 between two insns is not affected by -g. */
6758 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6760 if (GET_CODE (insn) != NOTE
6761 || NOTE_LINE_NUMBER (insn) < 0)
6762 INSN_CUID (insn) = ++i;
6763 else
6764 /* Give a line number note the same cuid as preceding insn. */
6765 INSN_CUID (insn) = i;
6768 /* Initialize which registers are clobbered by calls. */
6770 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
6772 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6773 if ((call_used_regs[i]
6774 /* Used to check !fixed_regs[i] here, but that isn't safe;
6775 fixed regs are still call-clobbered, and sched can get
6776 confused if they can "live across calls".
6778 The frame pointer is always preserved across calls. The arg
6779 pointer is if it is fixed. The stack pointer usually is, unless
6780 RETURN_POPS_ARGS, in which case an explicit CLOBBER
6781 will be present. If we are generating PIC code, the PIC offset
6782 table register is preserved across calls. */
6784 && i != STACK_POINTER_REGNUM
6785 && i != FRAME_POINTER_REGNUM
6786 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
6787 && i != HARD_FRAME_POINTER_REGNUM
6788 #endif
6789 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
6790 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
6791 #endif
6792 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
6793 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
6794 #endif
6796 || global_regs[i])
6797 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
6799 if (ggc_p)
6800 ggc_push_context ();
6802 /* Loop over basic blocks.
6803 Compute the maximum number of qty's needed for each basic block
6804 (which is 2 for each SET). */
6805 insn = f;
6806 while (insn)
6808 cse_altered = 0;
6809 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
6810 flag_cse_skip_blocks);
6812 /* If this basic block was already processed or has no sets, skip it. */
6813 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6815 PUT_MODE (insn, VOIDmode);
6816 insn = (val.last ? NEXT_INSN (val.last) : 0);
6817 val.path_size = 0;
6818 continue;
6821 cse_basic_block_start = val.low_cuid;
6822 cse_basic_block_end = val.high_cuid;
6823 max_qty = val.nsets * 2;
6825 if (file)
6826 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6827 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6828 val.nsets);
6830 /* Make MAX_QTY bigger to give us room to optimize
6831 past the end of this basic block, if that should prove useful. */
6832 if (max_qty < 500)
6833 max_qty = 500;
6835 max_qty += max_reg;
6837 /* If this basic block is being extended by following certain jumps,
6838 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6839 Otherwise, we start after this basic block. */
6840 if (val.path_size > 0)
6841 cse_basic_block (insn, val.last, val.path, 0);
6842 else
6844 int old_cse_jumps_altered = cse_jumps_altered;
6845 rtx temp;
6847 /* When cse changes a conditional jump to an unconditional
6848 jump, we want to reprocess the block, since it will give
6849 us a new branch path to investigate. */
6850 cse_jumps_altered = 0;
6851 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
6852 if (cse_jumps_altered == 0
6853 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6854 insn = temp;
6856 cse_jumps_altered |= old_cse_jumps_altered;
6859 if (ggc_p && cse_altered)
6860 ggc_collect ();
6862 #ifdef USE_C_ALLOCA
6863 alloca (0);
6864 #endif
6867 if (ggc_p)
6868 ggc_pop_context ();
6870 if (max_elements_made < n_elements_made)
6871 max_elements_made = n_elements_made;
6873 /* Clean up. */
6874 end_alias_analysis ();
6875 free (uid_cuid);
6876 free (reg_eqv_table);
6878 return cse_jumps_altered || recorded_label_ref;
6881 /* Process a single basic block. FROM and TO are the limits of the basic
6882 block. NEXT_BRANCH points to the branch path when following jumps or
6883 a null path when not following jumps.
6885 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
6886 loop. This is true when we are being called for the last time on a
6887 block and this CSE pass is before loop.c. */
6889 static rtx
6890 cse_basic_block (from, to, next_branch, around_loop)
6891 register rtx from, to;
6892 struct branch_path *next_branch;
6893 int around_loop;
6895 register rtx insn;
6896 int to_usage = 0;
6897 rtx libcall_insn = NULL_RTX;
6898 int num_insns = 0;
6900 /* This array is undefined before max_reg, so only allocate
6901 the space actually needed and adjust the start. */
6903 qty_table
6904 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
6905 * sizeof (struct qty_table_elem));
6906 qty_table -= max_reg;
6908 new_basic_block ();
6910 /* TO might be a label. If so, protect it from being deleted. */
6911 if (to != 0 && GET_CODE (to) == CODE_LABEL)
6912 ++LABEL_NUSES (to);
6914 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6916 register enum rtx_code code = GET_CODE (insn);
6918 /* If we have processed 1,000 insns, flush the hash table to
6919 avoid extreme quadratic behavior. We must not include NOTEs
6920 in the count since there may be more of them when generating
6921 debugging information. If we clear the table at different
6922 times, code generated with -g -O might be different than code
6923 generated with -O but not -g.
6925 ??? This is a real kludge and needs to be done some other way.
6926 Perhaps for 2.9. */
6927 if (code != NOTE && num_insns++ > 1000)
6929 flush_hash_table ();
6930 num_insns = 0;
6933 /* See if this is a branch that is part of the path. If so, and it is
6934 to be taken, do so. */
6935 if (next_branch->branch == insn)
6937 enum taken status = next_branch++->status;
6938 if (status != NOT_TAKEN)
6940 if (status == TAKEN)
6941 record_jump_equiv (insn, 1);
6942 else
6943 invalidate_skipped_block (NEXT_INSN (insn));
6945 /* Set the last insn as the jump insn; it doesn't affect cc0.
6946 Then follow this branch. */
6947 #ifdef HAVE_cc0
6948 prev_insn_cc0 = 0;
6949 #endif
6950 prev_insn = insn;
6951 insn = JUMP_LABEL (insn);
6952 continue;
6956 if (GET_MODE (insn) == QImode)
6957 PUT_MODE (insn, VOIDmode);
6959 if (GET_RTX_CLASS (code) == 'i')
6961 rtx p;
6963 /* Process notes first so we have all notes in canonical forms when
6964 looking for duplicate operations. */
6966 if (REG_NOTES (insn))
6967 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6969 /* Track when we are inside a LIBCALL block. Inside such a block,
6970 we do not want to record destinations. The last insn of a
6971 LIBCALL block is not considered to be part of the block, since
6972 its destination is the result of the block and hence should be
6973 recorded. */
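/* Schematically, a LIBCALL block looks like (a sketch):
       insn A: carries a REG_LIBCALL note pointing at insn B
       ...
       insn B: sets the block's result; carries a REG_RETVAL note
               pointing back at insn A
   Destinations are recorded only for insn B.  */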
6975 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6976 libcall_insn = XEXP (p, 0);
6977 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6978 libcall_insn = NULL_RTX;
6980 cse_insn (insn, libcall_insn);
6983 /* If INSN is now an unconditional jump, skip to the end of our
6984 basic block by pretending that we just did the last insn in the
6985 basic block. If we are jumping to the end of our block, show
6986 that we can have one usage of TO. */
6988 if (simplejump_p (insn))
6990 if (to == 0)
6992 free (qty_table + max_reg);
6993 return 0;
6996 if (JUMP_LABEL (insn) == to)
6997 to_usage = 1;
6999 /* Maybe TO was deleted because the jump is unconditional.
7000 If so, there is nothing left in this basic block. */
7001 /* ??? Perhaps it would be smarter to set TO
7002 to whatever follows this insn,
7003 and pretend the basic block had always ended here. */
7004 if (INSN_DELETED_P (to))
7005 break;
7007 insn = PREV_INSN (to);
7010 /* See if it is ok to keep on going past the label
7011 which used to end our basic block. Remember that we incremented
7012 the count of that label, so we decrement it here. If we made
7013 a jump unconditional, TO_USAGE will be one; in that case, we don't
7014 want to count the use in that jump. */
7016 if (to != 0 && NEXT_INSN (insn) == to
7017 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7019 struct cse_basic_block_data val;
7020 rtx prev;
7022 insn = NEXT_INSN (to);
7024 /* If TO was the last insn in the function, we are done. */
7025 if (insn == 0)
7027 free (qty_table + max_reg);
7028 return 0;
7031 /* If TO was preceded by a BARRIER we are done with this block
7032 because it has no continuation. */
7033 prev = prev_nonnote_insn (to);
7034 if (prev && GET_CODE (prev) == BARRIER)
7036 free (qty_table + max_reg);
7037 return insn;
7040 /* Find the end of the following block. Note that we won't be
7041 following branches in this case. */
7042 to_usage = 0;
7043 val.path_size = 0;
7044 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7046 /* If the tables we allocated have enough space left
7047 to handle all the SETs in the next basic block,
7048 continue through it. Otherwise, return,
7049 and that block will be scanned individually. */
7050 if (val.nsets * 2 + next_qty > max_qty)
7051 break;
7053 cse_basic_block_start = val.low_cuid;
7054 cse_basic_block_end = val.high_cuid;
7055 to = val.last;
7057 /* Prevent TO from being deleted if it is a label. */
7058 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7059 ++LABEL_NUSES (to);
7061 /* Back up so we process the first insn in the extension. */
7062 insn = PREV_INSN (insn);
7066 if (next_qty > max_qty)
7067 abort ();
7069 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7070 the previous insn is the only insn that branches to the head of a loop,
7071 we can cse into the loop. Don't do this if we changed the jump
7072 structure of a loop unless we aren't going to be following jumps. */
7074 if ((cse_jumps_altered == 0
7075 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7076 && around_loop && to != 0
7077 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7078 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
7079 && JUMP_LABEL (PREV_INSN (to)) != 0
7080 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
7081 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
7083 free (qty_table + max_reg);
7085 return to ? NEXT_INSN (to) : 0;
7088 /* Count the number of times registers are used (not set) in X.
7089 COUNTS is an array in which we accumulate the count, INCR is how much
7090 we count each register usage.
7092 Don't count a usage of DEST, which is the SET_DEST of a SET which
7093 contains X in its SET_SRC. This is because such a SET does not
7094 modify the liveness of DEST. */
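/* E.g. (invented registers): for
       (set (reg 111) (plus (reg 112) (reg 111)))
   the (reg 111) inside the source is not counted, since DEST is passed
   down as (reg 111), while COUNTS[112] is incremented by INCR.  */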
7096 static void
7097 count_reg_usage (x, counts, dest, incr)
7098 rtx x;
7099 int *counts;
7100 rtx dest;
7101 int incr;
7103 enum rtx_code code;
7104 const char *fmt;
7105 int i, j;
7107 if (x == 0)
7108 return;
7110 switch (code = GET_CODE (x))
7112 case REG:
7113 if (x != dest)
7114 counts[REGNO (x)] += incr;
7115 return;
7117 case PC:
7118 case CC0:
7119 case CONST:
7120 case CONST_INT:
7121 case CONST_DOUBLE:
7122 case SYMBOL_REF:
7123 case LABEL_REF:
7124 return;
7126 case CLOBBER:
7127 /* If we are clobbering a MEM, mark any registers inside the address
7128 as being used. */
7129 if (GET_CODE (XEXP (x, 0)) == MEM)
7130 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7131 return;
7133 case SET:
7134 /* Unless we are setting a REG, count everything in SET_DEST. */
7135 if (GET_CODE (SET_DEST (x)) != REG)
7136 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7138 /* If SRC has side-effects, then we can't delete this insn, so the
7139 usage of SET_DEST inside SRC counts.
7141 ??? Strictly-speaking, we might be preserving this insn
7142 because some other SET has side-effects, but that's hard
7143 to do and can't happen now. */
7144 count_reg_usage (SET_SRC (x), counts,
7145 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7146 incr);
7147 return;
7149 case CALL_INSN:
7150 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7152 /* ... falls through ... */
7153 case INSN:
7154 case JUMP_INSN:
7155 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7157 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7158 use them. */
7160 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7161 return;
7163 case EXPR_LIST:
7164 case INSN_LIST:
7165 if (REG_NOTE_KIND (x) == REG_EQUAL
7166 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
7167 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7168 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7169 return;
7171 default:
7172 break;
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
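/* A note on the default arm above: GET_RTX_FORMAT yields one character
   per operand, where 'e' marks a subexpression and 'E' a vector of
   subexpressions.  A PLUS, for instance, has format "ee", so both of its
   operands are scanned recursively, still excluding DEST.  */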
/* Scan all the insns and delete any that are dead; i.e., any that store
   a register that is never used or copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in the loop optimizer, since
   it won't try to move dead invariants out of loops or create givs for
   dead quantities.  The remaining passes of the compilation are also
   sped up.  */
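/* Illustrative example: if (set (reg 42) (const_int 0)) sets a pseudo
   that is never mentioned again, its usage count stays zero and the insn
   is removed; a self-copy such as (set (reg 42) (reg 42)) is removed
   regardless of the count.  */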
void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts;
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  counts = (int *) xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);
  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.  */
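  /* Scanning backwards lets whole dead chains collapse in a single pass:
     deleting (set (reg 9) (reg 8)) because register 9 is unused also
     decrements register 8's count, which may make the earlier insn that
     sets register 8 dead by the time we reach it.  */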
  insn = get_last_insn ();
  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
    insn = prev_real_insn (insn);

  for ( ; insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
	 we can delete the whole libcall block.

	 The flow or loop passes might get confused if we did that.
	 Remember that we are scanning backwards.  */
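      /* For reference, a libcall block is bracketed by notes:

	     (insn ... (REG_LIBCALL <last insn>))
	     ...
	     (insn ... (REG_RETVAL <first insn>))

	 so on a backward scan the REG_RETVAL note marks entry into a
	 block and the REG_LIBCALL note marks leaving it.  */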
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  in_libcall = 1;
	  live_insn = 1;
	  dead_libcall = 0;

	  /* See if there's a REG_EQUAL note on this insn and try to
	     replace the source with the REG_EQUAL expression.

	     We assume that insns with REG_RETVALs can only be reg->reg
	     copies at this point.  */
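	  /* Illustrative example: a libcall whose result insn is
	     (set (reg 10) (reg 95)) with a (REG_EQUAL (const_int 3)) note
	     can have (const_int 3) substituted as its source; the block's
	     temporaries then lose their last uses, and the whole block
	     becomes deletable on this same scan.  */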
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (note)
	    {
	      rtx set = single_set (insn);
	      rtx new = simplify_rtx (XEXP (note, 0));

	      if (!new)
		new = XEXP (note, 0);

	      if (set && validate_change (insn, &SET_SRC (set), new, 0))
		{
		  remove_note (insn,
			       find_reg_note (insn, REG_RETVAL, NULL_RTX));
		  dead_libcall = 1;
		}
	    }
	}
      else if (in_libcall)
	live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
	{
	  if ((GET_CODE (SET_DEST (PATTERN (insn))) == REG
	       || GET_CODE (SET_DEST (PATTERN (insn))) == SUBREG)
	      && rtx_equal_p (SET_DEST (PATTERN (insn)),
			      SET_SRC (PATTERN (insn))))
	    ;

#ifdef HAVE_cc0
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
		   && ! side_effects_p (SET_SRC (PATTERN (insn)))
		   && ((tem = next_nonnote_insn (insn)) == 0
		       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
		       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
	    ;
#endif
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
		   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
		   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
		   || side_effects_p (SET_SRC (PATTERN (insn)))
		   /* An ADDRESSOF expression can turn into a use of the
		      internal arg pointer, so always consider the
		      internal arg pointer live.  If it is truly dead,
		      flow will delete the initializing insn.  */
		   || (SET_DEST (PATTERN (insn))
		       == current_function_internal_arg_pointer))
	    live_insn = 1;
	}
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  {
	    rtx elt = XVECEXP (PATTERN (insn), 0, i);

	    if (GET_CODE (elt) == SET)
	      {
		if ((GET_CODE (SET_DEST (elt)) == REG
		     || GET_CODE (SET_DEST (elt)) == SUBREG)
		    && rtx_equal_p (SET_DEST (elt), SET_SRC (elt)))
		  ;

#ifdef HAVE_cc0
		else if (GET_CODE (SET_DEST (elt)) == CC0
			 && ! side_effects_p (SET_SRC (elt))
			 && ((tem = next_nonnote_insn (insn)) == 0
			     || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
			     || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
		  ;
#endif
		else if (GET_CODE (SET_DEST (elt)) != REG
			 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
			 || counts[REGNO (SET_DEST (elt))] != 0
			 || side_effects_p (SET_SRC (elt))
			 /* An ADDRESSOF expression can turn into a use of the
			    internal arg pointer, so always consider the
			    internal arg pointer live.  If it is truly dead,
			    flow will delete the initializing insn.  */
			 || (SET_DEST (elt)
			     == current_function_internal_arg_pointer))
		  live_insn = 1;
	      }
	    else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	      live_insn = 1;
	  }
      else
	live_insn = 1;
      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn)
	{
	  count_reg_usage (insn, counts, NULL_RTX, -1);
	  delete_insn (insn);
	}

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	{
	  in_libcall = 0;
	  dead_libcall = 0;
	}
    }

  /* Clean up.  */
  free (counts);
}
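/* Usage sketch (callers live outside this file): passes typically invoke
   this as delete_trivially_dead_insns (get_insns (), max_reg_num ());
   NREG sizes the counts array, so it must exceed the largest register
   number appearing anywhere in INSNS.  */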