/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* stdio.h must precede rtl.h for FFS.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.
   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.
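   For illustration (hypothetical numbers): if register 6 has never been
   assigned a quantity, REG_QTY (6) is -7.  Once `make_new_qty' runs for
   it, REG_QTY (6) becomes the index of a fresh qty_table entry, say 3,
   with qty_table[3].first_reg == qty_table[3].last_reg == 6.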
   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.
   Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
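   For example (hypothetical RTL): after

     (set (reg:SI 100) (const_int 7))
     (set (reg:DI 101) (const_int 7))

   the table records (const_int 7) once under SImode and once under
   DImode; a lookup in one mode does not see the entry for the other.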
   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.
   1. If the value changing is in memory, except in special cases
   ANYTHING referring to memory could be changed.  That is because
   nobody knows where a pointer does not point.
   The function `invalidate_memory' removes what is necessary.

   The special cases are when the address is constant or is
   a constant plus a fixed register such as the frame pointer
   or a static chain pointer.  When such addresses are stored in,
   we can tell exactly which other such addresses must be invalidated
   due to overlap.  `invalidate' does this.
   All expressions that refer to non-constant
   memory addresses are also invalidated.  `invalidate_memory' does this.
   2. If the value changing is a register, all expressions
   containing references to that register, and only those,
   must be removed from the hash table.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.
   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match makes the entries be
   ignored if anyone tries to match them.
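   A hypothetical timeline for register 100: with REG_TICK (100) == 1,
   entering (plus (reg 100) (const_int 4)) sets REG_IN_TABLE (100) to 1.
   A later store into register 100 bumps REG_TICK (100) to 2; the stale
   PLUS stays in the table but no longer matches.  Only when another
   expression mentioning register 100 is about to be entered do we scan
   for and remove the stale references.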
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.
   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
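/* For example (hypothetical symbols): when
   (const (plus (symbol_ref "tbl") (const_int 16))) is entered, the
   integer-free expression (symbol_ref "tbl") is entered too and the two
   are linked, so a register known to hold either one can be used to
   rebuild the other by adding or subtracting 16.  */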
/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */
static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */
static int next_qty;
/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */
struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  rtx insn;
  rtx newreg;
};
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
/* Insn being scanned.  */

static rtx this_insn;
static bool optimize_this_for_speed_p;
/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   quantity.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;
/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning
   of each such run.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;
/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;
/* True if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND note; we then have to rerun jump after
   CSE to add the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;
/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The `canon_exp' field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a CONST_INT).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */
struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)
/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
   : canon_hash (X, M)) & HASH_MASK)
/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M) \
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
   : safe_hash (X, M)) & HASH_MASK)
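/* A typical call sequence (sketch): compute the hash once and reuse it,
   e.g.

     hash = HASH (exp, mode);
     elt = lookup (exp, hash, mode);

   HASH may set `do_not_record' and `hash_arg_in_memory' through
   canon_hash; SAFE_HASH computes the same value without touching
   those flags.  */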
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  (REGNO_PTR_FRAME_P (N) \
   || (HARD_REGISTER_NUM_P (N) \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
#define COST_IN(X, OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   subreg).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
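/* Given the -N - 1 encoding above, this is just a sign test: e.g.
   REG_QTY (5) == -6 means register 5 has no quantity yet, so
   REGNO_QTY_VALID_P (5) is false until make_new_qty assigns one.  */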
static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;
/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;
/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path
    {
      /* The basic block for this path entry.  */
      basic_block bb;
    } *path;
};
/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;
static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
                                 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static bool cse_rtx_varies_p (const_rtx, bool);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
                                        enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
                                           enum machine_mode *,
                                           enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, bool);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
                              int);
static void cse_insn (rtx);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
static void count_reg_usage (rtx, int *, rtx, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt *);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
                                       bool);

#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
        return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
        return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = (int *) data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
        {
          if (regno < FIRST_PSEUDO_REGISTER)
            {
              if (SMALL_REGISTER_CLASSES)
                return 1;
              *cost_p = 2;
            }
          else
            *cost_p += 1;
        }
    }

  return 0;
}
/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}
/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
        return 1;
      if (cost_b == MAX_COST)
        return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
        return 1;
      if (regcost_b == MAX_COST)
        return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
           && REG_P (SUBREG_REG (x))
           && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && (GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
           && subreg_lowpart_p (x)
           && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
                                     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
          ? 0
          : rtx_cost (x, outer, optimize_this_for_speed_p) * 2);
}
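/* E.g. (subreg:SI (reg:DI 100) 0) costs 0 here on a target where
   truncating DImode to SImode is a no-op, since taking the low part
   needs no instruction; any other expression costs twice its rtx_cost,
   keeping registers strictly cheaper.  */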
/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
        {
          /* Compute a new size that is a power of 2 and no smaller
             than the larger of NREGS and 64.  */
          new_size = (cse_reg_info_table_size
                      ? cse_reg_info_table_size : 64);

          while (new_size < nregs)
            new_size *= 2;
        }
      else
        {
          /* If we need a big table, allocate just enough to hold
             NREGS registers.  */
          new_size = nregs;
        }

      /* Reallocate the table with NEW_SIZE entries.  */
      if (cse_reg_info_table)
        free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
         will all be considered out of date.  We do not touch those
         entries beyond the first NREGS entries to be nice to the
         virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
        cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
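/* Growth policy, illustrated with hypothetical sizes: a first call with
   100 registers allocates 128 entries (64 doubled once); a later call
   with 150 registers doubles 128 to 256.  Only once the table has
   reached 2048 entries does a further grow allocate exactly NREGS
   entries instead of doubling.  */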
/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */
static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}
/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
        {
          struct table_elt *last = first;

          table[i] = NULL;

          while (last->next_same_hash != NULL)
            last = last->next_same_hash;

          /* Now relink this entire hash chain into
             the free element list.  */

          last->next_same_hash = free_element_chain;
          free_element_chain = first;
        }
    }
}
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
         that not only can they not be allocated by the compiler, but
         they cannot be used in substitutions or canonicalizations
         either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER
          || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
          || (new_reg >= FIRST_PSEUDO_REGISTER
              && (firstr < FIRST_PSEUDO_REGISTER
                  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
                      && !bitmap_bit_p (cse_ebb_live_out, firstr))
                  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
                      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
         Otherwise, insert before any non-fixed hard regs that are at the
         end.  Registers of class NO_REGS cannot be used as an
         equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
             && (REGNO_REG_CLASS (lastr) == NO_REGS
                 || ! FIXED_REGNO_P (lastr))
             && new_reg >= FIRST_PSEUDO_REGISTER)
        lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
        reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
        qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
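/* For illustration (hypothetical registers): if the chain for a qty is
   just pseudo 100, recording pseudo 101 that is live out of the EBB
   while 100 is not makes 101 the new head, so later canonicalization
   replaces uses of 100 with 101 rather than the other way around.  */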
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
        {
          if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
            remove_invalid_refs (i);

          REG_IN_TABLE (i) = REG_TICK (i);
          SUBREG_TICKED (i) = -1;
        }

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
        {
          /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
             the last store to this register really stored into this
             subreg, then remove the memory of this subreg.
             Otherwise, remove any memory of the entire register and
             all its subregs from the table.  */
          if (REG_TICK (i) - REG_IN_TABLE (i) > 1
              || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
            remove_invalid_refs (i);
          else
            remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
        }

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
        if (insert_regs (XEXP (x, 0), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 0));
            changed = 1;
          }

      if (REG_P (XEXP (x, 1))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
        if (insert_regs (XEXP (x, 1), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 1));
            changed = 1;
          }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
         wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
        {
          struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

          if (ent->mode != GET_MODE (x))
            return 0;
        }

      if (modified || ! qty_valid)
        {
          if (classp)
            for (classp = classp->first_same_value;
                 classp != 0;
                 classp = classp->next_same_value)
              if (REG_P (classp->exp)
                  && GET_MODE (classp->exp) == GET_MODE (x))
                {
                  unsigned c_regno = REGNO (classp->exp);

                  gcc_assert (REGNO_QTY_VALID_P (c_regno));

                  /* Suppose that 5 is hard reg and 100 and 101 are
                     pseudos.  Consider

                     (set (reg:si 100) (reg:si 5))
                     (set (reg:si 5) (reg:si 100))
                     (set (reg:di 101) (reg:di 5))

                     We would now set REG_QTY (101) = REG_QTY (5), but the
                     entry for 5 is in SImode.  When we use this later in
                     copy propagation, we get the register in wrong mode.  */
                  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
                    continue;

                  make_regs_eqv (regno, c_regno);
                  return 1;
                }

          /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
             than REG_IN_TABLE to find out if there was only a single preceding
             invalidation - for the SUBREG - or another one, which would be
             for the full register.  However, if we find here that REG_TICK
             indicates that the register is invalid, it means that it has
             been invalidated in a separate operation.  The SUBREG might be used
             now (then this is a recursive call), or we might use the full REG
             now and a SUBREG of it later.  So bump up REG_TICK so that
             mention_regs will do the right thing.  */
          if (! modified
              && REG_IN_TABLE (regno) >= 0
              && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
            REG_TICK (regno)++;
          make_new_qty (regno, GET_MODE (x));
          return 1;
        }

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
           && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
        struct table_elt *newfirst = next;
        while (next)
          {
            next->first_same_value = newfirst;
            next = next->next_same_value;
          }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
        /* This entry is not in the proper hash bucket.  This can happen
           when two classes were merged by `merge_equiv_classes'.  Search
           for the hash bucket that it heads.  This happens only very
           rarely, so the cost is acceptable.  */
        for (hash = 0; hash < HASH_SIZE; hash++)
          if (table[hash] == elt)
            table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
        p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
        p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}
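/* E.g. a pseudo used as both (reg:SI 100) and (reg:HI 100) hashes to
   the same bucket (the hash uses only the quantity number), so the
   loop above may remove several entries for one register.  */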
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
                            || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
         invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
        if (REG_P (p->exp)
            && REGNO (p->exp) == regno)
          return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
        if (mode == p->mode
            && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
          return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
        /* Make sure this is a valid entry in the table.  */
        && exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
  (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash,
        enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = XNEW (struct table_elt);

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
        /* Insert at the head of the class.  */
        {
          struct table_elt *p;
          elt->next_same_value = classp;
          classp->prev_same_value = elt;
          elt->first_same_value = elt;

          for (p = classp; p; p = p->next_same_value)
            p->first_same_value = elt;
        }
      else
        {
          /* Insert not at head of the class.  */
          /* Put it after the last element cheaper than X.  */
          struct table_elt *p, *next;

          for (p = classp;
               (next = p->next_same_value) && CHEAPER (next, elt);
               p = next)
            ;

          /* Put it after P and before NEXT.  */
          elt->next_same_value = next;
          if (next)
            next->prev_same_value = elt;

          elt->prev_same_value = p;
          p->next_same_value = elt;
          elt->first_same_value = classp;
        }
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
           && classp
           && ! qty_table[REG_QTY (REGNO (x))].const_rtx
           && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
        {
          if (p->is_const && !REG_P (p->exp))
            {
              int x_q = REG_QTY (REGNO (x));
              struct qty_table_elem *x_ent = &qty_table[x_q];

              x_ent->const_rtx
                = gen_lowpart (GET_MODE (x), p->exp);
              x_ent->const_insn = this_insn;
              break;
            }
        }
    }

  else if (REG_P (x)
           && qty_table[REG_QTY (REGNO (x))].const_rtx
           && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
        {
          /* Get the integer-free subexpression in the hash table.  */
          subhash = SAFE_HASH (subexp, mode);
          subelt = lookup (subexp, subhash, mode);
          if (subelt == 0)
            subelt = insert (subexp, NULL, subhash, mode);
          /* Initialize SUBELT's circular chain if it has none.  */
          if (subelt->related_value == 0)
            subelt->related_value = subelt;
          /* Find the element in the circular chain that precedes SUBELT.  */
          subelt_prev = subelt;
          while (subelt_prev->related_value != subelt)
            subelt_prev = subelt_prev->related_value;
          /* Put new ELT into SUBELT's circular chain just before SUBELT.
             This way the element that follows SUBELT is the oldest one.  */
          elt->related_value = subelt_prev->related_value;
          subelt_prev->related_value = elt;
        }
    }

  return elt;
}
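/* E.g. inserting (const (plus (symbol_ref "x") (const_int 4))) and then
   (const (plus (symbol_ref "x") (const_int 8))) yields the circular
   chain sym -> +4 -> +8 -> sym, with the oldest entry right after the
   integer-free base element.  */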
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new_elt;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
         Don't do this for invalid entries as we cannot find their
         hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
        {
          bool need_rehash = false;

          hash_arg_in_memory = 0;
          hash = HASH (exp, mode);

          if (REG_P (exp))
            {
              need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
              delete_reg_equiv (REGNO (exp));
            }

          if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
            remove_pseudo_from_table (exp, hash);
          else
            remove_from_table (elt, hash);

          if (insert_regs (exp, class1, 0) || need_rehash)
            {
              rehash_using_reg (exp);
              hash = HASH (exp, mode);
            }
          new_elt = insert (exp, class1, hash, mode);
          new_elt->in_memory = hash_arg_in_memory;
        }
    }
}
/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
        /* Note that invalidate can remove elements
           after P in the current hash chain.  */
        if (REG_P (p->exp))
          invalidate (p->exp, VOIDmode);
        else
          remove_from_table (p, i);
      }
}
/* Function called for each rtx to check whether true dependence exists.  */
struct check_dependence_data
{
  enum machine_mode mode;
  rtx exp;
  rtx addr;
};

static int
check_dependence (rtx *x, void *data)
{
  struct check_dependence_data *d = (struct check_dependence_data *) data;
  if (*x && MEM_P (*x))
    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
                                  cse_rtx_varies_p);
  else
    return 0;
}
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */
static void
invalidate (rtx x, enum machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
        /* If X is a register, dependencies on its contents are recorded
           through the qty number mechanism.  Just change the qty number of
           the register, mark it as invalid for expressions that refer to it,
           and remove it itself.  */
        unsigned int regno = REGNO (x);
        unsigned int hash = HASH (x, GET_MODE (x));

        /* Remove REGNO from any quantity list it might be on and indicate
           that its value might have changed.  If it is a pseudo, remove its
           entry from the hash table.

           For a hard register, we do the first two actions above for any
           additional hard registers corresponding to X.  Then, if any of these
           registers are in the table, we must remove any REG entries that
           overlap these registers.  */

        delete_reg_equiv (regno);
        REG_TICK (regno)++;
        SUBREG_TICKED (regno) = -1;

        if (regno >= FIRST_PSEUDO_REGISTER)
          remove_pseudo_from_table (x, hash);
        else
          {
            HOST_WIDE_INT in_table
              = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
            unsigned int endregno = END_HARD_REGNO (x);
            unsigned int tregno, tendregno, rn;
            struct table_elt *p, *next;

            CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

            for (rn = regno + 1; rn < endregno; rn++)
              {
                in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
                CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
                delete_reg_equiv (rn);
                REG_TICK (rn)++;
                SUBREG_TICKED (rn) = -1;
              }

            if (in_table)
              for (hash = 0; hash < HASH_SIZE; hash++)
                for (p = table[hash]; p; p = next)
                  {
                    next = p->next_same_hash;

                    if (!REG_P (p->exp)
                        || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
                      continue;

                    tregno = REGNO (p->exp);
                    tendregno = END_HARD_REGNO (p->exp);
                    if (tendregno > regno && tregno < endregno)
                      remove_from_table (p, hash);
                  }
          }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
        invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
         question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
         true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
         memory.  */
      if (full_mode == VOIDmode)
        full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
        {
          struct table_elt *next;

          for (p = table[i]; p; p = next)
            {
              next = p->next_same_hash;
              if (p->in_memory)
                {
                  struct check_dependence_data d;

                  /* Just canonicalize the expression once;
                     otherwise each time we call invalidate
                     true_dependence will canonicalize the
                     expression again.  */
                  if (!p->canon_exp)
                    p->canon_exp = canon_rtx (p->exp);
                  d.exp = x;
                  d.addr = addr;
                  d.mode = full_mode;
                  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
                    remove_from_table (p, i);
                }
            }
        }
      return;

    default:
      gcc_unreachable ();
    }
}
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (!REG_P (p->exp)
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
          remove_from_table (p, i);
      }
}
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */
static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
                            enum machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        rtx exp = p->exp;
        next = p->next_same_hash;

        if (!REG_P (exp)
            && (GET_CODE (exp) != SUBREG
                || !REG_P (SUBREG_REG (exp))
                || REGNO (SUBREG_REG (exp)) != regno
                || (((SUBREG_BYTE (exp)
                      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
                    && SUBREG_BYTE (exp) <= end))
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
          remove_from_table (p, i);
      }
}
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (reg_mentioned_p (x, p->exp)
            && exp_equiv_p (p->exp, p->exp, 1, false)
            && i != (hash = SAFE_HASH (p->exp, p->mode)))
          {
            if (p->next_same_hash)
              p->next_same_hash->prev_same_hash = p->prev_same_hash;

            if (p->prev_same_hash)
              p->prev_same_hash->next_same_hash = p->next_same_hash;
            else
              table[i] = p->next_same_hash;

            p->next_same_hash = table[hash];
            p->prev_same_hash = 0;
            if (table[hash])
              table[hash]->prev_same_hash = p;
            table[hash] = p;
          }
      }
}
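/* A minimal, self-contained sketch (not compiled; the names are
   hypothetical, not part of this pass) of the bucket move performed
   above: unlink a node from one doubly-linked chain and push it onto
   the front of another, as rehash_using_reg does with the
   next_same_hash/prev_same_hash links of a table_elt.  */
#if 0
struct node { struct node *next, *prev; };

static void
move_to_bucket (struct node **buckets, unsigned from, unsigned to,
                struct node *n)
{
  /* Unlink N from its current chain.  */
  if (n->next)
    n->next->prev = n->prev;
  if (n->prev)
    n->prev->next = n->next;
  else
    buckets[from] = n->next;

  /* Push N onto the front of the target chain.  */
  n->next = buckets[to];
  n->prev = 0;
  if (buckets[to])
    buckets[to]->prev = n;
  buckets[to] = n;
}
#endif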
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call (void)
{
  unsigned int regno, endregno;
  unsigned int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
        delete_reg_equiv (regno);
        if (REG_TICK (regno) >= 0)
          {
            REG_TICK (regno)++;
            SUBREG_TICKED (regno) = -1;
          }

        in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
        {
          next = p->next_same_hash;

          if (!REG_P (p->exp)
              || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
            continue;

          regno = REGNO (p->exp);
          endregno = END_HARD_REGNO (p->exp);

          for (i = regno; i < endregno; i++)
            if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
              {
                remove_from_table (p, hash);
                break;
              }
        }
}
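/* Illustrative sketch only (hypothetical names, not compiled): a
   multi-word hard register value occupies regno .. endregno-1, so it
   must be dropped if any register in that half-open range is
   call-clobbered, which is what the inner loop above checks.  */
#if 0
static int
overlaps_clobbered_p (unsigned regno, unsigned endregno,
                      const unsigned char *clobbered)
{
  unsigned i;
  for (i = regno; i < endregno; i++)
    if (clobbered[i])
      return 1;
  return 0;
}
#endif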
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
        relt = lookup (subexp,
                       SAFE_HASH (subexp, GET_MODE (subexp)),
                       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
         The first is when X is already in the table.  Then it is searching
         the RELATED_VALUE list of X's class (RELT).  The second case is when
         X is not in the table.  Then RELT points to a class for the related
         value.

         Ensure that, whatever case we are in, we ignore classes that have
         the same value as X.  */

      if (rtx_equal_p (x, p->exp))
        q = 0;
      else
        for (q = p->first_same_value; q; q = q->next_same_value)
          if (REG_P (q->exp))
            break;

      if (q)
        break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
         Alternatively, perhaps RELT was in the table for some other reason
         and it has no related values recorded.  */
      if (p == relt || p == 0)
        break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}
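/* For example, if the table records that (reg 65) is equivalent to
   (const (plus (symbol_ref "table") (const_int 4))), then for
   X = (const (plus (symbol_ref "table") (const_int 12))) the related
   chain can yield (plus (reg 65) (const_int 8)): the offset is the
   difference of the two integer terms, 12 - 4.  (Illustrative values
   only.)  */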
/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
   When the callback returns true, we continue with the new rtx.  */

unsigned
hash_rtx_cb (const_rtx x, enum machine_mode mode,
             int *do_not_record_p, int *hash_arg_in_memory_p,
             bool have_reg_qty, hash_rtx_callback_function cb)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;
  enum machine_mode newmode;
  rtx newx;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  /* Invoke the callback first.  */
  if (cb != NULL
      && ((*cb) (x, mode, &newx, &newmode)))
    {
      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
                           hash_arg_in_memory_p, have_reg_qty, cb);
      return hash;
    }

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        unsigned int regno = REGNO (x);

        if (do_not_record_p && !reload_completed)
          {
            /* On some machines, we can't record any non-fixed hard register,
               because extending its life will cause reload problems.  We
               consider ap, fp, sp, gp to be fixed for this purpose.

               We also consider CCmode registers to be fixed for this purpose;
               failure to do so leads to failure to simplify 0<100 type of
               conditionals.

               On all machines, we can't record any global registers.
               Nor should we record any register that is in a small
               class, as defined by CLASS_LIKELY_SPILLED_P.  */
            bool record;

            if (regno >= FIRST_PSEUDO_REGISTER)
              record = true;
            else if (x == frame_pointer_rtx
                     || x == hard_frame_pointer_rtx
                     || x == arg_pointer_rtx
                     || x == stack_pointer_rtx
                     || x == pic_offset_table_rtx)
              record = true;
            else if (global_regs[regno])
              record = false;
            else if (fixed_regs[regno])
              record = true;
            else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
              record = true;
            else if (SMALL_REGISTER_CLASSES)
              record = false;
            else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
              record = false;
            else
              record = true;

            if (!record)
              {
                *do_not_record_p = 1;
                return 0;
              }
          }

        hash += ((unsigned int) REG << 7);
        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
        return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
        if (REG_P (SUBREG_REG (x)))
          {
            hash += (((unsigned int) SUBREG << 7)
                     + REGNO (SUBREG_REG (x))
                     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
            return hash;
          }
        break;
      }

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += hash_rtx_cb (elt, GET_MODE (elt),
                                 do_not_record_p, hash_arg_in_memory_p,
                                 have_reg_qty, cb);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
         know the size of the move.  */
      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
        {
          *do_not_record_p = 1;
          return 0;
        }
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
        *hash_arg_in_memory_p = 1;

      /* Now that we have already found this special case,
         might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
         handling since the MEM may be BLKmode which normally
         prevents an entry from being made.  Pure calls are
         marked by a USE which mentions BLKmode memory.
         See calls.c:emit_call_1.  */
      if (MEM_P (XEXP (x, 0))
          && ! MEM_VOLATILE_P (XEXP (x, 0)))
        {
          hash += (unsigned) USE;
          x = XEXP (x, 0);

          if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
            *hash_arg_in_memory_p = 1;

          /* Now that we have already found this special case,
             might as well speed it up as much as possible.  */
          hash += (unsigned) MEM;
          x = XEXP (x, 0);
          goto repeat;
        }
      break;

    case UNSPEC_VOLATILE:
      if (do_not_record_p)
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        return hash;

    case ASM_OPERANDS:
      if (do_not_record_p && MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
            + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
                                      GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                      do_not_record_p, hash_arg_in_memory_p,
                                      have_reg_qty, cb)
                         + hash_rtx_string
                           (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));

              hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }

          return hash;
        }

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_rtx_cb (XEXP (x, i), 0, do_not_record_p,
                               hash_arg_in_memory_p,
                               have_reg_qty, cb);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            hash += hash_rtx_cb (XVECEXP (x, i, j), 0, do_not_record_p,
                                 hash_arg_in_memory_p,
                                 have_reg_qty, cb);
          break;

        case 's':
          hash += hash_rtx_string (XSTR (x, i));
          break;

        case 'i':
          hash += (unsigned int) XINT (x, i);
          break;

        default:
          break;
        }
    }

  return hash;
}
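/* A toy model (hypothetical names, not compiled) of the callback
   protocol used above: before a value is hashed, the callback may
   replace it, and hashing then continues with the replacement.  */
#if 0
typedef int (*rewrite_fn) (int value, int *replacement);

static unsigned
toy_hash (const int *vals, int n, rewrite_fn cb)
{
  unsigned h = 0;
  int i;
  for (i = 0; i < n; i++)
    {
      int v = vals[i], r;
      if (cb && cb (v, &r))
        v = r;                  /* Continue with the new value.  */
      h += (h << 7) + (unsigned) v;
    }
  return h;
}
#endif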
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which does not have the RTX_UNCHANGING_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
          int *hash_arg_in_memory_p, bool have_reg_qty)
{
  return hash_rtx_cb (x, mode, do_not_record_p,
                      hash_arg_in_memory_p, have_reg_qty, NULL);
}
/* Hash an rtx X for cse via hash_rtx.
   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   does not have the RTX_UNCHANGING_P bit set.  */

static inline unsigned
canon_hash (rtx x, enum machine_mode mode)
{
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}
/* Like canon_hash but with no side effects, i.e. do_not_record
   and hash_arg_in_memory are not changed.  */

static inline unsigned
safe_hash (rtx x, enum machine_mode mode)
{
  int dummy_do_not_record;
  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
}
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */

int
exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
        return REGNO (x) == REGNO (y);
      else
        {
          unsigned int regno = REGNO (y);
          unsigned int i;
          unsigned int endregno = END_REGNO (y);

          /* If the quantities are not the same, the expressions are not
             equivalent.  If the quantities are the same and we are not
             validating, they are equivalent.  Otherwise, ensure all regs
             are up-to-date.  */

          if (REG_QTY (REGNO (x)) != REG_QTY (regno))
            return 0;

          if (! validate)
            return 1;

          for (i = regno; i < endregno; i++)
            if (REG_IN_TABLE (i) != REG_TICK (i))
              return 0;

          return 1;
        }

    case MEM:
      if (for_gcse)
        {
          /* A volatile mem should not be considered equivalent to any
             other.  */
          if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
            return 0;

          /* Can't merge two expressions in different alias sets, since we
             can decide that the expression is transparent in a block when
             it isn't, due to it being set with the different alias set.

             Also, can't merge two expressions with different MEM_ATTRS.
             They could e.g. be two different entities allocated into the
             same space on the stack (see e.g. PR25130).  In that case, the
             MEM addresses can be the same, even though the two MEMs are
             absolutely not equivalent.

             But because really all MEM attributes should be the same for
             equivalent MEMs, we just use the invariant that MEMs that have
             the same attributes share the same mem_attrs data structure.  */
          if (MEM_ATTRS (x) != MEM_ATTRS (y))
            return 0;
        }
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
                            validate, for_gcse)
               && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
                               validate, for_gcse))
              || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
                               validate, for_gcse)
                  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
                                  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
         disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      if (GET_MODE (x) != GET_MODE (y)
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
        return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
                               ASM_OPERANDS_INPUT (y, i),
                               validate, for_gcse)
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
              return 0;
        }

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
                             validate, for_gcse))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
                               validate, for_gcse))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        default:
          break;
        }
    }

  return 1;
}
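/* Sketch (hypothetical names, not compiled) of the commutative check
   used above: two applications of a commutative operator are equal if
   the operands match in either order.  */
#if 0
static int
commutative_equal_p (int x0, int x1, int y0, int y1)
{
  return (x0 == y0 && x1 == y1) || (x0 == y1 && x1 == y0);
}
#endif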
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.  */

static bool
cse_rtx_varies_p (const_rtx x, bool from_alias)
{
  /* We need not check for X and the equivalence class being of the same
     mode because if X is equivalent to a constant in some mode, it
     doesn't vary in any mode.  */

  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (GET_MODE (x) == x_ent->mode
          && x_ent->const_rtx != NULL_RTX)
        return 0;
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && REG_P (XEXP (x, 0))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
          && x0_ent->const_rtx != NULL_RTX)
        return 0;
    }

  /* This can happen as the result of virtual register instantiation, if
     the initial constant is too large to be a valid address.  This gives
     us a three instruction sequence, load large offset into a register,
     load fp minus a constant into a register, then a MEM which is the
     sum of the two `constant' registers.  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && REG_P (XEXP (x, 1))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];
      struct qty_table_elem *x1_ent = &qty_table[x1_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
          && x0_ent->const_rtx != NULL_RTX
          && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
          && x1_ent->const_rtx != NULL_RTX)
        return 0;
    }

  return rtx_varies_p (x, from_alias);
}
/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
   the result if necessary.  INSN is as for canon_reg.  */

static void
validate_canon_reg (rtx *xloc, rtx insn)
{
  if (*xloc)
    {
      rtx new_rtx = canon_reg (*xloc, insn);

      /* If replacing pseudo with hard reg or vice versa, ensure the
         insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
      gcc_assert (insn && new_rtx);
      validate_change (insn, xloc, new_rtx, 1);
    }
}
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        int first;
        int q;
        struct qty_table_elem *ent;

        /* Never replace a hard reg, because hard regs can appear
           in more than one machine mode, and we must preserve the mode
           of each occurrence.  Also, some hard regs appear in
           MEMs that are shared and mustn't be altered.  Don't try to
           replace any reg that maps to a reg of class NO_REGS.  */
        if (REGNO (x) < FIRST_PSEUDO_REGISTER
            || ! REGNO_QTY_VALID_P (REGNO (x)))
          return x;

        q = REG_QTY (REGNO (x));
        ent = &qty_table[q];
        first = ent->first_reg;
        return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
                : REGNO_REG_CLASS (first) == NO_REGS ? x
                : gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
        validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
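/* For example, if (reg 70) and (reg 120) currently share a quantity
   and (reg 70) heads the register chain (its lifespan is least
   local), uses of (reg 120) are rewritten into (reg 70).
   (Illustrative register numbers.)  */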
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   see what values are being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
                      enum machine_mode *pmode1, enum machine_mode *pmode2)
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
         On machines with CC0, this is the only case that can occur, since
         fold_rtx will return the COMPARE or item being compared with zero
         when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
        x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
         STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
        {
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          if (code == NE
              || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
              || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
                  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                      REAL_VALUE_NEGATIVE (fsfv)))
#endif
              )
            x = arg1;
          else if (code == EQ
                   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                           REAL_VALUE_NEGATIVE (fsfv)))
#endif
                   )
            x = arg1, reverse_code = 1;
        }

      /* ??? We could also check for

         (ne (and (eq (...) (const_int 1))) (const_int 0))

         and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
        /* Look up ARG1 in the hash table and see if it has an equivalence
           that lets us see what is being compared.  */
        p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
        {
          p = p->first_same_value;

          /* If what we compare is already known to be constant, that is as
             good as it gets.
             We need to break the loop in this case, because otherwise we
             can have an infinite loop when looking at a reg that is known
             to be a constant which is the same as a comparison of a reg
             against zero which appears later in the insn stream, which in
             turn is constant and the same as the comparison of the first reg
             against zero...  */
          if (p->is_const)
            break;
        }

      for (; p; p = p->next_same_value)
        {
          enum machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* If the entry isn't valid, skip it.  */
          if (! exp_equiv_p (p->exp, p->exp, 1, false))
            continue;

          if (GET_CODE (p->exp) == COMPARE
              /* Another possibility is that this machine has a compare insn
                 that includes the comparison code.  In that case, ARG1 would
                 be equivalent to a comparison operation that would set ARG1 to
                 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
                 ORIG_CODE is the actual comparison being done; if it is an EQ,
                 we must reverse ORIG_CODE.  On machine with a negative value
                 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
              || ((code == NE
                   || (code == LT
                       && GET_MODE_CLASS (inner_mode) == MODE_INT
                       && (GET_MODE_BITSIZE (inner_mode)
                           <= HOST_BITS_PER_WIDE_INT)
                       && (STORE_FLAG_VALUE
                           & ((HOST_WIDE_INT) 1
                              << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (code == LT
                       && SCALAR_FLOAT_MODE_P (inner_mode)
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                           REAL_VALUE_NEGATIVE (fsfv)))
#endif
                   )
                  && COMPARISON_P (p->exp)))
            {
              x = p->exp;
              break;
            }
          else if ((code == EQ
                    || (code == GE
                        && GET_MODE_CLASS (inner_mode) == MODE_INT
                        && (GET_MODE_BITSIZE (inner_mode)
                            <= HOST_BITS_PER_WIDE_INT)
                        && (STORE_FLAG_VALUE
                            & ((HOST_WIDE_INT) 1
                               << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                    || (code == GE
                        && SCALAR_FLOAT_MODE_P (inner_mode)
                        && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                            REAL_VALUE_NEGATIVE (fsfv)))
#endif
                    )
                   && COMPARISON_P (p->exp))
            {
              reverse_code = 1;
              x = p->exp;
              break;
            }

          /* If this is a non-trapping address, e.g. fp + constant, the
             equivalent is a better operand since it may let us predict
             the value of the comparison.  */
          else if (!rtx_addr_can_trap_p (p->exp))
            {
              arg1 = p->exp;
              continue;
            }
        }

      /* If we didn't find a useful equivalence for ARG1, we are done.
         Otherwise, set up for the next iteration.  */
      if (x == 0)
        break;

      /* If we need to reverse the comparison, make sure that that is
         possible -- we can't necessarily infer the value of GE from LT
         with floating-point operands.  */
      if (reverse_code)
        {
          enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
          if (reversed == UNKNOWN)
            break;
          else
            code = reversed;
        }
      else if (COMPARISON_P (x))
        code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
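/* For example, given

       (set (reg 80) (gt:SI (reg 75) (const_int 10)))
       ... (eq (reg 80) (const_int 0)) ...

   tracing (reg 80) through the hash table turns the EQ test against
   zero into (le (reg 75) (const_int 10)), the reverse of the recorded
   comparison.  (Illustrative register numbers.)  */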
/* If X is a nontrivial arithmetic operation on an argument for which
   a constant value can be determined, return the result of operating
   on that value, as a constant.  Otherwise, return X, possibly with
   one or more operands changed to a forward-propagated constant.

   If X is a register whose contents are known, we do NOT return
   those contents here; equiv_constant is called to perform that task.
   For SUBREGs and MEMs, we do that both here and in equiv_constant.

   INSN is the insn that we may be modifying.  If it is 0, make a copy
   of X before modifying it.  */

static rtx
fold_rtx (rtx x, rtx insn)
{
  enum rtx_code code;
  enum machine_mode mode;
  const char *fmt;
  int i;
  rtx new_rtx = 0;
  int changed = 0;

  /* Operands of X.  */
  rtx folded_arg0;
  rtx folded_arg1;

  /* Constant equivalents of first three operands of X;
     0 when no such equivalent is known.  */
  rtx const_arg0;
  rtx const_arg1;
  rtx const_arg2;

  /* The mode of the first operand of X.  We need this for sign and zero
     extends.  */
  enum machine_mode mode_arg0;

  if (x == 0)
    return x;

  /* Try to perform some initial simplifications on X.  */
  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
    case SUBREG:
      if ((new_rtx = equiv_constant (x)) != NULL_RTX)
        return new_rtx;
      return x;

      /* No use simplifying an EXPR_LIST
         since they are used only for lists of args
         in a function call's REG_EQUAL note.  */
    case EXPR_LIST:
      return x;

#ifdef HAVE_cc0
    case CC0:
      return prev_insn_cc0;
#endif

    case ASM_OPERANDS:
      if (insn)
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
                             fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
        }
      return x;

#ifdef NO_FUNCTION_CSE
    case CALL:
      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
        return x;
      break;
#endif

    /* Anything else goes through the loop below.  */
    default:
      break;
    }
  mode = GET_MODE (x);
  const_arg0 = 0;
  const_arg1 = 0;
  const_arg2 = 0;
  mode_arg0 = VOIDmode;

  /* Try folding our operands.
     Then see which ones have constant values known.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        rtx folded_arg = XEXP (x, i), const_arg;
        enum machine_mode mode_arg = GET_MODE (folded_arg);

        switch (GET_CODE (folded_arg))
          {
          case MEM:
          case REG:
          case SUBREG:
            const_arg = equiv_constant (folded_arg);
            break;

          case CONST:
          case CONST_INT:
          case SYMBOL_REF:
          case LABEL_REF:
          case CONST_DOUBLE:
          case CONST_FIXED:
          case CONST_VECTOR:
            const_arg = folded_arg;
            break;

#ifdef HAVE_cc0
          case CC0:
            folded_arg = prev_insn_cc0;
            mode_arg = prev_insn_cc0_mode;
            const_arg = equiv_constant (folded_arg);
            break;
#endif

          default:
            folded_arg = fold_rtx (folded_arg, insn);
            const_arg = equiv_constant (folded_arg);
            break;
          }

        /* For the first three operands, see if the operand
           is constant or equivalent to a constant.  */
        switch (i)
          {
          case 0:
            folded_arg0 = folded_arg;
            const_arg0 = const_arg;
            mode_arg0 = mode_arg;
            break;
          case 1:
            folded_arg1 = folded_arg;
            const_arg1 = const_arg;
            break;
          case 2:
            const_arg2 = const_arg;
            break;
          }

        /* Pick the least expensive of the argument and an equivalent constant
           argument.  */
        if (const_arg != 0
            && const_arg != folded_arg
            && COST_IN (const_arg, code) <= COST_IN (folded_arg, code)

            /* It's not safe to substitute the operand of a conversion
               operator with a constant, as the conversion's identity
               depends upon the mode of its operand.  This optimization
               is handled by the call to simplify_unary_operation.  */
            && (GET_RTX_CLASS (code) != RTX_UNARY
                || GET_MODE (const_arg) == mode_arg0
                || (code != ZERO_EXTEND
                    && code != SIGN_EXTEND
                    && code != TRUNCATE
                    && code != FLOAT_TRUNCATE
                    && code != FLOAT_EXTEND
                    && code != FLOAT
                    && code != FIX
                    && code != UNSIGNED_FLOAT
                    && code != UNSIGNED_FIX)))
          folded_arg = const_arg;

        if (folded_arg == XEXP (x, i))
          continue;

        if (insn == NULL_RTX && !changed)
          x = copy_rtx (x);
        changed = 1;
        validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
      }

  if (changed)
    {
      /* Canonicalize X if necessary, and keep const_argN and folded_argN
         consistent with the order in X.  */
      if (canonicalize_change_group (insn, x))
        {
          rtx tem;
          tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
          tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
        }

      apply_change_group ();
    }
  /* If X is an arithmetic operation, see if we can simplify it.  */

  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      /* We can't simplify extension ops unless we know the
         original mode.  */
      if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
          && mode_arg0 == VOIDmode)
        break;

      new_rtx = simplify_unary_operation (code, mode,
                                          const_arg0 ? const_arg0
                                                     : folded_arg0,
                                          mode_arg0);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      /* See what items are actually being compared and set FOLDED_ARG[01]
         to those values and CODE to the actual comparison code.  If any are
         constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
         do anything if both operands are already known to be constant.  */

      /* ??? Vector mode comparisons are not supported yet.  */
      if (VECTOR_MODE_P (mode))
        break;

      if (const_arg0 == 0 || const_arg1 == 0)
        {
          struct table_elt *p0, *p1;
          rtx true_rtx, false_rtx;
          enum machine_mode mode_arg1;

          if (SCALAR_FLOAT_MODE_P (mode))
            {
#ifdef FLOAT_STORE_FLAG_VALUE
              true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
                          (FLOAT_STORE_FLAG_VALUE (mode), mode));
#else
              true_rtx = NULL_RTX;
#endif
              false_rtx = CONST0_RTX (mode);
            }
          else
            {
              true_rtx = const_true_rtx;
              false_rtx = const0_rtx;
            }

          code = find_comparison_args (code, &folded_arg0, &folded_arg1,
                                       &mode_arg0, &mode_arg1);

          /* If the mode is VOIDmode or a MODE_CC mode, we don't know
             what kinds of things are being compared, so we can't do
             anything with this comparison.  */

          if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
            break;

          const_arg0 = equiv_constant (folded_arg0);
          const_arg1 = equiv_constant (folded_arg1);

          /* If we do not now have two constants being compared, see
             if we can nevertheless deduce some things about the
             comparison.  */
          if (const_arg0 == 0 || const_arg1 == 0)
            {
              if (const_arg1 != NULL)
                {
                  rtx cheapest_simplification;
                  int cheapest_cost;
                  rtx simp_result;
                  struct table_elt *p;

                  /* See if we can find an equivalent of folded_arg0
                     that gets us a cheaper expression, possibly a
                     constant through simplifications.  */
                  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
                              mode_arg0);

                  if (p != NULL)
                    {
                      cheapest_simplification = x;
                      cheapest_cost = COST (x);

                      for (p = p->first_same_value; p != NULL;
                           p = p->next_same_value)
                        {
                          int cost;

                          /* If the entry isn't valid, skip it.  */
                          if (! exp_equiv_p (p->exp, p->exp, 1, false))
                            continue;

                          /* Try to simplify using this equivalence.  */
                          simp_result
                            = simplify_relational_operation (code, mode,
                                                             mode_arg0,
                                                             p->exp,
                                                             const_arg1);

                          if (simp_result == NULL)
                            continue;

                          cost = COST (simp_result);
                          if (cost < cheapest_cost)
                            {
                              cheapest_cost = cost;
                              cheapest_simplification = simp_result;
                            }
                        }

                      /* If we have a cheaper expression now, use that
                         and try folding it further, from the top.  */
                      if (cheapest_simplification != x)
                        return fold_rtx (copy_rtx (cheapest_simplification),
                                         insn);
                    }
                }

              /* See if the two operands are the same.  */

              if ((REG_P (folded_arg0)
                   && REG_P (folded_arg1)
                   && (REG_QTY (REGNO (folded_arg0))
                       == REG_QTY (REGNO (folded_arg1))))
                  || ((p0 = lookup (folded_arg0,
                                    SAFE_HASH (folded_arg0, mode_arg0),
                                    mode_arg0))
                      && (p1 = lookup (folded_arg1,
                                       SAFE_HASH (folded_arg1, mode_arg0),
                                       mode_arg0))
                      && p0->first_same_value == p1->first_same_value))
                folded_arg1 = folded_arg0;

              /* If FOLDED_ARG0 is a register, see if the comparison we are
                 doing now is either the same as we did before or the reverse
                 (we only check the reverse if not floating-point).  */
              else if (REG_P (folded_arg0))
                {
                  int qty = REG_QTY (REGNO (folded_arg0));

                  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
                    {
                      struct qty_table_elem *ent = &qty_table[qty];

                      if ((comparison_dominates_p (ent->comparison_code, code)
                           || (! FLOAT_MODE_P (mode_arg0)
                               && comparison_dominates_p (ent->comparison_code,
                                                          reverse_condition (code))))
                          && (rtx_equal_p (ent->comparison_const, folded_arg1)
                              || (const_arg1
                                  && rtx_equal_p (ent->comparison_const,
                                                  const_arg1))
                              || (REG_P (folded_arg1)
                                  && (REG_QTY (REGNO (folded_arg1))
                                      == ent->comparison_qty))))
                        {
                          if (comparison_dominates_p (ent->comparison_code,
                                                      code))
                            {
                              if (true_rtx)
                                return true_rtx;
                              else
                                break;
                            }
                          else
                            return false_rtx;
                        }
                    }
                }
            }
        }

      /* If we are comparing against zero, see if the first operand is
         equivalent to an IOR with a constant.  If so, we may be able to
         determine the result of this comparison.  */
      if (const_arg1 == const0_rtx && !const_arg0)
        {
          rtx y = lookup_as_function (folded_arg0, IOR);
          rtx inner_const;

          if (y != 0
              && (inner_const = equiv_constant (XEXP (y, 1))) != 0
              && GET_CODE (inner_const) == CONST_INT
              && INTVAL (inner_const) != 0)
            folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
        }

      {
        rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
        rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
        new_rtx = simplify_relational_operation (code, mode, mode_arg0,
                                                 op0, op1);
      }
      break;
:
3368 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3369 with that LABEL_REF as its second operand. If so, the result is
3370 the first operand of that MINUS. This handles switches with an
3371 ADDR_DIFF_VEC table. */
3372 if (const_arg1
&& GET_CODE (const_arg1
) == LABEL_REF
)
3375 = GET_CODE (folded_arg0
) == MINUS
? folded_arg0
3376 : lookup_as_function (folded_arg0
, MINUS
);
3378 if (y
!= 0 && GET_CODE (XEXP (y
, 1)) == LABEL_REF
3379 && XEXP (XEXP (y
, 1), 0) == XEXP (const_arg1
, 0))
3382 /* Now try for a CONST of a MINUS like the above. */
3383 if ((y
= (GET_CODE (folded_arg0
) == CONST
? folded_arg0
3384 : lookup_as_function (folded_arg0
, CONST
))) != 0
3385 && GET_CODE (XEXP (y
, 0)) == MINUS
3386 && GET_CODE (XEXP (XEXP (y
, 0), 1)) == LABEL_REF
3387 && XEXP (XEXP (XEXP (y
, 0), 1), 0) == XEXP (const_arg1
, 0))
3388 return XEXP (XEXP (y
, 0), 0);
3391 /* Likewise if the operands are in the other order. */
3392 if (const_arg0
&& GET_CODE (const_arg0
) == LABEL_REF
)
3395 = GET_CODE (folded_arg1
) == MINUS
? folded_arg1
3396 : lookup_as_function (folded_arg1
, MINUS
);
3398 if (y
!= 0 && GET_CODE (XEXP (y
, 1)) == LABEL_REF
3399 && XEXP (XEXP (y
, 1), 0) == XEXP (const_arg0
, 0))
3402 /* Now try for a CONST of a MINUS like the above. */
3403 if ((y
= (GET_CODE (folded_arg1
) == CONST
? folded_arg1
3404 : lookup_as_function (folded_arg1
, CONST
))) != 0
3405 && GET_CODE (XEXP (y
, 0)) == MINUS
3406 && GET_CODE (XEXP (XEXP (y
, 0), 1)) == LABEL_REF
3407 && XEXP (XEXP (XEXP (y
, 0), 1), 0) == XEXP (const_arg0
, 0))
3408 return XEXP (XEXP (y
, 0), 0);
3411 /* If second operand is a register equivalent to a negative
3412 CONST_INT, see if we can find a register equivalent to the
3413 positive constant. Make a MINUS if so. Don't do this for
3414 a non-negative constant since we might then alternate between
3415 choosing positive and negative constants. Having the positive
3416 constant previously-used is the more common case. Be sure
3417 the resulting constant is non-negative; if const_arg1 were
3418 the smallest negative number this would overflow: depending
3419 on the mode, this would either just be the same value (and
3420 hence not save anything) or be incorrect. */
3421 if (const_arg1
!= 0 && GET_CODE (const_arg1
) == CONST_INT
3422 && INTVAL (const_arg1
) < 0
3423 /* This used to test
3425 -INTVAL (const_arg1) >= 0
3427 But The Sun V5.0 compilers mis-compiled that test. So
3428 instead we test for the problematic value in a more direct
3429 manner and hope the Sun compilers get it correct. */
3430 && INTVAL (const_arg1
) !=
3431 ((HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1))
3432 && REG_P (folded_arg1
))
3434 rtx new_const
= GEN_INT (-INTVAL (const_arg1
));
3436 = lookup (new_const
, SAFE_HASH (new_const
, mode
), mode
);
3439 for (p
= p
->first_same_value
; p
; p
= p
->next_same_value
)
3441 return simplify_gen_binary (MINUS
, mode
, folded_arg0
,
3442 canon_reg (p
->exp
, NULL_RTX
));
3447 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3448 If so, produce (PLUS Z C2-C). */
3449 if (const_arg1
!= 0 && GET_CODE (const_arg1
) == CONST_INT
)
3451 rtx y
= lookup_as_function (XEXP (x
, 0), PLUS
);
3452 if (y
&& GET_CODE (XEXP (y
, 1)) == CONST_INT
)
3453 return fold_rtx (plus_constant (copy_rtx (y
),
3454 -INTVAL (const_arg1
)),
3461 case SMIN
: case SMAX
: case UMIN
: case UMAX
:
3462 case IOR
: case AND
: case XOR
:
3464 case ASHIFT
: case LSHIFTRT
: case ASHIFTRT
:
3465 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3466 is known to be of similar form, we may be able to replace the
3467 operation with a combined operation. This may eliminate the
3468 intermediate operation if every use is simplified in this way.
3469 Note that the similar optimization done by combine.c only works
3470 if the intermediate operation's result has only one reference. */
3472 if (REG_P (folded_arg0
)
3473 && const_arg1
&& GET_CODE (const_arg1
) == CONST_INT
)
3476 = (code
== ASHIFT
|| code
== ASHIFTRT
|| code
== LSHIFTRT
);
3477 rtx y
, inner_const
, new_const
;
3478 enum rtx_code associate_code
;
3481 && (INTVAL (const_arg1
) >= GET_MODE_BITSIZE (mode
)
3482 || INTVAL (const_arg1
) < 0))
3484 if (SHIFT_COUNT_TRUNCATED
)
3485 const_arg1
= GEN_INT (INTVAL (const_arg1
)
3486 & (GET_MODE_BITSIZE (mode
) - 1));
3491 y
= lookup_as_function (folded_arg0
, code
);
3495 /* If we have compiled a statement like
3496 "if (x == (x & mask1))", and now are looking at
3497 "x & mask2", we will have a case where the first operand
3498 of Y is the same as our first operand. Unless we detect
3499 this case, an infinite loop will result. */
3500 if (XEXP (y
, 0) == folded_arg0
)
3503 inner_const
= equiv_constant (fold_rtx (XEXP (y
, 1), 0));
3504 if (!inner_const
|| GET_CODE (inner_const
) != CONST_INT
)
3507 /* Don't associate these operations if they are a PLUS with the
3508 same constant and it is a power of two. These might be doable
3509 with a pre- or post-increment. Similarly for two subtracts of
3510 identical powers of two with post decrement. */
3512 if (code
== PLUS
&& const_arg1
== inner_const
3513 && ((HAVE_PRE_INCREMENT
3514 && exact_log2 (INTVAL (const_arg1
)) >= 0)
3515 || (HAVE_POST_INCREMENT
3516 && exact_log2 (INTVAL (const_arg1
)) >= 0)
3517 || (HAVE_PRE_DECREMENT
3518 && exact_log2 (- INTVAL (const_arg1
)) >= 0)
3519 || (HAVE_POST_DECREMENT
3520 && exact_log2 (- INTVAL (const_arg1
)) >= 0)))
3523 /* ??? Vector mode shifts by scalar
3524 shift operand are not supported yet. */
3525 if (is_shift
&& VECTOR_MODE_P (mode
))
3529 && (INTVAL (inner_const
) >= GET_MODE_BITSIZE (mode
)
3530 || INTVAL (inner_const
) < 0))
3532 if (SHIFT_COUNT_TRUNCATED
)
3533 inner_const
= GEN_INT (INTVAL (inner_const
)
3534 & (GET_MODE_BITSIZE (mode
) - 1));
3539 /* Compute the code used to compose the constants. For example,
3540 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3542 associate_code
= (is_shift
|| code
== MINUS
? PLUS
: code
);
3544 new_const
= simplify_binary_operation (associate_code
, mode
,
3545 const_arg1
, inner_const
);
3550 /* If we are associating shift operations, don't let this
3551 produce a shift of the size of the object or larger.
3552 This could occur when we follow a sign-extend by a right
3553 shift on a machine that does a sign-extend as a pair
3557 && GET_CODE (new_const
) == CONST_INT
3558 && INTVAL (new_const
) >= GET_MODE_BITSIZE (mode
))
3560 /* As an exception, we can turn an ASHIFTRT of this
3561 form into a shift of the number of bits - 1. */
3562 if (code
== ASHIFTRT
)
3563 new_const
= GEN_INT (GET_MODE_BITSIZE (mode
) - 1);
3564 else if (!side_effects_p (XEXP (y
, 0)))
3565 return CONST0_RTX (mode
);
3570 y
= copy_rtx (XEXP (y
, 0));
3572 /* If Y contains our first operand (the most common way this
3573 can happen is if Y is a MEM), we would do into an infinite
3574 loop if we tried to fold it. So don't in that case. */
3576 if (! reg_mentioned_p (folded_arg0
, y
))
3577 y
= fold_rtx (y
, insn
);
3579 return simplify_gen_binary (code
, mode
, y
, new_const
);
3583 case DIV
: case UDIV
:
3584 /* ??? The associative optimization performed immediately above is
3585 also possible for DIV and UDIV using associate_code of MULT.
3586 However, we would need extra code to verify that the
3587 multiplication does not overflow, that is, there is no overflow
3588 in the calculation of new_const. */
3595 new_rtx
= simplify_binary_operation (code
, mode
,
3596 const_arg0
? const_arg0
: folded_arg0
,
3597 const_arg1
? const_arg1
: folded_arg1
);
3601 /* (lo_sum (high X) X) is simply X. */
3602 if (code
== LO_SUM
&& const_arg0
!= 0
3603 && GET_CODE (const_arg0
) == HIGH
3604 && rtx_equal_p (XEXP (const_arg0
, 0), const_arg1
))
3609 case RTX_BITFIELD_OPS
:
3610 new_rtx
= simplify_ternary_operation (code
, mode
, mode_arg0
,
3611 const_arg0
? const_arg0
: folded_arg0
,
3612 const_arg1
? const_arg1
: folded_arg1
,
3613 const_arg2
? const_arg2
: XEXP (x
, 2));
3620 return new_rtx
? new_rtx
: x
;
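/* A toy model (hypothetical, not compiled) of the re-association done
   above for shifts: (x << c1) << c2 is folded to x << (c1 + c2) when
   the combined count stays inside the mode, assuming each individual
   count is already less than BITS.  */
#if 0
static unsigned
fold_double_shift (unsigned x, unsigned c1, unsigned c2, unsigned bits)
{
  return (c1 + c2 < bits) ? x << (c1 + c2) : 0;
}
#endif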
/* Return a constant value currently equivalent to X.
   Return 0 if we don't know one.  */

static rtx
equiv_constant (rtx x)
{
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (x_ent->const_rtx)
        x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
    }

  if (x == 0 || CONSTANT_P (x))
    return x;

  if (GET_CODE (x) == SUBREG)
    {
      rtx new_rtx;

      /* See if we previously assigned a constant value to this SUBREG.  */
      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
          || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
          || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
        return new_rtx;

      if (REG_P (SUBREG_REG (x))
          && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
        return simplify_subreg (GET_MODE (x), SUBREG_REG (x),
                                GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));

      return 0;
    }

  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
     the hash table in case its value was seen before.  */

  if (MEM_P (x))
    {
      struct table_elt *elt;

      x = avoid_constant_pool_reference (x);
      if (CONSTANT_P (x))
        return x;

      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
      if (elt == 0)
        return 0;

      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
        if (elt->is_const && CONSTANT_P (elt->exp))
          return elt->exp;
    }

  return 0;
}
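/* For example, if (reg 90) was loaded from (const_int 42) earlier in
   this extended basic block, equiv_constant ((reg 90)) returns
   (const_int 42), and for (subreg:QI (reg 90) 0) the constant is
   folded through simplify_subreg.  (Illustrative register number.)  */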
/* Given INSN, a jump insn, TAKEN indicates if we are following the
   "taken" branch or the "not taken" branch.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of

        if (i == 2)

   we can add the fact that `i' and '2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */

static void
record_jump_equiv (rtx insn, bool taken)
{
  int cond_known_true;
  rtx op0, op1;
  rtx set;
  enum machine_mode mode, mode0, mode1;
  int reversed_nonequality = 0;
  enum rtx_code code;

  /* Ensure this is the right kind of insn.  */
  gcc_assert (any_condjump_p (insn));

  set = pc_set (insn);

  /* See if this jump condition is known true or false.  */
  if (taken)
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
  else
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);

  /* Get the type of comparison being done and the operands being compared.
     If we had to reverse a non-equality condition, record that fact so we
     know that it isn't valid for floating-point.  */
  code = GET_CODE (XEXP (SET_SRC (set), 0));
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);

  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
  if (! cond_known_true)
    {
      code = reversed_comparison_code_parts (code, op0, op1, insn);

      /* Don't remember if we can't find the inverse.  */
      if (code == UNKNOWN)
        return;

      reversed_nonequality = (code != EQ && code != NE);
    }

  /* The mode is the mode of the non-constant.  */
  mode = mode0;
  if (mode1 != VOIDmode)
    mode = mode1;

  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}
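/* For example, after following the taken arm of

       (set (pc) (if_then_else (eq (reg 66) (const_int 2))
                               (label_ref 23) (pc)))

   the code above reduces the condition to its operands, and
   record_jump_cond then merges the classes of (reg 66) and
   (const_int 2).  (Illustrative operands.)  */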
/* Yet another form of subreg creation.  In this case, we want something in
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */

static rtx
record_jump_cond_subreg (enum machine_mode mode, rtx op)
{
  enum machine_mode op_mode = GET_MODE (op);
  if (op_mode == mode || op_mode == VOIDmode)
    return op;
  return lowpart_subreg (mode, op, op_mode);
}
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */

static void
record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
                  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && GET_CODE (op0) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op0))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == EQ && GET_CODE (op1) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op1))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Hash both operands.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.  Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
         register, or if OP1 is neither a register nor a constant, we
         can't do anything.  */

      if (!REG_P (op1))
        op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
          || !REG_P (op0) || op1 == 0)
        return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
         new quantity number.  */
      if (op0_elt == 0)
        {
          if (insert_regs (op0, NULL, 0))
            {
              rehash_using_reg (op0);
              op0_hash = HASH (op0, mode);

              /* If OP0 is contained in OP1, this changes its hash code
                 as well.  Faster to rehash than to check, except
                 for the simple case of a constant.  */
              if (! CONSTANT_P (op1))
                op1_hash = HASH (op1, mode);
            }

          op0_elt = insert (op0, NULL, op0_hash, mode);
          op0_elt->in_memory = op0_in_memory;
        }

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
        {
          /* Look it up again--in case op0 and op1 are the same.  */
          op1_elt = lookup (op1, op1_hash, mode);

          /* Put OP1 in the hash table so it gets a new quantity number.  */
          if (op1_elt == 0)
            {
              if (insert_regs (op1, NULL, 0))
                {
                  rehash_using_reg (op1);
                  op1_hash = HASH (op1, mode);
                }

              op1_elt = insert (op1, NULL, op1_hash, mode);
              op1_elt->in_memory = op1_in_memory;
            }

          ent->comparison_const = NULL_RTX;
          ent->comparison_qty = REG_QTY (REGNO (op1));
        }
      else
        {
          ent->comparison_const = op1;
          ent->comparison_qty = -1;
        }

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
        {
          rehash_using_reg (op0);
          op0_hash = HASH (op0, mode);
        }

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
        {
          rehash_using_reg (op1);
          op1_hash = HASH (op1, mode);
        }

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}
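/* For example, if a paradoxical (subreg:DI (reg:SI 70) 0) is known EQ
   to (reg:DI 80), the recursion above also records that (reg:SI 70)
   equals the SImode lowpart of (reg:DI 80).  (Illustrative modes and
   register numbers.)  */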
/* CSE processing for one instruction.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.  */

/* Data on one SET contained in the instruction.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.  */
  struct table_elt *dest_addr_elt;
};
static void
cse_insn (rtx insn)
{
  rtx x = PATTERN (insn);
  int i;
  rtx tem;
  int n_sets = 0;

  rtx src_eqv = 0;
  struct table_elt *src_eqv_elt = 0;
  int src_eqv_volatile = 0;
  int src_eqv_in_memory = 0;
  unsigned src_eqv_hash = 0;

  struct set *sets = (struct set *) 0;

  this_insn = insn;
#ifdef HAVE_cc0
  /* Records what this insn does to set CC0.  */
  this_insn_cc0 = 0;
  this_insn_cc0_mode = VOIDmode;
#endif

  /* Find all the SETs and CLOBBERs in this instruction.
     Record all the SETs in the array `set' and count them.
     Also determine whether there is a CLOBBER that invalidates
     all memory references, or all references at varying addresses.  */
  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
        {
          if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
            invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
          XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
        }
    }

  if (GET_CODE (x) == SET)
    {
      sets = XALLOCA (struct set);

      /* Ignore SETs that are unconditional jumps.
         They never need cse processing, so this does not hurt.
         The reason is not efficiency but rather
         so that we can test at the end for instructions
         that have been simplified to unconditional jumps
         and not be misled by unchanged instructions
         that were unconditional jumps to begin with.  */
      if (SET_DEST (x) == pc_rtx
          && GET_CODE (SET_SRC (x)) == LABEL_REF)
        ;

      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
         The hard function value register is used only once, to copy to
         someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
         Ensure we invalidate the destination register.  On the 80386 no
         other code would invalidate it since it is a fixed_reg.
         We need not check the return of apply_change_group; see canon_reg.  */

      else if (GET_CODE (SET_SRC (x)) == CALL)
        {
          canon_reg (SET_SRC (x), insn);
          apply_change_group ();
          fold_rtx (SET_SRC (x), insn);
          invalidate (SET_DEST (x), VOIDmode);
        }
      else
        sets[n_sets++].rtl = x;
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int lim = XVECLEN (x, 0);

      sets = XALLOCAVEC (struct set, lim);

      /* Find all regs explicitly clobbered in this insn,
         and ensure they are not replaced with any other regs
         elsewhere in this insn.
         When a reg that is clobbered is also used for input,
         we should presume that that is for a reason,
         and we should not substitute some other register
         which is not supposed to be clobbered.
         Therefore, this loop cannot be merged into the one below
         because a CALL may precede a CLOBBER and refer to the
         value clobbered.  We must not let a canonicalization do
         anything in that case.  */
      for (i = 0; i < lim; i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == CLOBBER)
            {
              rtx clobbered = XEXP (y, 0);

              if (REG_P (clobbered)
                  || GET_CODE (clobbered) == SUBREG)
                invalidate (clobbered, VOIDmode);
              else if (GET_CODE (clobbered) == STRICT_LOW_PART
                       || GET_CODE (clobbered) == ZERO_EXTRACT)
                invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
            }
        }

      for (i = 0; i < lim; i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == SET)
            {
              /* As above, we ignore unconditional jumps and call-insns and
                 ignore the result of apply_change_group.  */
              if (GET_CODE (SET_SRC (y)) == CALL)
                {
                  canon_reg (SET_SRC (y), insn);
                  apply_change_group ();
                  fold_rtx (SET_SRC (y), insn);
                  invalidate (SET_DEST (y), VOIDmode);
                }
              else if (SET_DEST (y) == pc_rtx
                       && GET_CODE (SET_SRC (y)) == LABEL_REF)
                ;
              else
                sets[n_sets++].rtl = y;
            }
          else if (GET_CODE (y) == CLOBBER)
            {
              /* If we clobber memory, canon the address.
                 This does nothing when a register is clobbered
                 because we have already invalidated the reg.  */
              if (MEM_P (XEXP (y, 0)))
                canon_reg (XEXP (y, 0), insn);
            }
          else if (GET_CODE (y) == USE
                   && ! (REG_P (XEXP (y, 0))
                         && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
            canon_reg (y, insn);
          else if (GET_CODE (y) == CALL)
            {
              /* The result of apply_change_group can be ignored; see
                 canon_reg.  */
              canon_reg (y, insn);
              apply_change_group ();
              fold_rtx (y, insn);
            }
        }
    }
  else if (GET_CODE (x) == CLOBBER)
    {
      if (MEM_P (XEXP (x, 0)))
        canon_reg (XEXP (x, 0), insn);
    }
  /* Canonicalize a USE of a pseudo register or memory location.  */
  else if (GET_CODE (x) == USE
           && ! (REG_P (XEXP (x, 0))
                 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
    canon_reg (XEXP (x, 0), insn);
  else if (GET_CODE (x) == CALL)
    {
      /* The result of apply_change_group can be ignored; see canon_reg.  */
      canon_reg (x, insn);
      apply_change_group ();
      fold_rtx (x, insn);
    }
  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
     is handled specially for this case, and if it isn't set, then there will
     be no equivalence for the destination.  */
  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
          || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
    {
      /* The result of apply_change_group can be ignored; see canon_reg.  */
      canon_reg (XEXP (tem, 0), insn);
      apply_change_group ();
      src_eqv = fold_rtx (XEXP (tem, 0), insn);
      XEXP (tem, 0) = copy_rtx (src_eqv);
      df_notes_rescan (insn);
    }
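
  /* Hypothetical example: an insn (set (reg:SI 100) (plus:SI (reg:SI 101)
     (const_int 4))) carrying a REG_EQUAL note of (const_int 20) gives us
     src_eqv = (const_int 20), an equivalence for the destination that the
     SET_SRC alone would not reveal.  */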
  /* Canonicalize sources and addresses of destinations.
     We do this in a separate pass to avoid problems when a MATCH_DUP is
     present in the insn pattern.  In that case, we want to ensure that
     we don't break the duplicate nature of the pattern.  So we will replace
     both operands at the same time.  Otherwise, we would fail to find an
     equivalent substitution in the loop calling validate_change below.

     We used to suppress canonicalization of DEST if it appears in SRC,
     but we don't do this any more.  */

  for (i = 0; i < n_sets; i++)
    {
      rtx dest = SET_DEST (sets[i].rtl);
      rtx src = SET_SRC (sets[i].rtl);
      rtx new_rtx = canon_reg (src, insn);

      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);

      if (GET_CODE (dest) == ZERO_EXTRACT)
        {
          validate_change (insn, &XEXP (dest, 1),
                           canon_reg (XEXP (dest, 1), insn), 1);
          validate_change (insn, &XEXP (dest, 2),
                           canon_reg (XEXP (dest, 2), insn), 1);
        }

      while (GET_CODE (dest) == SUBREG
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      if (MEM_P (dest))
        canon_reg (dest, insn);
    }

  /* Now that we have done all the replacements, we can apply the change
     group and see if they all work.  Note that this will cause some
     canonicalizations that would have worked individually not to be applied
     because some other canonicalization didn't work, but this should not
     occur often.

     The result of apply_change_group can be ignored; see canon_reg.  */

  apply_change_group ();
  /* Set sets[i].src_elt to the class each source belongs to.
     Detect assignments from or to volatile things
     and set sets[i] to zero so they will be ignored
     in the rest of this function.

     Nothing in this loop changes the hash table or the register chains.  */

  for (i = 0; i < n_sets; i++)
    {
      rtx src, dest;
      rtx src_folded;
      struct table_elt *elt = 0, *p;
      enum machine_mode mode;
      rtx src_eqv_here;
      rtx src_const = 0;
      rtx src_related = 0;
      struct table_elt *src_const_elt = 0;
      int src_cost = MAX_COST;
      int src_eqv_cost = MAX_COST;
      int src_folded_cost = MAX_COST;
      int src_related_cost = MAX_COST;
      int src_elt_cost = MAX_COST;
      int src_regcost = MAX_COST;
      int src_eqv_regcost = MAX_COST;
      int src_folded_regcost = MAX_COST;
      int src_related_regcost = MAX_COST;
      int src_elt_regcost = MAX_COST;
      /* Set nonzero if we need to call force_const_mem with the
         contents of src_folded before using it.  */
      int src_folded_force_flag = 0;

      dest = SET_DEST (sets[i].rtl);
      src = SET_SRC (sets[i].rtl);
      /* If SRC is a constant that has no machine mode,
         hash it with the destination's machine mode.
         This way we can keep different modes separate.  */

      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
      sets[i].mode = mode;
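
      /* For instance, (const_int 5) carries VOIDmode; in the hypothetical
         insn (set (reg:SI 100) (const_int 5)) it is therefore hashed in
         SImode, and the same constant stored into a DImode register lands
         in a distinct equivalence class.  */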
      if (src_eqv)
        {
          enum machine_mode eqvmode = mode;
          if (GET_CODE (dest) == STRICT_LOW_PART)
            eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
          do_not_record = 0;
          hash_arg_in_memory = 0;
          src_eqv_hash = HASH (src_eqv, eqvmode);

          /* Find the equivalence class for the equivalent expression.  */

          if (!do_not_record)
            src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);

          src_eqv_volatile = do_not_record;
          src_eqv_in_memory = hash_arg_in_memory;
        }

      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
         value of the INNER register, not the destination.  So it is not
         a valid substitution for the source.  But save it for later.  */
      if (GET_CODE (dest) == STRICT_LOW_PART)
        src_eqv_here = 0;
      else
        src_eqv_here = src_eqv;

      /* Simplify and fold any foldable subexpressions in SRC.  Then get the
         fully simplified result, which may not necessarily be valid.  */
      src_folded = fold_rtx (src, insn);
#if 0
      /* ??? This caused bad code to be generated for the m68k port with -O2.
         Suppose src is (CONST_INT -1), and that after truncation src_folded
         is (CONST_INT 3).  Suppose src_folded is then used for src_const.
         At the end we will add src and src_const to the same equivalence
         class.  We now have 3 and -1 on the same equivalence class.  This
         causes later instructions to be mis-optimized.  */
      /* If storing a constant in a bitfield, pre-truncate the constant
         so we will be able to record it later.  */
      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
        {
          rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

          if (GET_CODE (src) == CONST_INT
              && GET_CODE (width) == CONST_INT
              && INTVAL (width) < HOST_BITS_PER_WIDE_INT
              && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
            src_folded
              = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
                                          << INTVAL (width)) - 1));
        }
#endif
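
      /* A worked instance of the pre-truncation above, matching the m68k
         remark: with src = (const_int -1) and a field width of 2,
         (-1) & ((1 << 2) - 1) = 3, so src_folded would become (const_int 3)
         while src stays -1; recording both in one equivalence class is what
         mis-optimized later code.  */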
      /* Compute SRC's hash code, and also notice if it
         should not be recorded at all.  In that case,
         prevent any further processing of this assignment.  */
      do_not_record = 0;
      hash_arg_in_memory = 0;

      sets[i].src = src;
      sets[i].src_hash = HASH (src, mode);
      sets[i].src_volatile = do_not_record;
      sets[i].src_in_memory = hash_arg_in_memory;

      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
         a pseudo, do not record SRC.  Using SRC as a replacement for
         anything else will be incorrect in that situation.  Note that
         this usually occurs only for stack slots, in which case all the
         RTL would be referring to SRC, so we don't lose any optimization
         opportunities by not having SRC in the hash table.  */

      if (MEM_P (src)
          && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
          && REG_P (dest)
          && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
        sets[i].src_volatile = 1;

#if 0
      /* It is no longer clear why we used to do this, but it doesn't
         appear to still be needed.  So let's try without it since this
         code hurts cse'ing widened ops.  */
      /* If source is a paradoxical subreg (such as QI treated as an SI),
         treat it as volatile.  It may do the work of an SI in one context
         where the extra bits are not being used, but cannot replace an SI
         in general.  */
      if (GET_CODE (src) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (src))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
        sets[i].src_volatile = 1;
#endif
      /* Locate all possible equivalent forms for SRC.  Try to replace
         SRC in the insn with each cheaper equivalent.

         We have the following types of equivalents: SRC itself, a folded
         version, a value given in a REG_EQUAL note, or a value related
         to a constant.

         Each of these equivalents may be part of an additional class
         of equivalents (if more than one is in the table, they must be in
         the same class; we check for this).

         If the source is volatile, we don't do any table lookups.

         We note any constant equivalent for possible later use in a
         REG_NOTE.  */

      if (!sets[i].src_volatile)
        elt = lookup (src, sets[i].src_hash, mode);

      sets[i].src_elt = elt;

      if (elt && src_eqv_here && src_eqv_elt)
        {
          if (elt->first_same_value != src_eqv_elt->first_same_value)
            {
              /* The REG_EQUAL is indicating that two formerly distinct
                 classes are now equivalent.  So merge them.  */
              merge_equiv_classes (elt, src_eqv_elt);
              src_eqv_hash = HASH (src_eqv, elt->mode);
              src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
            }

          src_eqv_here = 0;
        }

      else if (src_eqv_elt)
        elt = src_eqv_elt;
      /* Try to find a constant somewhere and record it in `src_const'.
         Record its table element, if any, in `src_const_elt'.  Look in
         any known equivalences first.  (If the constant is not in the
         table, also set `sets[i].src_const_hash').  */
      if (elt)
        for (p = elt->first_same_value; p; p = p->next_same_value)
          if (p->is_const)
            {
              src_const = p->exp;
              src_const_elt = elt;
              break;
            }

      if (src_const == 0
          && (CONSTANT_P (src_folded)
              /* Consider (minus (label_ref L1) (label_ref L2)) as
                 "constant" here so we will record it.  This allows us
                 to fold switch statements when an ADDR_DIFF_VEC is used.  */
              || (GET_CODE (src_folded) == MINUS
                  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
                  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
        src_const = src_folded, src_const_elt = elt;
      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
        src_const = src_eqv_here, src_const_elt = src_eqv_elt;

      /* If we don't know if the constant is in the table, get its
         hash code and look it up.  */
      if (src_const && src_const_elt == 0)
        {
          sets[i].src_const_hash = HASH (src_const, mode);
          src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
        }

      sets[i].src_const = src_const;
      sets[i].src_const_elt = src_const_elt;

      /* If the constant and our source are both in the table, mark them as
         equivalent.  Otherwise, if a constant is in the table but the source
         isn't, set ELT to it.  */
      if (src_const_elt && elt
          && src_const_elt->first_same_value != elt->first_same_value)
        merge_equiv_classes (elt, src_const_elt);
      else if (src_const_elt && elt == 0)
        elt = src_const_elt;
      /* See if there is a register linearly related to a constant
         equivalent of SRC.  */

      if (src_const
          && (GET_CODE (src_const) == CONST
              || (src_const_elt && src_const_elt->related_value != 0)))
        {
          src_related = use_related_value (src_const, src_const_elt);
          if (src_related)
            {
              struct table_elt *src_related_elt
                = lookup (src_related, HASH (src_related, mode), mode);
              if (src_related_elt && elt)
                {
                  if (elt->first_same_value
                      != src_related_elt->first_same_value)
                    /* This can occur when we previously saw a CONST
                       involving a SYMBOL_REF and then see the SYMBOL_REF
                       twice.  Merge the involved classes.  */
                    merge_equiv_classes (elt, src_related_elt);

                  src_related = 0;
                  src_related_elt = 0;
                }
              else if (src_related_elt && elt == 0)
                elt = src_related_elt;
            }
        }
      /* See if we have a CONST_INT that is already in a register in a
         wider mode.  */

      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
          && GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
        {
          enum machine_mode wider_mode;

          for (wider_mode = GET_MODE_WIDER_MODE (mode);
               GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
               && src_related == 0;
               wider_mode = GET_MODE_WIDER_MODE (wider_mode))
            {
              struct table_elt *const_elt
                = lookup (src_const, HASH (src_const, wider_mode), wider_mode);

              if (const_elt == 0)
                continue;

              for (const_elt = const_elt->first_same_value;
                   const_elt; const_elt = const_elt->next_same_value)
                if (REG_P (const_elt->exp))
                  {
                    src_related = gen_lowpart (mode, const_elt->exp);
                    break;
                  }
            }
        }
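
      /* Hypothetical illustration: if (const_int 7) is already known to be
         in (reg:SI 100) and the current set needs it in QImode, the loop
         above yields src_related = gen_lowpart (QImode, (reg:SI 100)),
         avoiding a fresh load of the constant.  */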
      /* Another possibility is that we have an AND with a constant in
         a mode narrower than a word.  If so, it might have been generated
         as part of an "if" which would narrow the AND.  If we already
         have done the AND in a wider mode, we can use a SUBREG of that
         value.  */

      if (flag_expensive_optimizations && ! src_related
          && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
          && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
        {
          enum machine_mode tmode;
          rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));

          for (tmode = GET_MODE_WIDER_MODE (mode);
               GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
               tmode = GET_MODE_WIDER_MODE (tmode))
            {
              rtx inner = gen_lowpart (tmode, XEXP (src, 0));
              struct table_elt *larger_elt;

              if (inner)
                {
                  PUT_MODE (new_and, tmode);
                  XEXP (new_and, 0) = inner;
                  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
                  if (larger_elt == 0)
                    continue;

                  for (larger_elt = larger_elt->first_same_value;
                       larger_elt; larger_elt = larger_elt->next_same_value)
                    if (REG_P (larger_elt->exp))
                      {
                        src_related
                          = gen_lowpart (mode, larger_elt->exp);
                        break;
                      }

                  if (src_related)
                    break;
                }
            }
        }
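
      /* For example (registers invented): after
         (set (reg:SI 100) (and:SI (reg:SI 101) (const_int 15))), a later
         QImode computation (and:QI (subreg:QI (reg:SI 101) 0) (const_int 15))
         can be replaced by a low-part subreg of (reg:SI 100), since the
         wider AND already produced the same low-order bits.  */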
#ifdef LOAD_EXTEND_OP
      /* See if a MEM has already been loaded with a widening operation;
         if it has, we can use a subreg of that.  Many CISC machines
         also have such operations, but this is only likely to be
         beneficial on these machines.  */

      if (flag_expensive_optimizations && src_related == 0
          && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
          && GET_MODE_CLASS (mode) == MODE_INT
          && MEM_P (src) && ! do_not_record
          && LOAD_EXTEND_OP (mode) != UNKNOWN)
        {
          struct rtx_def memory_extend_buf;
          rtx memory_extend_rtx = &memory_extend_buf;
          enum machine_mode tmode;

          /* Set what we are trying to extend and the operation it might
             have been extended with.  */
          memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
          PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
          XEXP (memory_extend_rtx, 0) = src;

          for (tmode = GET_MODE_WIDER_MODE (mode);
               GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
               tmode = GET_MODE_WIDER_MODE (tmode))
            {
              struct table_elt *larger_elt;

              PUT_MODE (memory_extend_rtx, tmode);
              larger_elt = lookup (memory_extend_rtx,
                                   HASH (memory_extend_rtx, tmode), tmode);
              if (larger_elt == 0)
                continue;

              for (larger_elt = larger_elt->first_same_value;
                   larger_elt; larger_elt = larger_elt->next_same_value)
                if (REG_P (larger_elt->exp))
                  {
                    src_related = gen_lowpart (mode, larger_elt->exp);
                    break;
                  }

              if (src_related)
                break;
            }
        }
#endif /* LOAD_EXTEND_OP */
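
      /* Sketch, assuming a target whose LOAD_EXTEND_OP (QImode) is
         ZERO_EXTEND: a previous (set (reg:SI 100) (zero_extend:SI
         (mem:QI addr))) makes a later QImode load of the same address
         available as (subreg:QI (reg:SI 100) 0), saving a memory access.  */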
      if (src == src_folded)
        src_folded = 0;
      /* At this point, ELT, if nonzero, points to a class of expressions
         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
         and SRC_RELATED, if nonzero, each contain additional equivalent
         expressions.  Prune these latter expressions by deleting expressions
         already in the equivalence class.

         Check for an equivalent identical to the destination.  If found,
         this is the preferred equivalent since it will likely lead to
         elimination of the insn.  Indicate this by placing it in
         `src_related'.  */

      if (elt)
        elt = elt->first_same_value;
      for (p = elt; p; p = p->next_same_value)
        {
          enum rtx_code code = GET_CODE (p->exp);

          /* If the expression is not valid, ignore it.  Then we do not
             have to check for validity below.  In most cases, we can use
             `rtx_equal_p', since canonicalization has already been done.  */
          if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
            continue;

          /* Also skip paradoxical subregs, unless that's what we're
             looking for.  */
          if (code == SUBREG
              && (GET_MODE_SIZE (GET_MODE (p->exp))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
              && ! (src != 0
                    && GET_CODE (src) == SUBREG
                    && GET_MODE (src) == GET_MODE (p->exp)
                    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
                        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
            continue;

          if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
            src = 0;
          else if (src_folded && GET_CODE (src_folded) == code
                   && rtx_equal_p (src_folded, p->exp))
            src_folded = 0;
          else if (src_eqv_here && GET_CODE (src_eqv_here) == code
                   && rtx_equal_p (src_eqv_here, p->exp))
            src_eqv_here = 0;
          else if (src_related && GET_CODE (src_related) == code
                   && rtx_equal_p (src_related, p->exp))
            src_related = 0;

          /* If this is the same as the destination of the insn, we want
             to prefer it.  Copy it to src_related.  The code below will
             then give it a negative cost.  */
          if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
            src_related = p->exp;
        }
      /* Find the cheapest valid equivalent, trying all the available
         possibilities.  Prefer items not in the hash table to ones
         that are when they are equal cost.  Note that we can never
         worsen an insn as the current contents will also succeed.

         If we find an equivalent identical to the destination, use it as best,
         since this insn will probably be eliminated in that case.  */

      if (src)
        {
          if (rtx_equal_p (src, dest))
            src_cost = src_regcost = -1;
          else
            {
              src_cost = COST (src);
              src_regcost = approx_reg_cost (src);
            }
        }

      if (src_eqv_here)
        {
          if (rtx_equal_p (src_eqv_here, dest))
            src_eqv_cost = src_eqv_regcost = -1;
          else
            {
              src_eqv_cost = COST (src_eqv_here);
              src_eqv_regcost = approx_reg_cost (src_eqv_here);
            }
        }

      if (src_folded)
        {
          if (rtx_equal_p (src_folded, dest))
            src_folded_cost = src_folded_regcost = -1;
          else
            {
              src_folded_cost = COST (src_folded);
              src_folded_regcost = approx_reg_cost (src_folded);
            }
        }

      if (src_related)
        {
          if (rtx_equal_p (src_related, dest))
            src_related_cost = src_related_regcost = -1;
          else
            {
              src_related_cost = COST (src_related);
              src_related_regcost = approx_reg_cost (src_related);
            }
        }

      /* If this was an indirect jump insn, a known label will really be
         cheaper even though it looks more expensive.  */
      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
        src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
      /* Terminate loop when replacement made.  This must terminate since
         the current contents will be tested and will always be valid.  */
      while (1)
        {
          rtx trial;

          /* Skip invalid entries.  */
          while (elt && !REG_P (elt->exp)
                 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
            elt = elt->next_same_value;

          /* A paradoxical subreg would be bad here: it'll be the right
             size, but later may be adjusted so that the upper bits aren't
             what we want.  So reject it.  */
          if (elt != 0
              && GET_CODE (elt->exp) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (elt->exp))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
              /* It is okay, though, if the rtx we're trying to match
                 will ignore any of the bits we can't predict.  */
              && ! (src != 0
                    && GET_CODE (src) == SUBREG
                    && GET_MODE (src) == GET_MODE (elt->exp)
                    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
                        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
            {
              elt = elt->next_same_value;
              continue;
            }

          if (elt)
            {
              src_elt_cost = elt->cost;
              src_elt_regcost = elt->regcost;
            }

          /* Find cheapest and skip it for the next time.  For items
             of equal cost, use this order:
             src_folded, src, src_eqv, src_related and hash table entry.  */
          if (src_folded
              && preferable (src_folded_cost, src_folded_regcost,
                             src_cost, src_regcost) <= 0
              && preferable (src_folded_cost, src_folded_regcost,
                             src_eqv_cost, src_eqv_regcost) <= 0
              && preferable (src_folded_cost, src_folded_regcost,
                             src_related_cost, src_related_regcost) <= 0
              && preferable (src_folded_cost, src_folded_regcost,
                             src_elt_cost, src_elt_regcost) <= 0)
            {
              trial = src_folded, src_folded_cost = MAX_COST;
              if (src_folded_force_flag)
                {
                  rtx forced = force_const_mem (mode, trial);
                  if (forced)
                    trial = forced;
                }
            }
          else if (src
                   && preferable (src_cost, src_regcost,
                                  src_eqv_cost, src_eqv_regcost) <= 0
                   && preferable (src_cost, src_regcost,
                                  src_related_cost, src_related_regcost) <= 0
                   && preferable (src_cost, src_regcost,
                                  src_elt_cost, src_elt_regcost) <= 0)
            trial = src, src_cost = MAX_COST;
          else if (src_eqv_here
                   && preferable (src_eqv_cost, src_eqv_regcost,
                                  src_related_cost, src_related_regcost) <= 0
                   && preferable (src_eqv_cost, src_eqv_regcost,
                                  src_elt_cost, src_elt_regcost) <= 0)
            trial = src_eqv_here, src_eqv_cost = MAX_COST;
          else if (src_related
                   && preferable (src_related_cost, src_related_regcost,
                                  src_elt_cost, src_elt_regcost) <= 0)
            trial = src_related, src_related_cost = MAX_COST;
          else
            {
              trial = elt->exp;
              elt = elt->next_same_value;
              src_elt_cost = MAX_COST;
            }

          /* Avoid creation of overlapping memory moves.  */
          if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
            {
              rtx src, dest;

              /* BLKmode moves are not handled by cse anyway.  */
              if (GET_MODE (trial) == BLKmode)
                break;

              src = canon_rtx (trial);
              dest = canon_rtx (SET_DEST (sets[i].rtl));

              if (!MEM_P (src) || !MEM_P (dest)
                  || !nonoverlapping_memrefs_p (src, dest))
                break;
            }
          /* We don't normally have an insn matching (set (pc) (pc)), so
             check for this separately here.  We will delete such an
             insn below.

             For other cases such as a table jump or conditional jump
             where we know the ultimate target, go ahead and replace the
             operand.  While that may not make a valid insn, we will
             reemit the jump below (and also insert any necessary
             barriers).  */
          if (n_sets == 1 && dest == pc_rtx
              && (trial == pc_rtx
                  || (GET_CODE (trial) == LABEL_REF
                      && ! condjump_p (insn))))
            {
              /* Don't substitute non-local labels, this confuses CFG.  */
              if (GET_CODE (trial) == LABEL_REF
                  && LABEL_REF_NONLOCAL_P (trial))
                SET_SRC (sets[i].rtl) = pc_rtx;
              else
                SET_SRC (sets[i].rtl) = trial;
              cse_jumps_altered = true;
              break;
            }
          /* Reject certain invalid forms of CONST that we create.  */
          else if (CONSTANT_P (trial)
                   && GET_CODE (trial) == CONST
                   /* Reject cases that will cause decode_rtx_const to
                      die.  On the alpha when simplifying a switch, we
                      get (const (truncate (minus (label_ref)
                      (label_ref)))).  */
                   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
                       /* Likewise on IA-64, except without the
                          truncate.  */
                       || (GET_CODE (XEXP (trial, 0)) == MINUS
                           && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
                           && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
            /* Do nothing for this case.  */
            ;

          /* Look for a substitution that makes a valid insn.  */
          else if (validate_unshare_change
                     (insn, &SET_SRC (sets[i].rtl), trial, 0))
            {
              rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);

              /* The result of apply_change_group can be ignored; see
                 canon_reg.  */

              validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
              apply_change_group ();

              break;
            }

          /* If we previously found constant pool entries for
             constants and this is a constant, try making a
             pool entry.  Put it in src_folded unless we already have done
             this since that is where it likely came from.  */

          else if (constant_pool_entries_cost
                   && CONSTANT_P (trial)
                   && (src_folded == 0
                       || (!MEM_P (src_folded)
                           && ! src_folded_force_flag))
                   && GET_MODE_CLASS (mode) != MODE_CC
                   && mode != VOIDmode)
            {
              src_folded_force_flag = 1;
              src_folded = trial;
              src_folded_cost = constant_pool_entries_cost;
              src_folded_regcost = constant_pool_entries_regcost;
            }
        }

      src = SET_SRC (sets[i].rtl);
      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
         However, there is an important exception:  If both are registers
         that are not the head of their equivalence class, replace SET_SRC
         with the head of the class.  If we do not do this, we will have
         both registers live over a portion of the basic block.  This way,
         their lifetimes will likely abut instead of overlapping.  */
      if (REG_P (dest)
          && REGNO_QTY_VALID_P (REGNO (dest)))
        {
          int dest_q = REG_QTY (REGNO (dest));
          struct qty_table_elem *dest_ent = &qty_table[dest_q];

          if (dest_ent->mode == GET_MODE (dest)
              && dest_ent->first_reg != REGNO (dest)
              && REG_P (src) && REGNO (src) == REGNO (dest)
              /* Don't do this if the original insn had a hard reg as
                 SET_SRC or SET_DEST.  */
              && (!REG_P (sets[i].src)
                  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
              && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
            /* We can't call canon_reg here because it won't do anything if
               SRC is a hard register.  */
            {
              int src_q = REG_QTY (REGNO (src));
              struct qty_table_elem *src_ent = &qty_table[src_q];
              int first = src_ent->first_reg;
              rtx new_src
                = (first >= FIRST_PSEUDO_REGISTER
                   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));

              /* We must use validate-change even for this, because this
                 might be a special no-op instruction, suitable only to
                 tag notes onto.  */
              if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
                {
                  src = new_src;

                  /* If we had a constant that is cheaper than what we are now
                     setting SRC to, use that constant.  We ignored it when we
                     thought we could make this into a no-op.  */
                  if (src_const && COST (src_const) < COST (src)
                      && validate_change (insn, &SET_SRC (sets[i].rtl),
                                          src_const, 0))
                    src = src_const;
                }
            }
        }
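
      /* Example with invented pseudos: if regs 100 and 101 share a quantity
         whose first_reg is 100, an insn that has just become
         (set (reg 101) (reg 101)) is rewritten as (set (reg 101) (reg 100)),
         so the two live ranges abut instead of overlapping.  */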
      /* If we made a change, recompute SRC values.  */
      if (src != sets[i].src)
        {
          do_not_record = 0;
          hash_arg_in_memory = 0;
          sets[i].src = src;
          sets[i].src_hash = HASH (src, mode);
          sets[i].src_volatile = do_not_record;
          sets[i].src_in_memory = hash_arg_in_memory;
          sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
        }
      /* If this is a single SET, we are setting a register, and we have an
         equivalent constant, we want to add a REG_NOTE.  We don't want
         to write a REG_EQUAL note for a constant pseudo since verifying that
         that pseudo hasn't been eliminated is a pain.  Such a note also
         won't help anything.

         Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
         which can be created for a reference to a compile time computable
         entry in a jump table.  */

      if (n_sets == 1 && src_const && REG_P (dest)
          && !REG_P (src_const)
          && ! (GET_CODE (src_const) == CONST
                && GET_CODE (XEXP (src_const, 0)) == MINUS
                && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
                && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
        {
          /* We only want a REG_EQUAL note if src_const != src.  */
          if (! rtx_equal_p (src, src_const))
            {
              /* Make sure that the rtx is not shared.  */
              src_const = copy_rtx (src_const);

              /* Record the actual constant value in a REG_EQUAL note,
                 making a new one if one does not already exist.  */
              set_unique_reg_note (insn, REG_EQUAL, src_const);
              df_notes_rescan (insn);
            }
        }
      /* Now deal with the destination.  */
      do_not_record = 0;

      /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
      while (GET_CODE (dest) == SUBREG
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      sets[i].inner_dest = dest;

      if (MEM_P (dest))
        {
#ifdef PUSH_ROUNDING
          /* Stack pushes invalidate the stack pointer.  */
          rtx addr = XEXP (dest, 0);
          if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
              && XEXP (addr, 0) == stack_pointer_rtx)
            invalidate (stack_pointer_rtx, VOIDmode);
#endif
          dest = fold_rtx (dest, insn);
        }

      /* Compute the hash code of the destination now,
         before the effects of this instruction are recorded,
         since the register values used in the address computation
         are those before this instruction.  */
      sets[i].dest_hash = HASH (dest, mode);

      /* Don't enter a bit-field in the hash table
         because the value in it after the store
         may not equal what was stored, due to truncation.  */

      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
        {
          rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

          if (src_const != 0 && GET_CODE (src_const) == CONST_INT
              && GET_CODE (width) == CONST_INT
              && INTVAL (width) < HOST_BITS_PER_WIDE_INT
              && ! (INTVAL (src_const)
                    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
            /* Exception: if the value is constant,
               and it won't be truncated, record it.  */
            ;
          else
            {
              /* This is chosen so that the destination will be invalidated
                 but no new value will be recorded.
                 We must invalidate because sometimes constant
                 values can be recorded for bitfields.  */
              sets[i].src_elt = 0;
              sets[i].src_volatile = 1;
              src_eqv = 0;
              src_eqv_elt = 0;
            }
        }
      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
         the insn.  */
      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
        {
          /* One less use of the label this insn used to jump to.  */
          delete_insn_and_edges (insn);
          cse_jumps_altered = true;
          /* No more processing for this set.  */
          sets[i].rtl = 0;
        }

      /* If this SET is now setting PC to a label, we know it used to
         be a conditional or computed branch.  */
      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
               && !LABEL_REF_NONLOCAL_P (src))
        {
          /* We reemit the jump in as many cases as possible just in
             case the form of an unconditional jump is significantly
             different than a computed jump or conditional jump.

             If this insn has multiple sets, then reemitting the
             jump is nontrivial.  So instead we just force rerecognition
             and hope for the best.  */
          if (n_sets == 1)
            {
              rtx new_rtx, note;

              new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
              JUMP_LABEL (new_rtx) = XEXP (src, 0);
              LABEL_NUSES (XEXP (src, 0))++;

              /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
              note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
              if (note)
                {
                  XEXP (note, 1) = NULL_RTX;
                  REG_NOTES (new_rtx) = note;
                }

              delete_insn_and_edges (insn);
              insn = new_rtx;
            }
          else
            INSN_CODE (insn) = -1;

          /* Do not bother deleting any unreachable code, let jump do it.  */
          cse_jumps_altered = true;
          sets[i].rtl = 0;
        }
      /* If destination is volatile, invalidate it and then do no further
         processing for this assignment.  */

      else if (do_not_record)
        {
          if (REG_P (dest) || GET_CODE (dest) == SUBREG)
            invalidate (dest, VOIDmode);
          else if (MEM_P (dest))
            invalidate (dest, VOIDmode);
          else if (GET_CODE (dest) == STRICT_LOW_PART
                   || GET_CODE (dest) == ZERO_EXTRACT)
            invalidate (XEXP (dest, 0), GET_MODE (dest));
          sets[i].rtl = 0;
        }

      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
        sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
#ifdef HAVE_cc0
      /* If setting CC0, record what it was set to, or a constant, if it
         is equivalent to a constant.  If it is being set to a floating-point
         value, make a COMPARE with the appropriate constant of 0.  If we
         don't do this, later code can interpret this as a test against
         const0_rtx, which can cause problems if we try to put it into an
         insn as a floating-point operand.  */
      if (dest == cc0_rtx)
        {
          this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
          this_insn_cc0_mode = mode;
          if (FLOAT_MODE_P (mode))
            this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
                                             CONST0_RTX (mode));
        }
#endif
    }
  /* Now enter all non-volatile source expressions in the hash table
     if they are not already present.
     Record their equivalence classes in src_elt.
     This way we can insert the corresponding destinations into
     the same classes even if the actual sources are no longer in them
     (having been invalidated).  */

  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
    {
      struct table_elt *elt;
      struct table_elt *classp = sets[0].src_elt;
      rtx dest = SET_DEST (sets[0].rtl);
      enum machine_mode eqvmode = GET_MODE (dest);

      if (GET_CODE (dest) == STRICT_LOW_PART)
        {
          eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
          classp = 0;
        }
      if (insert_regs (src_eqv, classp, 0))
        {
          rehash_using_reg (src_eqv);
          src_eqv_hash = HASH (src_eqv, eqvmode);
        }
      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
      elt->in_memory = src_eqv_in_memory;
      src_eqv_elt = elt;

      /* Check to see if src_eqv_elt is the same as a set source which
         does not yet have an elt, and if so set the elt of the set source
         to src_eqv_elt.  */
      for (i = 0; i < n_sets; i++)
        if (sets[i].rtl && sets[i].src_elt == 0
            && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
          sets[i].src_elt = src_eqv_elt;
    }
  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl && ! sets[i].src_volatile
        && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
      {
        if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
          {
            /* REG_EQUAL in setting a STRICT_LOW_PART
               gives an equivalent for the entire destination register,
               not just for the subreg being stored in now.
               This is a more interesting equivalence, so we arrange later
               to treat the entire reg as the destination.  */
            sets[i].src_elt = src_eqv_elt;
            sets[i].src_hash = src_eqv_hash;
          }
        else
          {
            /* Insert source and constant equivalent into hash table, if not
               already present.  */
            struct table_elt *classp = src_eqv_elt;
            rtx src = sets[i].src;
            rtx dest = SET_DEST (sets[i].rtl);
            enum machine_mode mode
              = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);

            /* It's possible that we have a source value known to be
               constant but don't have a REG_EQUAL note on the insn.
               Lack of a note will mean src_eqv_elt will be NULL.  This
               can happen where we've generated a SUBREG to access a
               CONST_INT that is already in a register in a wider mode.
               Ensure that the source expression is put in the proper
               constant class.  */
            if (!classp)
              classp = sets[i].src_const_elt;

            if (sets[i].src_elt == 0)
              {
                struct table_elt *elt;

                /* Note that these insert_regs calls cannot remove
                   any of the src_elt's, because they would have failed to
                   match if not still valid.  */
                if (insert_regs (src, classp, 0))
                  {
                    rehash_using_reg (src);
                    sets[i].src_hash = HASH (src, mode);
                  }
                elt = insert (src, classp, sets[i].src_hash, mode);
                elt->in_memory = sets[i].src_in_memory;
                sets[i].src_elt = classp = elt;
              }

            if (sets[i].src_const && sets[i].src_const_elt == 0
                && src != sets[i].src_const
                && ! rtx_equal_p (sets[i].src_const, src))
              sets[i].src_elt = insert (sets[i].src_const, classp,
                                        sets[i].src_const_hash, mode);
          }
      }
    else if (sets[i].src_elt == 0)
      /* If we did not insert the source into the hash table (e.g., it was
         volatile), note the equivalence class for the REG_EQUAL value, if any,
         so that the destination goes into that class.  */
      sets[i].src_elt = src_eqv_elt;
  /* Record destination addresses in the hash table.  This allows us to
     check if they are invalidated by other sets.  */
  for (i = 0; i < n_sets; i++)
    {
      if (sets[i].rtl)
        {
          rtx x = sets[i].inner_dest;
          struct table_elt *elt;
          enum machine_mode mode;
          unsigned hash;

          if (MEM_P (x))
            {
              x = XEXP (x, 0);
              mode = GET_MODE (x);
              hash = HASH (x, mode);
              elt = lookup (x, hash, mode);
              if (!elt)
                {
                  if (insert_regs (x, NULL, 0))
                    {
                      rtx dest = SET_DEST (sets[i].rtl);

                      rehash_using_reg (x);
                      hash = HASH (x, mode);
                      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
                    }
                  elt = insert (x, NULL, hash, mode);
                }

              sets[i].dest_addr_elt = elt;
            }
          else
            sets[i].dest_addr_elt = NULL;
        }
    }
  invalidate_from_clobbers (x);

  /* Some registers are invalidated by subroutine calls.  Memory is
     invalidated by non-constant calls.  */

  if (CALL_P (insn))
    {
      if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
        invalidate_memory ();
      invalidate_for_call ();
    }

  /* Now invalidate everything set by this instruction.
     If a SUBREG or other funny destination is being set,
     sets[i].rtl is still nonzero, so here we invalidate the reg
     a part of which is being set.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        /* We can't use the inner dest, because the mode associated with
           a ZERO_EXTRACT is significant.  */
        rtx dest = SET_DEST (sets[i].rtl);

        /* Needed for registers to remove the register from its
           previous quantity's chain.
           Needed for memory if this is a nonvarying address, unless
           we have just done an invalidate_memory that covers even those.  */
        if (REG_P (dest) || GET_CODE (dest) == SUBREG)
          invalidate (dest, VOIDmode);
        else if (MEM_P (dest))
          invalidate (dest, VOIDmode);
        else if (GET_CODE (dest) == STRICT_LOW_PART
                 || GET_CODE (dest) == ZERO_EXTRACT)
          invalidate (XEXP (dest, 0), GET_MODE (dest));
      }

  /* A volatile ASM invalidates everything.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
      && MEM_VOLATILE_P (PATTERN (insn)))
    flush_hash_table ();

  /* Don't cse over a call to setjmp; on some machines (eg VAX)
     the regs restored by the longjmp come from a later time
     than the setjmp.  */
  if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
    {
      flush_hash_table ();
      cse_jumps_altered = true;
    }
  /* Make sure registers mentioned in destinations
     are safe for use in an expression to be inserted.
     This removes from the hash table
     any invalid entry that refers to one of these registers.

     We don't care about the return value from mention_regs because
     we are going to hash the SET_DEST values unconditionally.  */

  for (i = 0; i < n_sets; i++)
    {
      if (sets[i].rtl)
        {
          rtx x = SET_DEST (sets[i].rtl);

          if (!REG_P (x))
            mention_regs (x);
          else
            {
              /* We used to rely on all references to a register becoming
                 inaccessible when a register changes to a new quantity,
                 since that changes the hash code.  However, that is not
                 safe, since after HASH_SIZE new quantities we get a
                 hash 'collision' of a register with its own invalid
                 entries.  And since SUBREGs have been changed not to
                 change their hash code with the hash code of the register,
                 it wouldn't work any longer at all.  So we have to check
                 for any invalid references lying around now.
                 This code is similar to the REG case in mention_regs,
                 but it knows that reg_tick has been incremented, and
                 it leaves reg_in_table as -1 .  */
              unsigned int regno = REGNO (x);
              unsigned int endregno = END_REGNO (x);
              unsigned int i;

              for (i = regno; i < endregno; i++)
                {
                  if (REG_IN_TABLE (i) >= 0)
                    {
                      remove_invalid_refs (i);
                      REG_IN_TABLE (i) = -1;
                    }
                }
            }
        }
    }
  /* We may have just removed some of the src_elt's from the hash table.
     So replace each one with the current head of the same class.
     Also check if destination addresses have been removed.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        if (sets[i].dest_addr_elt
            && sets[i].dest_addr_elt->first_same_value == 0)
          {
            /* The elt was removed, which means this destination is not
               valid after this instruction.  */
            sets[i].rtl = NULL_RTX;
          }
        else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
          /* If elt was removed, find current head of same class,
             or 0 if nothing remains of that class.  */
          {
            struct table_elt *elt = sets[i].src_elt;

            while (elt && elt->prev_same_value)
              elt = elt->prev_same_value;

            while (elt && elt->first_same_value == 0)
              elt = elt->next_same_value;
            sets[i].src_elt = elt ? elt->first_same_value : 0;
          }
      }
  /* Now insert the destinations into their equivalence classes.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        rtx dest = SET_DEST (sets[i].rtl);
        struct table_elt *elt;

        /* Don't record value if we are not supposed to risk allocating
           floating-point values in registers that might be wider than
           memory.  */
        if ((flag_float_store
             && MEM_P (dest)
             && FLOAT_MODE_P (GET_MODE (dest)))
            /* Don't record BLKmode values, because we don't know the
               size of it, and can't be sure that other BLKmode values
               have the same or smaller size.  */
            || GET_MODE (dest) == BLKmode
            /* If we didn't put a REG_EQUAL value or a source into the hash
               table, there is no point in recording DEST.  */
            || sets[i].src_elt == 0
            /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
               or SIGN_EXTEND, don't record DEST since it can cause
               some tracking to be wrong.

               ??? Think about this more later.  */
            || (GET_CODE (dest) == SUBREG
                && (GET_MODE_SIZE (GET_MODE (dest))
                    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
                && (GET_CODE (sets[i].src) == SIGN_EXTEND
                    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
          continue;

        /* STRICT_LOW_PART isn't part of the value BEING set,
           and neither is the SUBREG inside it.
           Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
        if (GET_CODE (dest) == STRICT_LOW_PART)
          dest = SUBREG_REG (XEXP (dest, 0));

        if (REG_P (dest) || GET_CODE (dest) == SUBREG)
          /* Registers must also be inserted into chains for quantities.  */
          if (insert_regs (dest, sets[i].src_elt, 1))
            {
              /* If `insert_regs' changes something, the hash code must be
                 recalculated.  */
              rehash_using_reg (dest);
              sets[i].dest_hash = HASH (dest, GET_MODE (dest));
            }

        elt = insert (dest, sets[i].src_elt,
                      sets[i].dest_hash, GET_MODE (dest));

        elt->in_memory = (MEM_P (sets[i].inner_dest)
                          && !MEM_READONLY_P (sets[i].inner_dest));
        /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
           narrower than M2, and both M1 and M2 are the same number of words,
           we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
           make that equivalence as well.

           However, BAR may have equivalences for which gen_lowpart
           will produce a simpler value than gen_lowpart applied to
           BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
           BAR's equivalences.  If we don't get a simplified form, make
           the SUBREG.  It will not be used in an equivalence, but will
           cause two similar assignments to be detected.

           Note the loop below will find SUBREG_REG (DEST) since we have
           already entered SRC and DEST of the SET in the table.  */

        if (GET_CODE (dest) == SUBREG
            && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
                 / UNITS_PER_WORD)
                == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
            && (GET_MODE_SIZE (GET_MODE (dest))
                >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
            && sets[i].src_elt != 0)
          {
            enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
            struct table_elt *elt, *classp = 0;

            for (elt = sets[i].src_elt->first_same_value; elt;
                 elt = elt->next_same_value)
              {
                rtx new_src = 0;
                unsigned src_hash;
                struct table_elt *src_elt;
                int byte = 0;

                /* Ignore invalid entries.  */
                if (!REG_P (elt->exp)
                    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
                  continue;

                /* We may have already been playing subreg games.  If the
                   mode is already correct for the destination, use it.  */
                if (GET_MODE (elt->exp) == new_mode)
                  new_src = elt->exp;
                else
                  {
                    /* Calculate big endian correction for the SUBREG_BYTE.
                       We have already checked that M1 (GET_MODE (dest))
                       is not narrower than M2 (new_mode).  */
                    if (BYTES_BIG_ENDIAN)
                      byte = (GET_MODE_SIZE (GET_MODE (dest))
                              - GET_MODE_SIZE (new_mode));

                    new_src = simplify_gen_subreg (new_mode, elt->exp,
                                                   GET_MODE (dest), byte);
                  }

                /* The call to simplify_gen_subreg fails if the value
                   is VOIDmode, yet we can't do any simplification, e.g.
                   for EXPR_LISTs denoting function call results.
                   It is invalid to construct a SUBREG with a VOIDmode
                   SUBREG_REG, hence a zero new_src means we can't do
                   this substitution.  */
                if (new_src == 0)
                  continue;

                src_hash = HASH (new_src, new_mode);
                src_elt = lookup (new_src, src_hash, new_mode);

                /* Put the new source in the hash table if it isn't
                   already.  */
                if (src_elt == 0)
                  {
                    if (insert_regs (new_src, classp, 0))
                      {
                        rehash_using_reg (new_src);
                        src_hash = HASH (new_src, new_mode);
                      }
                    src_elt = insert (new_src, classp, src_hash, new_mode);
                    src_elt->in_memory = elt->in_memory;
                  }
                else if (classp && classp != src_elt->first_same_value)
                  /* Show that two things that we've seen before are
                     actually the same.  */
                  merge_equiv_classes (src_elt, classp);

                classp = src_elt->first_same_value;
                /* Ignore invalid entries.  */
                while (classp
                       && !REG_P (classp->exp)
                       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
                  classp = classp->next_same_value;
              }
          }
      }
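
  /* Hedged example for the SUBREG equivalence above, on a 32-bit-word
     target: for (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)), SImode and
     HImode occupy the same single word, so (reg:HI 100) is also recorded as
     equivalent to the HImode low part of (reg:SI 101).  */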
  /* Special handling for (set REG0 REG1) where REG0 is the
     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
     be used in the sequel, so (if easily done) change this insn to
     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
     that computed their value.  Then REG1 will become a dead store
     and won't cloud the situation for later optimizations.

     Do not make this change if REG1 is a hard register, because it will
     then be used in the sequel and we may be changing a two-operand insn
     into a three-operand insn.

     Also do not do this if we are operating on a copy of INSN.  */

  if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
      && NEXT_INSN (PREV_INSN (insn)) == insn
      && REG_P (SET_SRC (sets[0].rtl))
      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
    {
      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
      struct qty_table_elem *src_ent = &qty_table[src_q];

      if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
        {
          /* Scan for the previous nonnote insn, but stop at a basic
             block boundary.  */
          rtx prev = insn;
          rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
          do
            {
              prev = PREV_INSN (prev);
            }
          while (prev != bb_head && NOTE_P (prev));

          /* Do not swap the registers around if the previous instruction
             attaches a REG_EQUIV note to REG1.

             ??? It's not entirely clear whether we can transfer a REG_EQUIV
             from the pseudo that originally shadowed an incoming argument
             to another register.  Some uses of REG_EQUIV might rely on it
             being attached to REG1 rather than REG2.

             This section previously turned the REG_EQUIV into a REG_EQUAL
             note.  We cannot do that because REG_EQUIV may provide an
             uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
          if (NONJUMP_INSN_P (prev)
              && GET_CODE (PATTERN (prev)) == SET
              && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
              && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
            {
              rtx dest = SET_DEST (sets[0].rtl);
              rtx src = SET_SRC (sets[0].rtl);
              rtx note;

              validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
              validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
              validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
              apply_change_group ();

              /* If INSN has a REG_EQUAL note, and this note mentions
                 REG0, then we must delete it, because the value in
                 REG0 has changed.  If the note's value is REG1, we must
                 also delete it because that is now this insn's dest.  */
              note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
              if (note != 0
                  && (reg_mentioned_p (dest, XEXP (note, 0))
                      || rtx_equal_p (src, XEXP (note, 0))))
                remove_note (insn, note);
            }
        }
    }
}
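
/* Illustration of the swap above (hypothetical pseudos, reg 100 being the
   head of its quantity):

       (set (reg 101) (plus ...))      ;; prev
       (set (reg 100) (reg 101))       ;; insn

   becomes

       (set (reg 100) (plus ...))      ;; prev
       (set (reg 101) (reg 100))       ;; insn

   leaving the copy into reg 101 as a likely dead store.  */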
/* Remove from the hash table all expressions that reference memory.  */

static void
invalidate_memory (void)
{
  int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (p->in_memory)
          remove_from_table (p, i);
      }
}
/* Perform invalidation on the basis of everything about an insn
   except for invalidating the actual places that are SET in it.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.

   X is the pattern of the insn.  */

static void
invalidate_from_clobbers (rtx x)
{
  if (GET_CODE (x) == CLOBBER)
    {
      rtx ref = XEXP (x, 0);
      if (ref)
        {
          if (REG_P (ref) || GET_CODE (ref) == SUBREG
              || MEM_P (ref))
            invalidate (ref, VOIDmode);
          else if (GET_CODE (ref) == STRICT_LOW_PART
                   || GET_CODE (ref) == ZERO_EXTRACT)
            invalidate (XEXP (ref, 0), GET_MODE (ref));
        }
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == CLOBBER)
            {
              rtx ref = XEXP (y, 0);
              if (REG_P (ref) || GET_CODE (ref) == SUBREG
                  || MEM_P (ref))
                invalidate (ref, VOIDmode);
              else if (GET_CODE (ref) == STRICT_LOW_PART
                       || GET_CODE (ref) == ZERO_EXTRACT)
                invalidate (XEXP (ref, 0), GET_MODE (ref));
            }
        }
    }
}
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */

static rtx
cse_process_notes_1 (rtx x, rtx object, bool *changed)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case LO_SUM:
      return x;

    case MEM:
      validate_change (x, &XEXP (x, 0),
                       cse_process_notes (XEXP (x, 0), x, changed), 0);
      return x;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL)
        XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
      if (XEXP (x, 1))
        XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case SUBREG:
      {
        rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
        /* We don't substitute VOIDmode constants into these rtx,
           since they would impede folding.  */
        if (GET_MODE (new_rtx) != VOIDmode)
          validate_change (object, &XEXP (x, 0), new_rtx, 0);
        return x;
      }

    case REG:
      i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x)))
        {
          struct qty_table_elem *ent = &qty_table[i];

          if (ent->const_rtx != NULL_RTX
              && (CONSTANT_P (ent->const_rtx)
                  || REG_P (ent->const_rtx)))
            {
              rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
              if (new_rtx)
                return copy_rtx (new_rtx);
            }
        }

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL_RTX);

    default:
      break;
    }

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
                       cse_process_notes (XEXP (x, i), object, changed), 0);

  return x;
}

static rtx
cse_process_notes (rtx x, rtx object, bool *changed)
{
  rtx new_rtx = cse_process_notes_1 (x, object, changed);
  if (new_rtx != x)
    *changed = true;
  return new_rtx;
}
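
/* For instance (hypothetical): if (reg:SI 101) is currently known to equal
   (const_int 8), a note (expr_list:REG_EQUAL (plus:SI (reg:SI 101)
   (const_int 4))) has its register reference replaced by (const_int 8),
   letting later passes see the note operand as a foldable constant sum.  */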
/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.

   DATA is a pointer to a struct cse_basic_block_data, that is used to
   describe the path.
   It is filled with a queue of basic blocks, starting with FIRST_BB
   and following a trace through the CFG.

   If all paths starting at FIRST_BB have been followed, or no new path
   starting at FIRST_BB can be constructed, this function returns FALSE.
   Otherwise, DATA->path is filled and the function returns TRUE indicating
   that a path to follow was found.

   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
   block in the path will be FIRST_BB.  */

static bool
cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
               int follow_jumps)
{
  basic_block bb;
  edge e;
  int path_size;

  SET_BIT (cse_visited_basic_blocks, first_bb->index);

  /* See if there is a previous path.  */
  path_size = data->path_size;

  /* There is a previous path.  Make sure it started with FIRST_BB.  */
  if (path_size)
    gcc_assert (data->path[0].bb == first_bb);

  /* There was only one basic block in the last path.  Clear the path and
     return, so that paths starting at another basic block can be tried.  */
  if (path_size == 1)
    {
      path_size = 0;
      data->path[0].bb = NULL;
    }

  /* If the path was empty from the beginning, construct a new path.  */
  if (path_size == 0)
    data->path[path_size++].bb = first_bb;
  else
    {
      /* Otherwise, path_size must be equal to or greater than 2, because
         a previous path exists that is at least two basic blocks long.

         Update the previous branch path, if any.  If the last branch was
         previously along the branch edge, take the fallthrough edge now.  */
      while (path_size >= 2)
        {
          basic_block last_bb_in_path, previous_bb_in_path;
          edge e;

          --path_size;
          last_bb_in_path = data->path[path_size].bb;
          previous_bb_in_path = data->path[path_size - 1].bb;

          /* If we previously followed a path along the branch edge, try
             the fallthru edge now.  */
          if (EDGE_COUNT (previous_bb_in_path->succs) == 2
              && any_condjump_p (BB_END (previous_bb_in_path))
              && (e = find_edge (previous_bb_in_path, last_bb_in_path))
              && e == BRANCH_EDGE (previous_bb_in_path))
            {
              bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
              if (bb != EXIT_BLOCK_PTR
                  && single_pred_p (bb)
                  /* We used to assert here that we would only see blocks
                     that we have not visited yet.  But we may end up
                     visiting basic blocks twice if the CFG has changed
                     in this run of cse_main, because when the CFG changes
                     the topological sort of the CFG also changes.  A basic
                     block that previously had more than two predecessors
                     may now have a single predecessor, and become part of
                     a path that starts at another basic block.

                     We still want to visit each basic block only once, so
                     halt the path here if we have already visited BB.  */
                  && !TEST_BIT (cse_visited_basic_blocks, bb->index))
                {
                  SET_BIT (cse_visited_basic_blocks, bb->index);
                  data->path[path_size++].bb = bb;
                  break;
                }
            }

          data->path[path_size].bb = NULL;
        }

      /* If only one block remains in the path, bail.  */
      if (path_size == 1)
        {
          path_size = 0;
          data->path[0].bb = NULL;
        }
    }

  /* Extend the path if possible.  */
  if (follow_jumps)
    {
      bb = data->path[path_size - 1].bb;
      while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
        {
          if (single_succ_p (bb))
            e = single_succ_edge (bb);
          else if (EDGE_COUNT (bb->succs) == 2
                   && any_condjump_p (BB_END (bb)))
            {
              /* First try to follow the branch.  If that doesn't lead
                 to a useful path, follow the fallthru edge.  */
              e = BRANCH_EDGE (bb);
              if (!single_pred_p (e->dest))
                e = FALLTHRU_EDGE (bb);
            }
          else
            e = NULL;

          if (e && e->dest != EXIT_BLOCK_PTR
              && single_pred_p (e->dest)
              /* Avoid visiting basic blocks twice.  The large comment
                 above explains why this can happen.  */
              && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
            {
              basic_block bb2 = e->dest;
              SET_BIT (cse_visited_basic_blocks, bb2->index);
              data->path[path_size++].bb = bb2;
              bb = bb2;
            }
          else
            bb = NULL;
        }
    }

  data->path_size = path_size;
  return path_size != 0;
}
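
/* A sketch of the enumeration order on a hypothetical CFG: if block 2 ends
   in a conditional jump with branch target 4 and fallthrough 3, and both
   targets have single predecessors, successive calls starting at block 2
   can first yield the path {2, 4} (the branch edge is preferred) and then
   {2, 3}, flipping the last branch to its fallthrough edge.  */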
/* Dump the path in DATA to file F.  NSETS is the number of sets
   in the path.  */

static void
cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
{
  int path_entry;

  fprintf (f, ";; Following path with %d sets: ", nsets);
  for (path_entry = 0; path_entry < data->path_size; path_entry++)
    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
  fputc ('\n', dump_file);
  fflush (f);
}
/* Return true if BB has exception handling successor edges.  */

static bool
have_eh_succ_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_EH)
      return true;

  return false;
}
/* Scan to the end of the path described by DATA.  Return an estimate of
   the total number of SETs of all insns in the path.  */

static void
cse_prescan_path (struct cse_basic_block_data *data)
{
  int nsets = 0;
  int path_entry;
  int path_size = data->path_size;

  /* Scan to end of each basic block in the path.  */
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx insn;

      bb = data->path[path_entry].bb;

      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          /* A PARALLEL can have lots of SETs in it,
             especially if it is really an ASM_OPERANDS.  */
          if (GET_CODE (PATTERN (insn)) == PARALLEL)
            nsets += XVECLEN (PATTERN (insn), 0);
          else
            nsets += 1;
        }
    }

  data->nsets = nsets;
}
/* Process a single extended basic block described by EBB_DATA.  */

static void
cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
{
  int path_size = ebb_data->path_size;
  int path_entry;
  int num_insns = 0;

  /* Allocate the space needed by qty_table.  */
  qty_table = XNEWVEC (struct qty_table_elem, max_qty);

  new_basic_block ();
  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx insn;

      bb = ebb_data->path[path_entry].bb;

      /* Invalidate recorded information for eh regs if there is an EH
	 edge pointing to that bb.  */
      if (bb_has_eh_pred (bb))
	{
	  df_ref *def_rec;

	  for (def_rec = df_get_artificial_defs (bb->index);
	       *def_rec; def_rec++)
	    {
	      df_ref def = *def_rec;
	      if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
		invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
	    }
	}

      FOR_BB_INSNS (bb, insn)
	{
	  optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);

	  /* If we have processed 1,000 insns, flush the hash table to
	     avoid extreme quadratic behavior.  We must not include NOTEs
	     in the count since there may be more of them when generating
	     debugging information.  If we clear the table at different
	     times, code generated with -g -O might be different than code
	     generated with -O but not -g.

	     FIXME: This is a real kludge and needs to be done some other
	     way.  */
	  if (INSN_P (insn)
	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
	    {
	      flush_hash_table ();
	      num_insns = 0;
	    }

	  if (INSN_P (insn))
	    {
	      /* Process notes first so we have all notes in canonical forms
		 when looking for duplicate operations.  */
	      if (REG_NOTES (insn))
		{
		  bool changed = false;
		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
							NULL_RTX, &changed);
		  if (changed)
		    df_notes_rescan (insn);
		}

	      cse_insn (insn);

	      /* If we haven't already found an insn where we added a
		 LABEL_REF, check this one.  */
	      if (INSN_P (insn) && !recorded_label_ref
		  && for_each_rtx (&PATTERN (insn), check_for_label_ref,
				   (void *) insn))
		recorded_label_ref = true;

#ifdef HAVE_cc0
	      /* If the previous insn set CC0 and this insn no longer
		 references CC0, delete the previous insn.  Here we use
		 the fact that nothing expects CC0 to be valid over an insn,
		 which is true until the final pass.  */
	      {
		rtx prev_insn, tem;

		prev_insn = PREV_INSN (insn);
		if (prev_insn && NONJUMP_INSN_P (prev_insn)
		    && (tem = single_set (prev_insn)) != 0
		    && SET_DEST (tem) == cc0_rtx
		    && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
		  delete_insn (prev_insn);
	      }

	      /* If this insn is not the last insn in the basic block,
		 it will be PREV_INSN(insn) in the next iteration.  If
		 we recorded any CC0-related information for this insn,
		 remember it.  */
	      if (insn != BB_END (bb))
		{
		  prev_insn_cc0 = this_insn_cc0;
		  prev_insn_cc0_mode = this_insn_cc0_mode;
		}
#endif
	    }
	}

      /* With non-call exceptions, we are not always able to update
	 the CFG properly inside cse_insn.  So clean up possibly
	 redundant EH edges here.  */
      if (flag_non_call_exceptions && have_eh_succ_edges (bb))
	cse_cfg_altered |= purge_dead_edges (bb);

      /* If we changed a conditional jump, we may have terminated
	 the path we are following.  Check that by verifying that
	 the edge we would take still exists.  If the edge does
	 not exist anymore, purge the remainder of the path.
	 Note that this will cause us to return to the caller.  */
      if (path_entry < path_size - 1)
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  if (!find_edge (bb, next_bb))
	    {
	      do
		{
		  path_size--;

		  /* If we truncate the path, we must also reset the
		     visited bit on the remaining blocks in the path,
		     or we will never visit them at all.  */
		  RESET_BIT (cse_visited_basic_blocks,
			     ebb_data->path[path_size].bb->index);
		  ebb_data->path[path_size].bb = NULL;
		}
	      while (path_size - 1 != path_entry);
	      ebb_data->path_size = path_size;
	    }
	}

      /* If this is a conditional jump insn, record any known
	 equivalences due to the condition being tested.  */
      insn = BB_END (bb);
      if (path_entry < path_size - 1
	  && JUMP_P (insn)
	  && single_set (insn)
	  && any_condjump_p (insn))
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
	  record_jump_equiv (insn, taken);
	}

#ifdef HAVE_cc0
      /* Clear the CC0-tracking related insns, they can't provide
	 useful information across basic block boundaries.  */
      prev_insn_cc0 = 0;
#endif
    }

  gcc_assert (next_qty <= max_qty);

  free (qty_table);
}
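/* For illustration (not part of the original sources): suppose the path
   being followed is {A, B, C} and, while processing B, cse_insn
   simplifies the conditional jump at the end of B so that B no longer
   branches or falls through to C.  Then find_edge (B, C) fails above,
   C's bit in cse_visited_basic_blocks is cleared so that a later
   iteration of cse_main can still reach it, and the path is truncated
   to {A, B} before control returns to the caller.  */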
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the function.

   Return 2 if jump optimizations should be redone due to simplifications
   in conditional jump instructions.
   Return 1 if the CFG should be cleaned up because it has been modified.
   Return 0 otherwise.  */

static int
cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  int *rc_order = XNEWVEC (int, last_basic_block);
  int i, n_blocks;

  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();
  df_set_flags (DF_DEFER_INSN_RESCAN);

  reg_scan (get_insns (), max_reg_num ());
  init_cse_reg_info (nregs);

  ebb_data.path = XNEWVEC (struct branch_path,
			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));

  cse_cfg_altered = false;
  cse_jumps_altered = false;
  recorded_label_ref = false;
  constant_pool_entries_cost = 0;
  constant_pool_entries_regcost = 0;
  ebb_data.path_size = 0;
  ebb_data.nsets = 0;
  rtl_hooks = cse_rtl_hooks;

  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (cse_visited_basic_blocks);

  /* Loop over basic blocks in reverse completion order (RPO),
     excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
	 processed before.  */
      do
	{
	  bb = BASIC_BLOCK (rc_order[i++]);
	}
      while (TEST_BIT (cse_visited_basic_blocks, bb->index)
	     && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
	{
	  /* Pre-scan the path.  */
	  cse_prescan_path (&ebb_data);

	  /* If this basic block has no sets, skip it.  */
	  if (ebb_data.nsets == 0)
	    continue;

	  /* Get a reasonable estimate for the maximum number of qty's
	     needed for this path.  For this, we take the number of sets
	     and multiply that by MAX_RECOG_OPERANDS.  */
	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;

	  /* Dump the path we're about to process.  */
	  if (dump_file)
	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

	  cse_extended_basic_block (&ebb_data);
	}
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  if (cse_jumps_altered || recorded_label_ref)
    return 2;
  else if (cse_cfg_altered)
    return 1;
  else
    return 0;
}
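/* For illustration (not part of the original sources): in a diamond CFG

	  A
	 / \
	B   C
	 \ /
	  D

   the first trip through the outer loop above may build the path
   {A, B, D}, marking all three blocks in cse_visited_basic_blocks.
   The next trip skips forward in the RPO queue to C, the first block
   not yet visited, so each block is processed as part of exactly one
   path.  */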
/* Called via for_each_rtx to see if an insn is using a LABEL_REF for
   which there isn't a REG_LABEL_OPERAND note.
   Return one if so.  DATA is the insn.  */

static int
check_for_label_ref (rtx *rtl, void *data)
{
  rtx insn = (rtx) data;

  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  return (GET_CODE (*rtl) == LABEL_REF
	  && ! LABEL_REF_NONLOCAL_P (*rtl)
	  && (!JUMP_P (insn)
	      || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
	  && LABEL_P (XEXP (*rtl, 0))
	  && INSN_UID (XEXP (*rtl, 0)) != 0
	  && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
}
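/* For illustration (not part of the original sources): when CSE
   propagates a label address, e.g. rewriting

	(set (reg:SI 101) (reg:SI 100))

   into

	(set (reg:SI 101) (label_ref 23))

   the new non-jump use of the label needs a REG_LABEL_OPERAND note,
   and only the jump pass knows how to place one.  The check above
   detects that situation so cse_main can request a jump rerun.  */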
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, which means that we must count
   uses of a SET_DEST regardless because the insn can't be deleted here.  */

static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      count_reg_usage (SET_SRC (x), counts,
		       dest ? dest : SET_DEST (x),
		       incr);
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect dest to be NULL_RTX here.  If the insn may trap, mark
	 this fact by setting DEST to pc_rtx.  */
      if (flag_non_call_exceptions && may_trap_p (PATTERN (x)))
	dest = pc_rtx;
      if (code == CALL_INSN)
	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	    /* This REG_EQUAL note describes the result of a function call.
	       Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, dest, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* If the asm is volatile, then this insn cannot be deleted,
	 and so the inputs *must* be live.  */
      if (MEM_VOLATILE_P (x))
	dest = NULL_RTX;
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    case INSN_LIST:
      gcc_unreachable ();

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
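/* For illustration (not part of the original sources): given

	(set (reg:SI 100) (div:SI (reg:SI 100) (reg:SI 101)))

   the use of register 100 in the source is normally not counted,
   because DEST is (reg:SI 100) and deleting the insn would remove that
   use together with the set.  If the division may trap under
   -fnon-call-exceptions, DEST is pc_rtx instead: the insn itself
   cannot be deleted, so its register uses are counted in full and the
   insns computing its operands are kept live as well.  */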
/* Return true if set is live.  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_insn (insn)) == 0
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (!REG_P (SET_DEST (set))
	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	   || counts[REGNO (SET_DEST (set))] != 0
	   || side_effects_p (SET_SRC (set)))
    return true;
  return false;
}
/* Return true if insn is live.  */
static bool
insn_live_p (rtx insn, int *counts)
{
  int i;

  if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, insn, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else
    return true;
}
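/* For illustration (not part of the original sources): an insn

	(set (reg:SI 103) (reg:SI 104))

   is dead once counts[103] is zero (pseudo 103 is never used anywhere),
   and a self-copy with 103 == 104 is dead via set_noop_p regardless of
   the counts.  A PARALLEL is live as soon as any of its SETs is live,
   or if it contains anything other than SET, CLOBBER or USE.  */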
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;
  rtx insn, prev;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  counts = XCNEWVEC (int, nreg);
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
	{
	  count_reg_usage (insn, counts, NULL_RTX, -1);
	  delete_insn_and_edges (insn);
	  ndead++;
	}
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
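/* A minimal usage sketch (not part of the original sources): callers
   typically run this right after a CSE pass has replaced expressions
   and left dead copies behind, as rest_of_handle_cse2 below does:

	delete_trivially_dead_insns (get_insns (), max_reg_num ());

   The backward walk matters: deleting a dead insn decrements the use
   counts of its inputs, which can make earlier insns dead in turn and
   lets a whole dead chain disappear in a single pass.  */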
/* This function is called via for_each_rtx.  The argument, NEWREG, is
   a condition code register with the desired mode.  If we are looking
   at the same register in a different mode, replace it with
   NEWREG.  */

static int
cse_change_cc_mode (rtx *loc, void *data)
{
  struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;

  if (*loc
      && REG_P (*loc)
      && REGNO (*loc) == REGNO (args->newreg)
      && GET_MODE (*loc) != GET_MODE (args->newreg))
    {
      validate_change (args->insn, loc, args->newreg, 1);

      return -1;
    }
  return 0;
}

/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx insn, rtx newreg)
{
  struct change_cc_mode_args args;
  bool success;

  if (!INSN_P (insn))
    return;

  args.insn = insn;
  args.newreg = newreg;

  for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
  for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);

  /* If the following assertion was triggered, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes only can be considered compatible if the insn - with the mode
     replaced by any of the compatible modes - can still be recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
{
  rtx insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static enum machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      enum machine_mode set_mode;
	      enum machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))
		{
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  enum machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn_and_edges (insns[i]);
    }

  return mode;
}
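/* For illustration (not part of the original sources; the modes are
   hypothetical): suppose BB ends with

	(set (reg:CCZ 17) (compare:CCZ (reg:SI 100) (const_int 0)))

   and its single-predecessor successor recomputes

	(set (reg:CCGC 17) (compare:CCGC (reg:SI 100) (const_int 0)))

   The two COMPAREs have identical operands, so targetm.cc_modes_compatible
   is asked for a mode that serves both users.  If it returns one, the
   first set is rewritten to that mode, the intervening references are
   fixed up with cse_change_cc_mode_insns, and the duplicate set is
   deleted.  */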
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}
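/* For illustration (not part of the original sources): on i386 the
   fixed_condition_code_regs hook looks roughly like

	static bool
	ix86_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
	{
	  *p1 = FLAGS_REG;
	  *p2 = FPSR_REG;
	  return true;
	}

   so cc_reg_1 and cc_reg_2 above become CCmode copies of the flags and
   FP status registers, and the pass scans for conditional jumps that
   test either of them.  */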
/* Perform common subexpression elimination.  Nonzero value from
   `cse_main' means that jumps were simplified and some code may now
   be unreachable, so do jump optimization again.  */
static bool
gate_handle_cse (void)
{
  return optimize > 0;
}

static unsigned int
rest_of_handle_cse (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1 || optimize > 1)
    cleanup_cfg (0);

  return 0;
}

struct rtl_opt_pass pass_cse =
{
 {
  RTL_PASS,
  "cse1",                               /* name */
  gate_handle_cse,                      /* gate */
  rest_of_handle_cse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect |
  TODO_verify_flow,                     /* todo_flags_finish */
 }
};
static bool
gate_handle_cse2 (void)
{
  return optimize > 0 && flag_rerun_cse_after_loop;
}

/* Run second CSE pass after loop optimizations.  */
static unsigned int
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because it
     makes it harder for that pass to determine whether a jump can be
     bypassed safely.  */
  cse_condition_code_reg ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  cse_not_expected = 1;
  return 0;
}

struct rtl_opt_pass pass_cse2 =
{
 {
  RTL_PASS,
  "cse2",                               /* name */
  gate_handle_cse2,                     /* gate */
  rest_of_handle_cse2,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect |
  TODO_verify_flow                      /* todo_flags_finish */