1 /* Common subexpression elimination for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4    Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
23 /* stdio.h must precede rtl.h for FFS. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "basic-block.h"
34 #include "insn-config.h"
45 #include "rtlhooks-def.h"
46 #include "tree-pass.h"
50 /* The basic idea of common subexpression elimination is to go
51 through the code, keeping a record of expressions that would
52 have the same value at the current scan point, and replacing
53 expressions encountered with the cheapest equivalent expression.
55 It is too complicated to keep track of the different possibilities
56 when control paths merge in this code; so, at each label, we forget all
57 that is known and start fresh. This can be described as processing each
58 extended basic block separately.  We have a separate pass to perform
59 global CSE.
61 Note CSE can turn a conditional or computed jump into a nop or
62 an unconditional jump. When this occurs we arrange to run the jump
63 optimizer after CSE to delete the unreachable code.
65 We use two data structures to record the equivalent expressions:
66 a hash table for most expressions, and a vector of "quantity
67 numbers" to record equivalent (pseudo) registers.
69 The use of the special data structure for registers is desirable
70 because it is faster.  It is possible because register references
71 contain a fairly small number, the register number, taken from
72 a contiguously allocated series, and two register references are
73 identical if they have the same number. General expressions
74 do not have any such thing, so the only way to retrieve the
75 information recorded on an expression other than a register
76 is to keep it in a hash table.
78 Registers and "quantity numbers":
80 At the start of each basic block, all of the (hardware and pseudo)
81 registers used in the function are given distinct quantity
82 numbers to indicate their contents. During scan, when the code
83 copies one register into another, we copy the quantity number.
84 When a register is loaded in any other way, we allocate a new
85 quantity number to describe the value generated by this operation.
86 `REG_QTY (N)' records what quantity register N is currently thought
87 of containing.
89 All real quantity numbers are greater than or equal to zero.
90 If register N has not been assigned a quantity, `REG_QTY (N)' will
91 equal -N - 1, which is always negative.
93 Quantity numbers below zero do not exist and none of the `qty_table'
94 entries should be referenced with a negative index.
96 We also maintain a bidirectional chain of registers for each
97 quantity number. The `qty_table` members `first_reg' and `last_reg',
98 and `reg_eqv_table' members `next' and `prev' hold these chains.
100 The first register in a chain is the one whose lifespan is least local.
101 Among equals, it is the one that was seen first.
102 We replace any equivalent register with that one.
104 If two registers have the same quantity number, it must be true that
105 REG expressions with qty_table `mode' must be in the hash table for both
106 registers and must be in the same class.
108 The converse is not true. Since hard registers may be referenced in
109 any mode, two REG expressions might be equivalent in the hash table
110 but not have the same quantity number if the quantity number of one
111 of the registers is not the same mode as those expressions.
113 Constants and quantity numbers
115 When a quantity has a known constant value, that value is stored
116 in the appropriate qty_table `const_rtx'. This is in addition to
117 putting the constant in the hash table as is usual for non-regs.
119 Whether a reg or a constant is preferred is determined by the configuration
120 macro CONST_COSTS and will often depend on the constant value. In any
121 event, expressions containing constants can be simplified, by fold_rtx.
123 When a quantity has a known nearly constant value (such as an address
124 of a stack slot), that value is stored in the appropriate qty_table
125 `const_rtx'.
127 Integer constants don't have a machine mode. However, cse
128 determines the intended machine mode from the destination
129 of the instruction that moves the constant. The machine mode
130 is recorded in the hash table along with the actual RTL
131 constant expression so that different modes are kept separate.
135 To record known equivalences among expressions in general
136 we use a hash table called `table'. It has a fixed number of buckets
137 that contain chains of `struct table_elt' elements for expressions.
138 These chains connect the elements whose expressions have the same
139 hash codes.
141 Other chains through the same elements connect the elements which
142 currently have equivalent values.
144 Register references in an expression are canonicalized before hashing
145 the expression. This is done using `reg_qty' and qty_table `first_reg'.
146 The hash code of a register reference is computed using the quantity
147 number, not the register number.
149 When the value of an expression changes, it is necessary to remove from the
150 hash table not just that expression but all expressions whose values
151 could be different as a result.
153 1. If the value changing is in memory, except in special cases
154 ANYTHING referring to memory could be changed. That is because
155 nobody knows where a pointer does not point.
156 The function `invalidate_memory' removes what is necessary.
158 The special cases are when the address is constant or is
159 a constant plus a fixed register such as the frame pointer
160 or a static chain pointer. When such addresses are stored in,
161 we can tell exactly which other such addresses must be invalidated
162 due to overlap. `invalidate' does this.
163 All expressions that refer to non-constant
164 memory addresses are also invalidated. `invalidate_memory' does this.
166 2. If the value changing is a register, all expressions
167 containing references to that register, and only those,
168 must be removed from the table.
170 Because searching the entire hash table for expressions that contain
171 a register is very slow, we try to figure out when it isn't necessary.
172 Precisely, this is necessary only when expressions have been
173 entered in the hash table using this register, and then the value has
174 changed, and then another expression wants to be added to refer to
175 the register's new value. This sequence of circumstances is rare
176 within any one basic block.
178 `REG_TICK' and `REG_IN_TABLE', accessors for members of
179 cse_reg_info, are used to detect this case. REG_TICK (i) is
180 incremented whenever a value is stored in register i.
181 REG_IN_TABLE (i) holds -1 if no references to register i have been
182 entered in the table; otherwise, it contains the value REG_TICK (i)
183 had when the references were entered. If we want to enter a
184 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
185 remove old references. Until we want to enter a new entry, the
186 mere fact that the two vectors don't match makes the entries be
187 ignored if anyone tries to match them.
189 Registers themselves are entered in the hash table as well as in
190 the equivalent-register chains. However, `REG_TICK' and
191 `REG_IN_TABLE' do not apply to expressions which are simple
192 register references. These expressions are removed from the table
193 immediately when they become invalid, and this can be done even if
194 we do not immediately search for all the expressions that refer to
195 the register.
197 A CLOBBER rtx in an instruction invalidates its operand for further
198 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
199 invalidates everything that resides in memory.
203 Constant expressions that differ only by an additive integer
204 are called related. When a constant expression is put in
205 the table, the related expression with no constant term
206 is also entered. These are made to point at each other
207 so that it is possible to find out if there exists any
208 register equivalent to an expression related to a given expression. */
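/* Illustrative sketch, added for exposition only; it is not part of the
   pass.  Every name prefixed with `toy_' is hypothetical.  It models the
   quantity-number convention described above: every entry starts out as
   -N - 1 (no quantity assigned), a register-register copy shares the
   source's quantity, and any other store allocates a fresh quantity.  */
#if 0
#define TOY_NREGS 8

static int toy_reg_qty[TOY_NREGS];	/* analogue of REG_QTY (N) */
static int toy_next_qty;		/* analogue of next_qty */

static void
toy_start_block (void)
{
  int n;
  for (n = 0; n < TOY_NREGS; n++)
    toy_reg_qty[n] = -n - 1;		/* "no quantity assigned yet" */
  toy_next_qty = 0;
}

/* SRC is the source register of a copy, or -1 for any other store.  */
static void
toy_record_set (int dest, int src)
{
  if (src >= 0)
    {
      /* Copying a register: make sure the source has a quantity,
	 then let the destination share it.  */
      if (toy_reg_qty[src] < 0)
	toy_reg_qty[src] = toy_next_qty++;
      toy_reg_qty[dest] = toy_reg_qty[src];
    }
  else
    /* Loaded in any other way: describe the new value with a
       fresh quantity number.  */
    toy_reg_qty[dest] = toy_next_qty++;
}
#endif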
210 /* Length of qty_table vector.  We know in advance we will not need
211    a quantity number this big.  */
213 static int max_qty;
215 /* Next quantity number to be allocated.
216    This is 1 + the largest number needed so far.  */
218 static int next_qty;
220 /* Per-qty information tracking.
222 `first_reg' and `last_reg' track the head and tail of the
223 chain of registers which currently contain this quantity.
225 `mode' contains the machine mode of this quantity.
227    `const_rtx' holds the rtx of the constant value of this
228    quantity, if known.  A sum of the frame/arg pointer
229    and a constant can also be entered here.  When this holds
230    a known value, `const_insn' is the insn which stored the
231    constant value.
233 `comparison_{code,const,qty}' are used to track when a
234 comparison between a quantity and some constant or register has
235 been passed. In such a case, we know the results of the comparison
236 in case we see it again. These members record a comparison that
237 is known to be true. `comparison_code' holds the rtx code of such
238 a comparison, else it is set to UNKNOWN and the other two
239 comparison members are undefined. `comparison_const' holds
240 the constant being compared against, or zero if the comparison
241 is not against a constant. `comparison_qty' holds the quantity
242 being compared against when the result is known. If the comparison
243 is not with a register, `comparison_qty' is -1. */
245 struct qty_table_elem
246 {
247   rtx const_rtx;
248   rtx const_insn;
249   rtx comparison_const;
250   int comparison_qty;
251   unsigned int first_reg, last_reg;
252   /* The sizes of these fields should match the sizes of the
253      code and mode fields of struct rtx_def (see rtl.h).  */
254   ENUM_BITFIELD(rtx_code) comparison_code : 16;
255   ENUM_BITFIELD(machine_mode) mode : 8;
256 };
258 /* The table of all qtys, indexed by qty number. */
259 static struct qty_table_elem *qty_table;
261 /* Structure used to pass arguments via for_each_rtx to function
262 cse_change_cc_mode. */
263 struct change_cc_mode_args
270 /* For machines that have a CC0, we do not record its value in the hash
271 table since its use is guaranteed to be the insn immediately following
272 its definition and any other insn is presumed to invalidate it.
274 Instead, we store below the current and last value assigned to CC0.
275 If it should happen to be a constant, it is stored in preference
276 to the actual assigned value. In case it is a constant, we store
277 the mode in which the constant should be interpreted. */
279 static rtx this_insn_cc0, prev_insn_cc0;
280 static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
283 /* Insn being scanned. */
285 static rtx this_insn;
286 static bool optimize_this_for_speed_p;
288 /* Index by register number, gives the number of the next (or
289    previous) register in the chain of registers sharing the same
290    value.
292 Or -1 if this register is at the end of the chain.
294 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
296 /* Per-register equivalence chain.  */
297 struct reg_eqv_elem
298 {
299   int next, prev;
300 };
302 /* The table of all register equivalence chains.  */
303 static struct reg_eqv_elem *reg_eqv_table;
305 struct cse_reg_info
306 {
307   /* The timestamp at which this register is initialized.  */
308   unsigned int timestamp;
310   /* The quantity number of the register's current contents.  */
311   int reg_qty;
313   /* The number of times the register has been altered in the current
314      basic block.  */
315   int reg_tick;
317   /* The REG_TICK value at which rtx's containing this register are
318      valid in the hash table.  If this does not equal the current
319      reg_tick value, such expressions existing in the hash table are
320      invalid.  */
321   int reg_in_table;
323   /* The SUBREG that was set when REG_TICK was last incremented.  Set
324      to -1 if the last store was to the whole register, not a subreg.  */
325   unsigned int subreg_ticked;
326 };
328 /* A table of cse_reg_info indexed by register numbers.  */
329 static struct cse_reg_info *cse_reg_info_table;
331 /* The size of the above table.  */
332 static unsigned int cse_reg_info_table_size;
334 /* The index of the first entry that has not been initialized.  */
335 static unsigned int cse_reg_info_table_first_uninitialized;
337 /* The timestamp at the beginning of the current run of
338 cse_extended_basic_block. We increment this variable at the beginning of
339 the current run of cse_extended_basic_block. The timestamp field of a
340 cse_reg_info entry matches the value of this variable if and only
341 if the entry has been initialized during the current run of
342 cse_extended_basic_block. */
343 static unsigned int cse_reg_info_timestamp;
345 /* A HARD_REG_SET containing all the hard registers for which there is
346 currently a REG expression in the hash table. Note the difference
347 from the above variables, which indicate if the REG is mentioned in some
348 expression in the table. */
350 static HARD_REG_SET hard_regs_in_table;
352 /* True if CSE has altered the CFG. */
353 static bool cse_cfg_altered;
355 /* True if CSE has altered conditional jump insns in such a way
356 that jump optimization should be redone. */
357 static bool cse_jumps_altered;
359 /* True if we put a LABEL_REF into the hash table for an INSN
360    without a REG_LABEL_OPERAND; if so, we have to rerun jump after
361    CSE to put in the note.  */
362 static bool recorded_label_ref;
364 /* canon_hash stores 1 in do_not_record
365    if it notices a reference to CC0, PC, or some other volatile
366    subexpression.  */
368 static int do_not_record;
370 /* canon_hash stores 1 in hash_arg_in_memory
371 if it notices a reference to memory within the expression being hashed. */
373 static int hash_arg_in_memory;
375 /* The hash table contains buckets which are chains of `struct table_elt's,
376 each recording one expression's information.
377 That expression is in the `exp' field.
379 The canon_exp field contains a canonical (from the point of view of
380 alias analysis) version of the `exp' field.
382 Those elements with the same hash code are chained in both directions
383 through the `next_same_hash' and `prev_same_hash' fields.
385    Each set of expressions with equivalent values
386    is on a two-way chain through the `next_same_value'
387 and `prev_same_value' fields, and all point with
388 the `first_same_value' field at the first element in
389 that chain. The chain is in order of increasing cost.
390 Each element's cost value is in its `cost' field.
392 The `in_memory' field is nonzero for elements that
393 involve any reference to memory. These elements are removed
394 whenever a write is done to an unidentified location in memory.
395 To be safe, we assume that a memory address is unidentified unless
396 the address is either a symbol constant or a constant plus
397 the frame pointer or argument pointer.
399 The `related_value' field is used to connect related expressions
400 (that differ by adding an integer).
401 The related expressions are chained in a circular fashion.
402    `related_value' is zero for expressions for which this
403    chain is not useful.
405 The `cost' field stores the cost of this element's expression.
406 The `regcost' field stores the value returned by approx_reg_cost for
407 this element's expression.
409    The `is_const' flag is set if the element is a constant (including
410    a CONST_INT).
412 The `flag' field is used as a temporary during some search routines.
414 The `mode' field is usually the same as GET_MODE (`exp'), but
415 if `exp' is a CONST_INT and has no machine mode then the `mode'
416 field is the mode it was being used as. Each constant is
417 recorded separately for each mode it is used with. */
419 struct table_elt
420 {
421   rtx exp;
422   rtx canon_exp;
423   struct table_elt *next_same_hash;
424   struct table_elt *prev_same_hash;
425   struct table_elt *next_same_value;
426   struct table_elt *prev_same_value;
427   struct table_elt *first_same_value;
428   struct table_elt *related_value;
429   int cost;
430   int regcost;
431   /* The size of this field should match the size
432      of the mode field of struct rtx_def (see rtl.h).  */
433   ENUM_BITFIELD(machine_mode) mode : 8;
434   char in_memory;
435   char is_const;
436   char flag;
437 };
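/* Illustrative sketch, not part of the pass: walking one value class as
   described above.  first_same_value heads the chain, and the chain is
   kept in order of increasing cost, so the first valid entry found this
   way is the cheapest usable equivalent.  The helper name
   toy_cheapest_valid is hypothetical.  */
#if 0
static struct table_elt *
toy_cheapest_valid (struct table_elt *elt)
{
  struct table_elt *p;

  for (p = elt->first_same_value; p; p = p->next_same_value)
    if (REG_P (p->exp) || exp_equiv_p (p->exp, p->exp, 1, false))
      return p;
  return 0;
}
#endif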
439 /* We don't want a lot of buckets, because we rarely have very many
440 things stored in the hash table, and a lot of buckets slows
441 down a lot of loops that happen frequently. */
443 #define HASH_SIZE (1 << HASH_SHIFT)
444 #define HASH_MASK (HASH_SIZE - 1)
446 /* Compute hash code of X in mode M.  Special-case the case where X is a pseudo
447    register (hard registers may require `do_not_record' to be set).  */
449 #define HASH(X, M)	\
450  ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
451   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
452   : canon_hash (X, M)) & HASH_MASK)
454 /* Like HASH, but without side-effects. */
455 #define SAFE_HASH(X, M) \
456 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
457 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
458 : safe_hash (X, M)) & HASH_MASK)
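/* Typical use of the two macros above, sketched here for exposition and
   mirroring the lookup-then-insert pattern used later in this file: hash
   an expression once, look it up, and enter it if absent.  The helper
   name toy_find_or_insert is hypothetical.  */
#if 0
static struct table_elt *
toy_find_or_insert (rtx x, enum machine_mode mode)
{
  unsigned hash = HASH (x, mode);
  struct table_elt *elt = lookup (x, hash, mode);

  if (elt == 0)
    elt = insert (x, NULL, hash, mode);
  return elt;
}
#endif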
460 /* Determine whether register number N is considered a fixed register for the
461 purpose of approximating register costs.
462    It is desirable to replace other regs with fixed regs, to reduce need for
463    non-fixed hard regs.
464 A reg wins if it is either the frame pointer or designated as fixed. */
465 #define FIXED_REGNO_P(N) \
466 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
467 || fixed_regs[N] || global_regs[N])
469 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
470 hard registers and pointers into the frame are the cheapest with a cost
471 of 0. Next come pseudos with a cost of one and other hard registers with
472 a cost of 2. Aside from these special cases, call `rtx_cost'. */
474 #define CHEAP_REGNO(N) \
475 (REGNO_PTR_FRAME_P(N) \
476 || (HARD_REGISTER_NUM_P (N) \
477 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
479 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
480 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
482 /* Get the number of times this register has been updated in this
483    basic block.  */
485 #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
487 /* Get the point at which REG was recorded in the table. */
489 #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
491 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
492    subreg).  */
494 #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
496 /* Get the quantity number for REG. */
498 #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
500 /* Determine if the quantity number for register X represents a valid index
501 into the qty_table. */
503 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
505 /* Compare table_elt X and Y and return true iff X is cheaper than Y. */
507 #define CHEAPER(X, Y) \
508 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
510 static struct table_elt *table[HASH_SIZE];
512 /* Chain of `struct table_elt's made so far for this function
513 but currently removed from the table. */
515 static struct table_elt *free_element_chain;
517 /* Set to the cost of a constant pool reference if one was found for a
518    symbolic constant.  If this was found, it means we should try to
519    convert constants into constant pool entries if they don't fit in
520    the insn.  */
522 static int constant_pool_entries_cost;
523 static int constant_pool_entries_regcost;
525 /* Trace a path through the CFG.  */
527 struct branch_path
528 {
529   /* The basic block for this path entry.  */
530   basic_block bb;
531 };
533 /* This data describes a block that will be processed by
534 cse_extended_basic_block. */
536 struct cse_basic_block_data
537 {
538   /* Total number of SETs in block.  */
539   int nsets;
540   /* Size of current branch path, if any.  */
541   int path_size;
542   /* Current path, indicating which basic_blocks will be processed.  */
543   struct branch_path *path;
544 };
547 /* Pointers to the live in/live out bitmaps for the boundaries of the
548    current EBB.  */
549 static bitmap cse_ebb_live_in, cse_ebb_live_out;
551 /* A simple bitmap to track which basic blocks have been visited
552 already as part of an already processed extended basic block. */
553 static sbitmap cse_visited_basic_blocks;
555 static bool fixed_base_plus_p (rtx x);
556 static int notreg_cost (rtx, enum rtx_code);
557 static int approx_reg_cost_1 (rtx *, void *);
558 static int approx_reg_cost (rtx);
559 static int preferable (int, int, int, int);
560 static void new_basic_block (void);
561 static void make_new_qty (unsigned int, enum machine_mode);
562 static void make_regs_eqv (unsigned int, unsigned int);
563 static void delete_reg_equiv (unsigned int);
564 static int mention_regs (rtx);
565 static int insert_regs (rtx, struct table_elt *, int);
566 static void remove_from_table (struct table_elt *, unsigned);
567 static void remove_pseudo_from_table (rtx, unsigned);
568 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
569 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
570 static rtx lookup_as_function (rtx, enum rtx_code);
571 static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
572 					     enum machine_mode, int, int);
573 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
574 				 enum machine_mode);
575 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
576 static void invalidate (rtx, enum machine_mode);
577 static bool cse_rtx_varies_p (const_rtx, bool);
578 static void remove_invalid_refs (unsigned int);
579 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
580 					enum machine_mode);
581 static void rehash_using_reg (rtx);
582 static void invalidate_memory (void);
583 static void invalidate_for_call (void);
584 static rtx use_related_value (rtx, struct table_elt *);
586 static inline unsigned canon_hash (rtx, enum machine_mode);
587 static inline unsigned safe_hash (rtx, enum machine_mode);
588 static inline unsigned hash_rtx_string (const char *);
590 static rtx canon_reg (rtx, rtx);
591 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
592 					   enum machine_mode *,
593 					   enum machine_mode *);
594 static rtx fold_rtx (rtx, rtx);
595 static rtx equiv_constant (rtx);
596 static void record_jump_equiv (rtx, bool);
597 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
598 			      int);
599 static void cse_insn (rtx);
600 static void cse_prescan_path (struct cse_basic_block_data *);
601 static void invalidate_from_clobbers (rtx);
602 static rtx cse_process_notes (rtx, rtx, bool *);
603 static void cse_extended_basic_block (struct cse_basic_block_data *);
604 static void count_reg_usage (rtx, int *, rtx, int);
605 static int check_for_label_ref (rtx *, void *);
606 extern void dump_class (struct table_elt *);
607 static void get_cse_reg_info_1 (unsigned int regno);
608 static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
609 static int check_dependence (rtx *, void *);
611 static void flush_hash_table (void);
612 static bool insn_live_p (rtx, int *);
613 static bool set_live_p (rtx, rtx, int *);
614 static int cse_change_cc_mode (rtx *, void *);
615 static void cse_change_cc_mode_insn (rtx, rtx);
616 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
617 static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
618 				       bool);
621 #undef RTL_HOOKS_GEN_LOWPART
622 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
624 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
626 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
627 virtual regs here because the simplify_*_operation routines are called
628 by integrate.c, which is called before virtual register instantiation. */
630 static bool
631 fixed_base_plus_p (rtx x)
632 {
633   switch (GET_CODE (x))
634     {
635     case REG:
636       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
637 	return true;
638       if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
639 	return true;
640       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
641 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
642 	return true;
643       return false;
645     case PLUS:
646       if (!CONST_INT_P (XEXP (x, 1)))
647 	return false;
648       return fixed_base_plus_p (XEXP (x, 0));
650     default:
651       return false;
652     }
653 }
655 /* Dump the expressions in the equivalence class indicated by CLASSP.
656 This function is used only for debugging. */
657 void
658 dump_class (struct table_elt *classp)
659 {
660   struct table_elt *elt;
662   fprintf (stderr, "Equivalence chain for ");
663   print_rtl (stderr, classp->exp);
664   fprintf (stderr, ": \n");
666   for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
667     {
668       print_rtl (stderr, elt->exp);
669       fprintf (stderr, "\n");
670     }
671 }
673 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
676 approx_reg_cost_1 (rtx
*xp
, void *data
)
679 int *cost_p
= (int *) data
;
683 unsigned int regno
= REGNO (x
);
685 if (! CHEAP_REGNO (regno
))
687 if (regno
< FIRST_PSEUDO_REGISTER
)
689 if (SMALL_REGISTER_CLASSES
)
701 /* Return an estimate of the cost of the registers used in an rtx.
702 This is mostly the number of different REG expressions in the rtx;
703 however for some exceptions like fixed registers we use a cost of
704 0. If any other hard register reference occurs, return MAX_COST. */
707 approx_reg_cost (rtx x
)
711 if (for_each_rtx (&x
, approx_reg_cost_1
, (void *) &cost
))
717 /* Return a negative value if an rtx A, whose costs are given by COST_A
718 and REGCOST_A, is more desirable than an rtx B.
719    Return a positive value if A is less desirable, or 0 if the two are
720    equally good.  */
721 static int
722 preferable (int cost_a
, int regcost_a
, int cost_b
, int regcost_b
)
724   /* First, get rid of cases involving expressions that are entirely
725      unwanted.  */
726 if (cost_a
!= cost_b
)
728 if (cost_a
== MAX_COST
)
730 if (cost_b
== MAX_COST
)
734 /* Avoid extending lifetimes of hardregs. */
735 if (regcost_a
!= regcost_b
)
737 if (regcost_a
== MAX_COST
)
739 if (regcost_b
== MAX_COST
)
743 /* Normal operation costs take precedence. */
744 if (cost_a
!= cost_b
)
745 return cost_a
- cost_b
;
746 /* Only if these are identical consider effects on register pressure. */
747 if (regcost_a
!= regcost_b
)
748 return regcost_a
- regcost_b
;
752 /* Internal function, to compute cost when X is not a register; called
753 from COST macro to keep it simple. */
756 notreg_cost (rtx x
, enum rtx_code outer
)
758 return ((GET_CODE (x
) == SUBREG
759 && REG_P (SUBREG_REG (x
))
760 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
761 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x
))) == MODE_INT
762 && (GET_MODE_SIZE (GET_MODE (x
))
763 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
764 && subreg_lowpart_p (x
)
765 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x
)),
766 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
)))))
768 : rtx_cost (x
, outer
, optimize_this_for_speed_p
) * 2);
772 /* Initialize CSE_REG_INFO_TABLE. */
775 init_cse_reg_info (unsigned int nregs
)
777 /* Do we need to grow the table? */
778 if (nregs
> cse_reg_info_table_size
)
780 unsigned int new_size
;
782 if (cse_reg_info_table_size
< 2048)
784 /* Compute a new size that is a power of 2 and no smaller
785 	 than the larger of NREGS and 64.  */
786 new_size
= (cse_reg_info_table_size
787 ? cse_reg_info_table_size
: 64);
789 while (new_size
< nregs
)
794       /* If we need a big table, allocate just enough to hold
795 	 NREGS registers.  */
799 /* Reallocate the table with NEW_SIZE entries. */
800 if (cse_reg_info_table
)
801 free (cse_reg_info_table
);
802 cse_reg_info_table
= XNEWVEC (struct cse_reg_info
, new_size
);
803 cse_reg_info_table_size
= new_size
;
804 cse_reg_info_table_first_uninitialized
= 0;
807 /* Do we have all of the first NREGS entries initialized? */
808 if (cse_reg_info_table_first_uninitialized
< nregs
)
810 unsigned int old_timestamp
= cse_reg_info_timestamp
- 1;
813 /* Put the old timestamp on newly allocated entries so that they
814 will all be considered out of date. We do not touch those
815 	 entries beyond the first NREGS entries to be nice to the
816 	 virtual memory.  */
817 for (i
= cse_reg_info_table_first_uninitialized
; i
< nregs
; i
++)
818 cse_reg_info_table
[i
].timestamp
= old_timestamp
;
820 cse_reg_info_table_first_uninitialized
= nregs
;
824 /* Given REGNO, initialize the cse_reg_info entry for REGNO. */
827 get_cse_reg_info_1 (unsigned int regno
)
829 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
830 entry will be considered to have been initialized. */
831 cse_reg_info_table
[regno
].timestamp
= cse_reg_info_timestamp
;
833 /* Initialize the rest of the entry. */
834 cse_reg_info_table
[regno
].reg_tick
= 1;
835 cse_reg_info_table
[regno
].reg_in_table
= -1;
836 cse_reg_info_table
[regno
].subreg_ticked
= -1;
837 cse_reg_info_table
[regno
].reg_qty
= -regno
- 1;
840 /* Find a cse_reg_info entry for REGNO. */
842 static inline struct cse_reg_info
*
843 get_cse_reg_info (unsigned int regno
)
845 struct cse_reg_info
*p
= &cse_reg_info_table
[regno
];
847   /* If this entry has not been initialized, go ahead and initialize
848      it.  */
849 if (p
->timestamp
!= cse_reg_info_timestamp
)
850 get_cse_reg_info_1 (regno
);
855 /* Clear the hash table and initialize each register with its own quantity,
856 for a new basic block. */
859 new_basic_block (void)
865 /* Invalidate cse_reg_info_table. */
866 cse_reg_info_timestamp
++;
868 /* Clear out hash table state for this pass. */
869 CLEAR_HARD_REG_SET (hard_regs_in_table
);
871 /* The per-quantity values used to be initialized here, but it is
872 much faster to initialize each as it is made in `make_new_qty'. */
874 for (i
= 0; i
< HASH_SIZE
; i
++)
876 struct table_elt
*first
;
881 struct table_elt
*last
= first
;
885 while (last
->next_same_hash
!= NULL
)
886 last
= last
->next_same_hash
;
888 	  /* Now relink this entire hash chain into
889 the free element list. */
891 last
->next_same_hash
= free_element_chain
;
892 free_element_chain
= first
;
901 /* Say that register REG contains a quantity in mode MODE not in any
902 register before and initialize that quantity. */
905 make_new_qty (unsigned int reg
, enum machine_mode mode
)
908 struct qty_table_elem
*ent
;
909 struct reg_eqv_elem
*eqv
;
911 gcc_assert (next_qty
< max_qty
);
913 q
= REG_QTY (reg
) = next_qty
++;
915 ent
->first_reg
= reg
;
918 ent
->const_rtx
= ent
->const_insn
= NULL_RTX
;
919 ent
->comparison_code
= UNKNOWN
;
921 eqv
= ®_eqv_table
[reg
];
922 eqv
->next
= eqv
->prev
= -1;
925 /* Make reg NEW equivalent to reg OLD.
926 OLD is not changing; NEW is. */
929 make_regs_eqv (unsigned int new_reg
, unsigned int old_reg
)
931 unsigned int lastr
, firstr
;
932 int q
= REG_QTY (old_reg
);
933 struct qty_table_elem
*ent
;
937 /* Nothing should become eqv until it has a "non-invalid" qty number. */
938 gcc_assert (REGNO_QTY_VALID_P (old_reg
));
940 REG_QTY (new_reg
) = q
;
941 firstr
= ent
->first_reg
;
942 lastr
= ent
->last_reg
;
944 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
945 hard regs. Among pseudos, if NEW will live longer than any other reg
946 of the same qty, and that is beyond the current basic block,
947 make it the new canonical replacement for this qty. */
948 if (! (firstr
< FIRST_PSEUDO_REGISTER
&& FIXED_REGNO_P (firstr
))
949 /* Certain fixed registers might be of the class NO_REGS. This means
950 that not only can they not be allocated by the compiler, but
951 	    they cannot be used in substitutions or canonicalizations
952 	    either.  */
953 && (new_reg
>= FIRST_PSEUDO_REGISTER
|| REGNO_REG_CLASS (new_reg
) != NO_REGS
)
954 && ((new_reg
< FIRST_PSEUDO_REGISTER
&& FIXED_REGNO_P (new_reg
))
955 || (new_reg
>= FIRST_PSEUDO_REGISTER
956 && (firstr
< FIRST_PSEUDO_REGISTER
957 || (bitmap_bit_p (cse_ebb_live_out
, new_reg
)
958 && !bitmap_bit_p (cse_ebb_live_out
, firstr
))
959 || (bitmap_bit_p (cse_ebb_live_in
, new_reg
)
960 && !bitmap_bit_p (cse_ebb_live_in
, firstr
))))))
962 reg_eqv_table
[firstr
].prev
= new_reg
;
963 reg_eqv_table
[new_reg
].next
= firstr
;
964 reg_eqv_table
[new_reg
].prev
= -1;
965 ent
->first_reg
= new_reg
;
969 /* If NEW is a hard reg (known to be non-fixed), insert at end.
970 Otherwise, insert before any non-fixed hard regs that are at the
971 end. Registers of class NO_REGS cannot be used as an
972 equivalent for anything. */
973 while (lastr
< FIRST_PSEUDO_REGISTER
&& reg_eqv_table
[lastr
].prev
>= 0
974 && (REGNO_REG_CLASS (lastr
) == NO_REGS
|| ! FIXED_REGNO_P (lastr
))
975 && new_reg
>= FIRST_PSEUDO_REGISTER
)
976 lastr
= reg_eqv_table
[lastr
].prev
;
977 reg_eqv_table
[new_reg
].next
= reg_eqv_table
[lastr
].next
;
978 if (reg_eqv_table
[lastr
].next
>= 0)
979 reg_eqv_table
[reg_eqv_table
[lastr
].next
].prev
= new_reg
;
981 qty_table
[q
].last_reg
= new_reg
;
982 reg_eqv_table
[lastr
].next
= new_reg
;
983 reg_eqv_table
[new_reg
].prev
= lastr
;
987 /* Remove REG from its equivalence class. */
990 delete_reg_equiv (unsigned int reg
)
992 struct qty_table_elem
*ent
;
993 int q
= REG_QTY (reg
);
996 /* If invalid, do nothing. */
997 if (! REGNO_QTY_VALID_P (reg
))
1000 ent
= &qty_table
[q
];
1002 p
= reg_eqv_table
[reg
].prev
;
1003 n
= reg_eqv_table
[reg
].next
;
1006 reg_eqv_table
[n
].prev
= p
;
1010 reg_eqv_table
[p
].next
= n
;
1014 REG_QTY (reg
) = -reg
- 1;
1017 /* Remove any invalid expressions from the hash table
1018 that refer to any of the registers contained in expression X.
1020 Make sure that newly inserted references to those registers
1021 as subexpressions will be considered valid.
1023 mention_regs is not called when a register itself
1024 is being stored in the table.
1026    Return 1 if we have done something that may have changed the hash code
1027    of X.  */
1029 static int
1030 mention_regs (rtx x
)
1040 code
= GET_CODE (x
);
1043 unsigned int regno
= REGNO (x
);
1044 unsigned int endregno
= END_REGNO (x
);
1047 for (i
= regno
; i
< endregno
; i
++)
1049 if (REG_IN_TABLE (i
) >= 0 && REG_IN_TABLE (i
) != REG_TICK (i
))
1050 remove_invalid_refs (i
);
1052 REG_IN_TABLE (i
) = REG_TICK (i
);
1053 SUBREG_TICKED (i
) = -1;
1059 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1060 pseudo if they don't use overlapping words. We handle only pseudos
1061 here for simplicity. */
1062 if (code
== SUBREG
&& REG_P (SUBREG_REG (x
))
1063 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
)
1065 unsigned int i
= REGNO (SUBREG_REG (x
));
1067 if (REG_IN_TABLE (i
) >= 0 && REG_IN_TABLE (i
) != REG_TICK (i
))
1069 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1070 the last store to this register really stored into this
1071 subreg, then remove the memory of this subreg.
1072 Otherwise, remove any memory of the entire register and
1073 all its subregs from the table. */
1074 if (REG_TICK (i
) - REG_IN_TABLE (i
) > 1
1075 || SUBREG_TICKED (i
) != REGNO (SUBREG_REG (x
)))
1076 remove_invalid_refs (i
);
1078 remove_invalid_subreg_refs (i
, SUBREG_BYTE (x
), GET_MODE (x
));
1081 REG_IN_TABLE (i
) = REG_TICK (i
);
1082 SUBREG_TICKED (i
) = REGNO (SUBREG_REG (x
));
1086 /* If X is a comparison or a COMPARE and either operand is a register
1087 that does not have a quantity, give it one. This is so that a later
1088 call to record_jump_equiv won't cause X to be assigned a different
1089 hash code and not found in the table after that call.
1091 It is not necessary to do this here, since rehash_using_reg can
1092 fix up the table later, but doing this here eliminates the need to
1093 call that expensive function in the most common case where the only
1094 use of the register is in the comparison. */
1096 if (code
== COMPARE
|| COMPARISON_P (x
))
1098 if (REG_P (XEXP (x
, 0))
1099 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x
, 0))))
1100 if (insert_regs (XEXP (x
, 0), NULL
, 0))
1102 rehash_using_reg (XEXP (x
, 0));
1106 if (REG_P (XEXP (x
, 1))
1107 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x
, 1))))
1108 if (insert_regs (XEXP (x
, 1), NULL
, 0))
1110 rehash_using_reg (XEXP (x
, 1));
1115 fmt
= GET_RTX_FORMAT (code
);
1116 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1118 changed
|= mention_regs (XEXP (x
, i
));
1119 else if (fmt
[i
] == 'E')
1120 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1121 changed
|= mention_regs (XVECEXP (x
, i
, j
));
1126 /* Update the register quantities for inserting X into the hash table
1127 with a value equivalent to CLASSP.
1128 (If the class does not contain a REG, it is irrelevant.)
1129 If MODIFIED is nonzero, X is a destination; it is being modified.
1130 Note that delete_reg_equiv should be called on a register
1131 before insert_regs is done on that register with MODIFIED != 0.
1133 Nonzero value means that elements of reg_qty have changed
1134 so X's hash code may be different. */
1137 insert_regs (rtx x
, struct table_elt
*classp
, int modified
)
1141 unsigned int regno
= REGNO (x
);
1144 /* If REGNO is in the equivalence table already but is of the
1145 wrong mode for that equivalence, don't do anything here. */
1147 qty_valid
= REGNO_QTY_VALID_P (regno
);
1150 struct qty_table_elem
*ent
= &qty_table
[REG_QTY (regno
)];
1152 if (ent
->mode
!= GET_MODE (x
))
1156 if (modified
|| ! qty_valid
)
1159 for (classp
= classp
->first_same_value
;
1161 classp
= classp
->next_same_value
)
1162 if (REG_P (classp
->exp
)
1163 && GET_MODE (classp
->exp
) == GET_MODE (x
))
1165 unsigned c_regno
= REGNO (classp
->exp
);
1167 gcc_assert (REGNO_QTY_VALID_P (c_regno
));
1169 	      /* Suppose that 5 is hard reg and 100 and 101 are
1170 		 pseudos:
1172 (set (reg:si 100) (reg:si 5))
1173 (set (reg:si 5) (reg:si 100))
1174 (set (reg:di 101) (reg:di 5))
1176 We would now set REG_QTY (101) = REG_QTY (5), but the
1177 entry for 5 is in SImode. When we use this later in
1178 copy propagation, we get the register in wrong mode. */
1179 if (qty_table
[REG_QTY (c_regno
)].mode
!= GET_MODE (x
))
1182 make_regs_eqv (regno
, c_regno
);
1186 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1187 than REG_IN_TABLE to find out if there was only a single preceding
1188 invalidation - for the SUBREG - or another one, which would be
1189 for the full register. However, if we find here that REG_TICK
1190 indicates that the register is invalid, it means that it has
1191 been invalidated in a separate operation. The SUBREG might be used
1192 now (then this is a recursive call), or we might use the full REG
1193 now and a SUBREG of it later. So bump up REG_TICK so that
1194 mention_regs will do the right thing. */
1196 && REG_IN_TABLE (regno
) >= 0
1197 && REG_TICK (regno
) == REG_IN_TABLE (regno
) + 1)
1199 make_new_qty (regno
, GET_MODE (x
));
1206 /* If X is a SUBREG, we will likely be inserting the inner register in the
1207 table. If that register doesn't have an assigned quantity number at
1208 this point but does later, the insertion that we will be doing now will
1209 not be accessible because its hash code will have changed. So assign
1210 a quantity number now. */
1212 else if (GET_CODE (x
) == SUBREG
&& REG_P (SUBREG_REG (x
))
1213 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x
))))
1215 insert_regs (SUBREG_REG (x
), NULL
, 0);
1220 return mention_regs (x
);
1224 /* Compute upper and lower anchors for CST. Also compute the offset of CST
1225 from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff
1226 CST is equal to an anchor. */
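/* Worked example (illustrative, assuming targetm.const_anchor were
   0x8000): for CST == 0x12345 the lower anchor is 0x10000 with offset
   0x2345, and the upper anchor is 0x18000 with offset -0x5cbb, so in
   both cases *_BASE + *_OFFS == CST.  */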
1229 compute_const_anchors (rtx cst
,
1230 HOST_WIDE_INT
*lower_base
, HOST_WIDE_INT
*lower_offs
,
1231 HOST_WIDE_INT
*upper_base
, HOST_WIDE_INT
*upper_offs
)
1233 HOST_WIDE_INT n
= INTVAL (cst
);
1235 *lower_base
= n
& ~(targetm
.const_anchor
- 1);
1236 if (*lower_base
== n
)
1240 (n
+ (targetm
.const_anchor
- 1)) & ~(targetm
.const_anchor
- 1);
1241 *upper_offs
= n
- *upper_base
;
1242 *lower_offs
= n
- *lower_base
;
1246 /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. */
1249 insert_const_anchor (HOST_WIDE_INT anchor
, rtx reg
, HOST_WIDE_INT offs
,
1250 enum machine_mode mode
)
1252 struct table_elt
*elt
;
1257 anchor_exp
= GEN_INT (anchor
);
1258 hash
= HASH (anchor_exp
, mode
);
1259 elt
= lookup (anchor_exp
, hash
, mode
);
1261 elt
= insert (anchor_exp
, NULL
, hash
, mode
);
1263 exp
= plus_constant (reg
, offs
);
1264 /* REG has just been inserted and the hash codes recomputed. */
1266 hash
= HASH (exp
, mode
);
1268 /* Use the cost of the register rather than the whole expression. When
1269 looking up constant anchors we will further offset the corresponding
1270 expression therefore it does not make sense to prefer REGs over
1271 reg-immediate additions. Prefer instead the oldest expression. Also
1272 don't prefer pseudos over hard regs so that we derive constants in
1273 argument registers from other argument registers rather than from the
1274 original pseudo that was used to synthesize the constant. */
1275 insert_with_costs (exp
, elt
, hash
, mode
, COST (reg
), 1);
1278 /* The constant CST is equivalent to the register REG. Create
1279 equivalences between the two anchors of CST and the corresponding
1280 register-offset expressions using REG. */
1283 insert_const_anchors (rtx reg
, rtx cst
, enum machine_mode mode
)
1285 HOST_WIDE_INT lower_base
, lower_offs
, upper_base
, upper_offs
;
1287 if (!compute_const_anchors (cst
, &lower_base
, &lower_offs
,
1288 &upper_base
, &upper_offs
))
1291   /* Ignore anchors of value 0.  Constants accessible from zero are
1292      simple.  */
1293 if (lower_base
!= 0)
1294 insert_const_anchor (lower_base
, reg
, -lower_offs
, mode
);
1296 if (upper_base
!= 0)
1297 insert_const_anchor (upper_base
, reg
, -upper_offs
, mode
);
1300 /* We need to express ANCHOR_ELT->exp + OFFS. Walk the equivalence list of
1301 ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
1302 valid expression. Return the cheapest and oldest of such expressions. In
1303 *OLD, return how old the resulting expression is compared to the other
1304 equivalent expressions. */
1307 find_reg_offset_for_const (struct table_elt
*anchor_elt
, HOST_WIDE_INT offs
,
1310 struct table_elt
*elt
;
1312 struct table_elt
*match_elt
;
1315 /* Find the cheapest and *oldest* expression to maximize the chance of
1316 reusing the same pseudo. */
1320 for (elt
= anchor_elt
->first_same_value
, idx
= 0;
1322 elt
= elt
->next_same_value
, idx
++)
1324 if (match_elt
&& CHEAPER (match_elt
, elt
))
1327 if (REG_P (elt
->exp
)
1328 || (GET_CODE (elt
->exp
) == PLUS
1329 && REG_P (XEXP (elt
->exp
, 0))
1330 && GET_CODE (XEXP (elt
->exp
, 1)) == CONST_INT
))
1334 /* Ignore expressions that are no longer valid. */
1335 if (!REG_P (elt
->exp
) && !exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
1338 x
= plus_constant (elt
->exp
, offs
);
1340 || (GET_CODE (x
) == PLUS
1341 && IN_RANGE (INTVAL (XEXP (x
, 1)),
1342 -targetm
.const_anchor
,
1343 targetm
.const_anchor
- 1)))
1355 /* Try to express the constant SRC_CONST using a register+offset expression
1356    derived from a constant anchor.  Return it if successful or NULL_RTX,
1357    otherwise.  */
1359 static rtx
1360 try_const_anchors (rtx src_const
, enum machine_mode mode
)
1362 struct table_elt
*lower_elt
, *upper_elt
;
1363 HOST_WIDE_INT lower_base
, lower_offs
, upper_base
, upper_offs
;
1364 rtx lower_anchor_rtx
, upper_anchor_rtx
;
1365 rtx lower_exp
= NULL_RTX
, upper_exp
= NULL_RTX
;
1366 unsigned lower_old
, upper_old
;
1368 if (!compute_const_anchors (src_const
, &lower_base
, &lower_offs
,
1369 &upper_base
, &upper_offs
))
1372 lower_anchor_rtx
= GEN_INT (lower_base
);
1373 upper_anchor_rtx
= GEN_INT (upper_base
);
1374 lower_elt
= lookup (lower_anchor_rtx
, HASH (lower_anchor_rtx
, mode
), mode
);
1375 upper_elt
= lookup (upper_anchor_rtx
, HASH (upper_anchor_rtx
, mode
), mode
);
1378 lower_exp
= find_reg_offset_for_const (lower_elt
, lower_offs
, &lower_old
);
1380 upper_exp
= find_reg_offset_for_const (upper_elt
, upper_offs
, &upper_old
);
1387 /* Return the older expression. */
1388 return (upper_old
> lower_old
? upper_exp
: lower_exp
);
1391 /* Look in or update the hash table. */
1393 /* Remove table element ELT from use in the table.
1394 HASH is its hash code, made using the HASH macro.
1395 It's an argument because often that is known in advance
1396 and we save much time not recomputing it. */
1399 remove_from_table (struct table_elt
*elt
, unsigned int hash
)
1404 /* Mark this element as removed. See cse_insn. */
1405 elt
->first_same_value
= 0;
1407 /* Remove the table element from its equivalence class. */
1410 struct table_elt
*prev
= elt
->prev_same_value
;
1411 struct table_elt
*next
= elt
->next_same_value
;
1414 next
->prev_same_value
= prev
;
1417 prev
->next_same_value
= next
;
1420 struct table_elt
*newfirst
= next
;
1423 next
->first_same_value
= newfirst
;
1424 next
= next
->next_same_value
;
1429 /* Remove the table element from its hash bucket. */
1432 struct table_elt
*prev
= elt
->prev_same_hash
;
1433 struct table_elt
*next
= elt
->next_same_hash
;
1436 next
->prev_same_hash
= prev
;
1439 prev
->next_same_hash
= next
;
1440 else if (table
[hash
] == elt
)
1444 /* This entry is not in the proper hash bucket. This can happen
1445 when two classes were merged by `merge_equiv_classes'. Search
1446 for the hash bucket that it heads. This happens only very
1447 rarely, so the cost is acceptable. */
1448 for (hash
= 0; hash
< HASH_SIZE
; hash
++)
1449 if (table
[hash
] == elt
)
1454 /* Remove the table element from its related-value circular chain. */
1456 if (elt
->related_value
!= 0 && elt
->related_value
!= elt
)
1458 struct table_elt
*p
= elt
->related_value
;
1460 while (p
->related_value
!= elt
)
1461 p
= p
->related_value
;
1462 p
->related_value
= elt
->related_value
;
1463 if (p
->related_value
== p
)
1464 p
->related_value
= 0;
1467 /* Now add it to the free element chain. */
1468 elt
->next_same_hash
= free_element_chain
;
1469 free_element_chain
= elt
;
1472 /* Same as above, but X is a pseudo-register. */
1475 remove_pseudo_from_table (rtx x
, unsigned int hash
)
1477 struct table_elt
*elt
;
1479 /* Because a pseudo-register can be referenced in more than one
1480 mode, we might have to remove more than one table entry. */
1481 while ((elt
= lookup_for_remove (x
, hash
, VOIDmode
)))
1482 remove_from_table (elt
, hash
);
1485 /* Look up X in the hash table and return its table element,
1486 or 0 if X is not in the table.
1488 MODE is the machine-mode of X, or if X is an integer constant
1489 with VOIDmode then MODE is the mode with which X will be used.
1491    Here we are satisfied to find an expression whose tree structure
1492    looks like X.  */
1494 static struct table_elt
*
1495 lookup (rtx x
, unsigned int hash
, enum machine_mode mode
)
1497 struct table_elt
*p
;
1499 for (p
= table
[hash
]; p
; p
= p
->next_same_hash
)
1500 if (mode
== p
->mode
&& ((x
== p
->exp
&& REG_P (x
))
1501 || exp_equiv_p (x
, p
->exp
, !REG_P (x
), false)))
1507 /* Like `lookup' but don't care whether the table element uses invalid regs.
1508 Also ignore discrepancies in the machine mode of a register. */
1510 static struct table_elt
*
1511 lookup_for_remove (rtx x
, unsigned int hash
, enum machine_mode mode
)
1513 struct table_elt
*p
;
1517 unsigned int regno
= REGNO (x
);
1519 /* Don't check the machine mode when comparing registers;
1520 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1521 for (p
= table
[hash
]; p
; p
= p
->next_same_hash
)
1523 && REGNO (p
->exp
) == regno
)
1528 for (p
= table
[hash
]; p
; p
= p
->next_same_hash
)
1530 && (x
== p
->exp
|| exp_equiv_p (x
, p
->exp
, 0, false)))
1537 /* Look for an expression equivalent to X and with code CODE.
1538 If one is found, return that expression. */
1541 lookup_as_function (rtx x
, enum rtx_code code
)
1544 = lookup (x
, SAFE_HASH (x
, VOIDmode
), GET_MODE (x
));
1549 for (p
= p
->first_same_value
; p
; p
= p
->next_same_value
)
1550 if (GET_CODE (p
->exp
) == code
1551 /* Make sure this is a valid entry in the table. */
1552 && exp_equiv_p (p
->exp
, p
->exp
, 1, false))
1558 /* Insert X in the hash table, assuming HASH is its hash code and
1559 CLASSP is an element of the class it should go in (or 0 if a new
1560    class should be made).  COST is the cost of X and reg_cost is the
1561 cost of registers in X. It is inserted at the proper position to
1562 keep the class in the order cheapest first.
1564 MODE is the machine-mode of X, or if X is an integer constant
1565 with VOIDmode then MODE is the mode with which X will be used.
1567 For elements of equal cheapness, the most recent one
1568 goes in front, except that the first element in the list
1569 remains first unless a cheaper element is added. The order of
1570 pseudo-registers does not matter, as canon_reg will be called to
1571 find the cheapest when a register is retrieved from the table.
1573 The in_memory field in the hash table element is set to 0.
1574 The caller must set it nonzero if appropriate.
1576 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1577 and if insert_regs returns a nonzero value
1578 you must then recompute its hash code before calling here.
1580 If necessary, update table showing constant values of quantities. */
1582 static struct table_elt
*
1583 insert_with_costs (rtx x
, struct table_elt
*classp
, unsigned int hash
,
1584 enum machine_mode mode
, int cost
, int reg_cost
)
1586 struct table_elt
*elt
;
1588 /* If X is a register and we haven't made a quantity for it,
1589 something is wrong. */
1590 gcc_assert (!REG_P (x
) || REGNO_QTY_VALID_P (REGNO (x
)));
1592 /* If X is a hard register, show it is being put in the table. */
1593 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
1594 add_to_hard_reg_set (&hard_regs_in_table
, GET_MODE (x
), REGNO (x
));
1596 /* Put an element for X into the right hash bucket. */
1598 elt
= free_element_chain
;
1600 free_element_chain
= elt
->next_same_hash
;
1602 elt
= XNEW (struct table_elt
);
1605 elt
->canon_exp
= NULL_RTX
;
1607 elt
->regcost
= reg_cost
;
1608 elt
->next_same_value
= 0;
1609 elt
->prev_same_value
= 0;
1610 elt
->next_same_hash
= table
[hash
];
1611 elt
->prev_same_hash
= 0;
1612 elt
->related_value
= 0;
1615 elt
->is_const
= (CONSTANT_P (x
) || fixed_base_plus_p (x
));
1618 table
[hash
]->prev_same_hash
= elt
;
1621 /* Put it into the proper value-class. */
1624 classp
= classp
->first_same_value
;
1625 if (CHEAPER (elt
, classp
))
1626 /* Insert at the head of the class. */
1628 struct table_elt
*p
;
1629 elt
->next_same_value
= classp
;
1630 classp
->prev_same_value
= elt
;
1631 elt
->first_same_value
= elt
;
1633 for (p
= classp
; p
; p
= p
->next_same_value
)
1634 p
->first_same_value
= elt
;
1638 /* Insert not at head of the class. */
1639 /* Put it after the last element cheaper than X. */
1640 struct table_elt
*p
, *next
;
1642 for (p
= classp
; (next
= p
->next_same_value
) && CHEAPER (next
, elt
);
1645 /* Put it after P and before NEXT. */
1646 elt
->next_same_value
= next
;
1648 next
->prev_same_value
= elt
;
1650 elt
->prev_same_value
= p
;
1651 p
->next_same_value
= elt
;
1652 elt
->first_same_value
= classp
;
1656 elt
->first_same_value
= elt
;
1658 /* If this is a constant being set equivalent to a register or a register
1659 being set equivalent to a constant, note the constant equivalence.
1661 If this is a constant, it cannot be equivalent to a different constant,
1662 and a constant is the only thing that can be cheaper than a register. So
1663    we know the register is the head of the class (before the constant was
1664    inserted).
1666 If this is a register that is not already known equivalent to a
1667 constant, we must check the entire class.
1669 If this is a register that is already known equivalent to an insn,
1670 update the qtys `const_insn' to show that `this_insn' is the latest
1671 insn making that quantity equivalent to the constant. */
1673 if (elt
->is_const
&& classp
&& REG_P (classp
->exp
)
1676 int exp_q
= REG_QTY (REGNO (classp
->exp
));
1677 struct qty_table_elem
*exp_ent
= &qty_table
[exp_q
];
1679 exp_ent
->const_rtx
= gen_lowpart (exp_ent
->mode
, x
);
1680 exp_ent
->const_insn
= this_insn
;
1685 && ! qty_table
[REG_QTY (REGNO (x
))].const_rtx
1688 struct table_elt
*p
;
1690 for (p
= classp
; p
!= 0; p
= p
->next_same_value
)
1692 if (p
->is_const
&& !REG_P (p
->exp
))
1694 int x_q
= REG_QTY (REGNO (x
));
1695 struct qty_table_elem
*x_ent
= &qty_table
[x_q
];
1698 = gen_lowpart (GET_MODE (x
), p
->exp
);
1699 x_ent
->const_insn
= this_insn
;
1706 && qty_table
[REG_QTY (REGNO (x
))].const_rtx
1707 && GET_MODE (x
) == qty_table
[REG_QTY (REGNO (x
))].mode
)
1708 qty_table
[REG_QTY (REGNO (x
))].const_insn
= this_insn
;
1710 /* If this is a constant with symbolic value,
1711 and it has a term with an explicit integer value,
1712 link it up with related expressions. */
1713 if (GET_CODE (x
) == CONST
)
1715 rtx subexp
= get_related_value (x
);
1717 struct table_elt
*subelt
, *subelt_prev
;
1721 /* Get the integer-free subexpression in the hash table. */
1722 subhash
= SAFE_HASH (subexp
, mode
);
1723 subelt
= lookup (subexp
, subhash
, mode
);
1725 subelt
= insert (subexp
, NULL
, subhash
, mode
);
1726 /* Initialize SUBELT's circular chain if it has none. */
1727 if (subelt
->related_value
== 0)
1728 subelt
->related_value
= subelt
;
1729 /* Find the element in the circular chain that precedes SUBELT. */
1730 subelt_prev
= subelt
;
1731 while (subelt_prev
->related_value
!= subelt
)
1732 subelt_prev
= subelt_prev
->related_value
;
1733 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1734 This way the element that follows SUBELT is the oldest one. */
1735 elt
->related_value
= subelt_prev
->related_value
;
1736 subelt_prev
->related_value
= elt
;
1743 /* Wrap insert_with_costs by passing the default costs. */
1745 static struct table_elt
*
1746 insert (rtx x
, struct table_elt
*classp
, unsigned int hash
,
1747 enum machine_mode mode
)
1750 insert_with_costs (x
, classp
, hash
, mode
, COST (x
), approx_reg_cost (x
));
1754 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1755 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1756 the two classes equivalent.
1758    CLASS1 will be the surviving class; CLASS2 should not be used after this
1759    call.
1761 Any invalid entries in CLASS2 will not be copied. */
1764 merge_equiv_classes (struct table_elt
*class1
, struct table_elt
*class2
)
1766 struct table_elt
*elt
, *next
, *new_elt
;
1768 /* Ensure we start with the head of the classes. */
1769 class1
= class1
->first_same_value
;
1770 class2
= class2
->first_same_value
;
1772 /* If they were already equal, forget it. */
1773 if (class1
== class2
)
1776 for (elt
= class2
; elt
; elt
= next
)
1780 enum machine_mode mode
= elt
->mode
;
1782 next
= elt
->next_same_value
;
1784 /* Remove old entry, make a new one in CLASS1's class.
1785 Don't do this for invalid entries as we cannot find their
1786 hash code (it also isn't necessary). */
1787 if (REG_P (exp
) || exp_equiv_p (exp
, exp
, 1, false))
1789 bool need_rehash
= false;
1791 hash_arg_in_memory
= 0;
1792 hash
= HASH (exp
, mode
);
1796 need_rehash
= REGNO_QTY_VALID_P (REGNO (exp
));
1797 delete_reg_equiv (REGNO (exp
));
1800 if (REG_P (exp
) && REGNO (exp
) >= FIRST_PSEUDO_REGISTER
)
1801 remove_pseudo_from_table (exp
, hash
);
1803 remove_from_table (elt
, hash
);
1805 if (insert_regs (exp
, class1
, 0) || need_rehash
)
1807 rehash_using_reg (exp
);
1808 hash
= HASH (exp
, mode
);
1810 new_elt
= insert (exp
, class1
, hash
, mode
);
1811 new_elt
->in_memory
= hash_arg_in_memory
;
1816 /* Flush the entire hash table. */
1819 flush_hash_table (void)
1822 struct table_elt
*p
;
1824 for (i
= 0; i
< HASH_SIZE
; i
++)
1825 for (p
= table
[i
]; p
; p
= table
[i
])
1827 /* Note that invalidate can remove elements
1828 after P in the current hash chain. */
1830 invalidate (p
->exp
, VOIDmode
);
1832 remove_from_table (p
, i
);
1836 /* Function called for each rtx to check whether a true dependence exists.  */
1837 struct check_dependence_data
1839 enum machine_mode mode
;
1845 check_dependence (rtx
*x
, void *data
)
1847 struct check_dependence_data
*d
= (struct check_dependence_data
*) data
;
1848 if (*x
&& MEM_P (*x
))
1849 return canon_true_dependence (d
->exp
, d
->mode
, d
->addr
, *x
, NULL_RTX
,
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, enum machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
        /* If X is a register, dependencies on its contents are recorded
           through the qty number mechanism.  Just change the qty number of
           the register, mark it as invalid for expressions that refer to it,
           and remove it itself.  */
        unsigned int regno = REGNO (x);
        unsigned int hash = HASH (x, GET_MODE (x));

        /* Remove REGNO from any quantity list it might be on and indicate
           that its value might have changed.  If it is a pseudo, remove its
           entry from the hash table.

           For a hard register, we do the first two actions above for any
           additional hard registers corresponding to X.  Then, if any of these
           registers are in the table, we must remove any REG entries that
           overlap these registers.  */

        delete_reg_equiv (regno);
        REG_TICK (regno)++;
        SUBREG_TICKED (regno) = -1;

        if (regno >= FIRST_PSEUDO_REGISTER)
          remove_pseudo_from_table (x, hash);
        else
          {
            HOST_WIDE_INT in_table
              = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
            unsigned int endregno = END_HARD_REGNO (x);
            unsigned int tregno, tendregno, rn;
            struct table_elt *p, *next;

            CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

            for (rn = regno + 1; rn < endregno; rn++)
              {
                in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
                CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
                delete_reg_equiv (rn);
                REG_TICK (rn)++;
                SUBREG_TICKED (rn) = -1;
              }

            if (in_table)
              for (hash = 0; hash < HASH_SIZE; hash++)
                for (p = table[hash]; p; p = next)
                  {
                    next = p->next_same_hash;

                    if (!REG_P (p->exp)
                        || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
                      continue;

                    tregno = REGNO (p->exp);
                    tendregno = END_HARD_REGNO (p->exp);
                    if (tendregno > regno && tregno < endregno)
                      remove_from_table (p, hash);
                  }
          }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
        invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
         question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
         true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
         memory.  */
      if (full_mode == VOIDmode)
        full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
        {
          struct table_elt *next;

          for (p = table[i]; p; p = next)
            {
              next = p->next_same_hash;
              if (p->in_memory)
                {
                  struct check_dependence_data d;

                  /* Just canonicalize the expression once;
                     otherwise each time we call invalidate
                     true_dependence will canonicalize the
                     expression again.  */
                  if (!p->canon_exp)
                    p->canon_exp = canon_rtx (p->exp);
                  d.exp = x;
                  d.addr = addr;
                  d.mode = full_mode;
                  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
                    remove_from_table (p, i);
                }
            }
        }
      return;

    default:
      gcc_unreachable ();
    }
}
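
/* Editorial illustration, not part of GCC (kept out of the build): the
   hard-register case above removes any REG entry whose register range
   overlaps the invalidated range.  The test is the usual half-open
   interval overlap check, sketched here with hypothetical names.  */
#if 0
/* Return nonzero if [tregno, tendregno) and [regno, endregno) overlap.  */
static int
toy_ranges_overlap (unsigned tregno, unsigned tendregno,
                    unsigned regno, unsigned endregno)
{
  return tendregno > regno && tregno < endregno;
}
#endif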
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (!REG_P (p->exp)
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
          remove_from_table (p, i);
      }
}
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */

static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
                            enum machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        rtx exp = p->exp;
        next = p->next_same_hash;

        if (!REG_P (exp)
            && (GET_CODE (exp) != SUBREG
                || !REG_P (SUBREG_REG (exp))
                || REGNO (SUBREG_REG (exp)) != regno
                || (((SUBREG_BYTE (exp)
                      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
                    && SUBREG_BYTE (exp) <= end))
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
          remove_from_table (p, i);
      }
}
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (reg_mentioned_p (x, p->exp)
            && exp_equiv_p (p->exp, p->exp, 1, false)
            && i != (hash = SAFE_HASH (p->exp, p->mode)))
          {
            if (p->next_same_hash)
              p->next_same_hash->prev_same_hash = p->prev_same_hash;

            if (p->prev_same_hash)
              p->prev_same_hash->next_same_hash = p->next_same_hash;
            else
              table[i] = p->next_same_hash;

            p->next_same_hash = table[hash];
            p->prev_same_hash = 0;
            if (table[hash])
              table[hash]->prev_same_hash = p;
            table[hash] = p;
          }
      }
}
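
/* Editorial illustration, not part of GCC (kept out of the build): moving
   an entry between hash buckets, as done above, is an unlink from one
   doubly linked chain followed by a push onto another.  Hypothetical
   names; the real code first recomputes the hash to find the new bucket.  */
#if 0
struct toy_entry
{
  struct toy_entry *next_same_hash, *prev_same_hash;
};

static void
toy_move_bucket (struct toy_entry *table[], struct toy_entry *p,
                 unsigned from, unsigned to)
{
  /* Unlink P from TABLE[FROM].  */
  if (p->next_same_hash)
    p->next_same_hash->prev_same_hash = p->prev_same_hash;
  if (p->prev_same_hash)
    p->prev_same_hash->next_same_hash = p->next_same_hash;
  else
    table[from] = p->next_same_hash;

  /* Push P onto the front of TABLE[TO].  */
  p->next_same_hash = table[to];
  p->prev_same_hash = 0;
  if (table[to])
    table[to]->prev_same_hash = p;
  table[to] = p;
}
#endif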
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call (void)
{
  unsigned int regno, endregno;
  unsigned int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
        delete_reg_equiv (regno);
        if (REG_TICK (regno) >= 0)
          {
            REG_TICK (regno)++;
            SUBREG_TICKED (regno) = -1;
          }

        in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
        {
          next = p->next_same_hash;

          if (!REG_P (p->exp)
              || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
            continue;

          regno = REGNO (p->exp);
          endregno = END_HARD_REGNO (p->exp);

          for (i = regno; i < endregno; i++)
            if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
              {
                remove_from_table (p, hash);
                break;
              }
        }
}
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
        relt = lookup (subexp,
                       SAFE_HASH (subexp, GET_MODE (subexp)),
                       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
         The first is when X is already in the table.  Then it is searching
         the RELATED_VALUE list of X's class (RELT).  The second case is when
         X is not in the table.  Then RELT points to a class for the related
         value.

         Ensure that, whatever case we are in, that we ignore classes that have
         the same value as X.  */

      if (rtx_equal_p (x, p->exp))
        q = 0;
      else
        for (q = p->first_same_value; q; q = q->next_same_value)
          if (REG_P (q->exp))
            break;

      if (q)
        break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
         Alternatively, perhaps RELT was in the table for some other reason
         and it has no related values recorded.  */
      if (p == relt || p == 0)
        break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}
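
/* Editorial illustration, not part of GCC (kept out of the build): the
   related-value trick above rewrites a constant as "register plus offset".
   For example, if X is SYM+12 and register R is known to hold SYM+4, the
   answer is R+8.  A toy version of the offset arithmetic, with a
   hypothetical name:  */
#if 0
static long
toy_related_offset (long x_term, long known_term)
{
  /* OFFSET may be 0 when the two expressions differ only in operand
     order, mirroring the comment above.  12 - 4 == 8 in the example.  */
  return x_term - known_term;
}
#endif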
/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
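
/* Editorial illustration, not part of GCC (kept out of the build): since
   the hash above just sums bytes, hash_rtx_string ("abc") is
   'a' + 'b' + 'c' == 97 + 98 + 99 == 294.  A self-contained check of that
   arithmetic, with a hypothetical name:  */
#if 0
#include <assert.h>

static void
toy_check_string_hash (void)
{
  unsigned h = 0;
  const char *s = "abc";
  while (*s)
    h += (unsigned char) *s++;
  assert (h == 294);
}
#endif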
/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
   When the callback returns true, we continue with the new rtx.  */

unsigned
hash_rtx_cb (const_rtx x, enum machine_mode mode,
             int *do_not_record_p, int *hash_arg_in_memory_p,
             bool have_reg_qty, hash_rtx_callback_function cb)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;
  enum machine_mode newmode;
  rtx newx;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  /* Invoke the callback first.  */
  if (cb != NULL
      && ((*cb) (x, mode, &newx, &newmode)))
    {
      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
                           hash_arg_in_memory_p, have_reg_qty, cb);
      return hash;
    }

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        unsigned int regno = REGNO (x);

        if (do_not_record_p && !reload_completed)
          {
            /* On some machines, we can't record any non-fixed hard register,
               because extending its life will cause reload problems.  We
               consider ap, fp, sp, gp to be fixed for this purpose.

               We also consider CCmode registers to be fixed for this purpose;
               failure to do so leads to failure to simplify 0<100 type of
               conditionals.

               On all machines, we can't record any global registers.
               Nor should we record any register that is in a small
               class, as defined by CLASS_LIKELY_SPILLED_P.  */
            bool record;

            if (regno >= FIRST_PSEUDO_REGISTER)
              record = true;
            else if (x == frame_pointer_rtx
                     || x == hard_frame_pointer_rtx
                     || x == arg_pointer_rtx
                     || x == stack_pointer_rtx
                     || x == pic_offset_table_rtx)
              record = true;
            else if (global_regs[regno])
              record = false;
            else if (fixed_regs[regno])
              record = true;
            else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
              record = true;
            else if (SMALL_REGISTER_CLASSES)
              record = false;
            else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
              record = false;
            else
              record = true;

            if (!record)
              {
                *do_not_record_p = 1;
                return 0;
              }
          }

        hash += ((unsigned int) REG << 7);
        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
        return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
        if (REG_P (SUBREG_REG (x)))
          {
            hash += (((unsigned int) SUBREG << 7)
                     + REGNO (SUBREG_REG (x))
                     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
            return hash;
          }
        break;
      }

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += hash_rtx_cb (elt, GET_MODE (elt),
                                 do_not_record_p, hash_arg_in_memory_p,
                                 have_reg_qty, cb);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
         know the size of the move.  */
      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
        {
          *do_not_record_p = 1;
          return 0;
        }
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
        *hash_arg_in_memory_p = 1;

      /* Now that we have already found this special case,
         might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
         handling since the MEM may be BLKmode which normally
         prevents an entry from being made.  Pure calls are
         marked by a USE which mentions BLKmode memory.
         See calls.c:emit_call_1.  */
      if (MEM_P (XEXP (x, 0))
          && ! MEM_VOLATILE_P (XEXP (x, 0)))
        {
          hash += (unsigned) USE;
          x = XEXP (x, 0);

          if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
            *hash_arg_in_memory_p = 1;

          /* Now that we have already found this special case,
             might as well speed it up as much as possible.  */
          hash += (unsigned) MEM;
          x = XEXP (x, 0);
          goto repeat;
        }
      break;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      if (do_not_record_p) {
        *do_not_record_p = 1;
        return 0;
      }
      else
        return hash;
      break;

    case ASM_OPERANDS:
      if (do_not_record_p && MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
            + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                {
                  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                        do_not_record_p, hash_arg_in_memory_p,
                                        have_reg_qty, cb)
                           + hash_rtx_string
                             (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
                }

              hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }

          return hash;
        }
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
                               hash_arg_in_memory_p,
                               have_reg_qty, cb);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
                                 hash_arg_in_memory_p,
                                 have_reg_qty, cb);
          break;

        case 's':
          hash += hash_rtx_string (XSTR (x, i));
          break;

        case 'i':
          hash += (unsigned int) XINT (x, i);
          break;

        case '0': case 't':
          /* Unused.  */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash;
}
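
/* Editorial illustration, not part of GCC (kept out of the build): the
   "repeat" label above turns the final recursive call into iteration so
   deep rtx chains do not grow the C stack.  The same idiom on a toy list
   hash, with hypothetical names:  */
#if 0
struct toy_cell { int value; struct toy_cell *rest; };

static unsigned
toy_hash_list (const struct toy_cell *c)
{
  unsigned hash = 0;

 repeat:
  if (c == 0)
    return hash;
  hash += (unsigned) c->value;
  c = c->rest;          /* the tail call rewritten as a jump */
  goto repeat;
}
#endif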
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which does not have the RTX_UNCHANGING_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
          int *hash_arg_in_memory_p, bool have_reg_qty)
{
  return hash_rtx_cb (x, mode, do_not_record_p,
                      hash_arg_in_memory_p, have_reg_qty, NULL);
}
/* Hash an rtx X for cse via hash_rtx.
   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   does not have the RTX_UNCHANGING_P bit set.  */

static inline unsigned
canon_hash (rtx x, enum machine_mode mode)
{
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}
/* Like canon_hash but with no side effects, i.e. do_not_record
   and hash_arg_in_memory are not changed.  */

static inline unsigned
safe_hash (rtx x, enum machine_mode mode)
{
  int dummy_do_not_record;
  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
}
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */

int
exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
      return x == y;

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
        return REGNO (x) == REGNO (y);
      else
        {
          unsigned int regno = REGNO (y);
          unsigned int i;
          unsigned int endregno = END_REGNO (y);

          /* If the quantities are not the same, the expressions are not
             equivalent.  If they are the same and we are not to validate,
             they are equivalent.  Otherwise, ensure all regs are
             up-to-date.  */

          if (REG_QTY (REGNO (x)) != REG_QTY (regno))
            return 0;

          if (! validate)
            return 1;

          for (i = regno; i < endregno; i++)
            if (REG_IN_TABLE (i) != REG_TICK (i))
              return 0;

          return 1;
        }

    case MEM:
      if (for_gcse)
        {
          /* A volatile mem should not be considered equivalent to any
             other.  */
          if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
            return 0;

          /* Can't merge two expressions in different alias sets, since we
             can decide that the expression is transparent in a block when
             it isn't, due to it being set with the different alias set.

             Also, can't merge two expressions with different MEM_ATTRS.
             They could e.g. be two different entities allocated into the
             same space on the stack (see e.g. PR25130).  In that case, the
             MEM addresses can be the same, even though the two MEMs are
             absolutely not equivalent.

             But because really all MEM attributes should be the same for
             equivalent MEMs, we just use the invariant that MEMs that have
             the same attributes share the same mem_attrs data structure.  */
          if (MEM_ATTRS (x) != MEM_ATTRS (y))
            return 0;
        }
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
                            validate, for_gcse)
               && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
                               validate, for_gcse))
              || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
                               validate, for_gcse)
                  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
                                  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
         disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      if (GET_MODE (x) != GET_MODE (y)
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
        return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
                               ASM_OPERANDS_INPUT (y, i),
                               validate, for_gcse)
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
              return 0;
        }

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
                             validate, for_gcse))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
                               validate, for_gcse))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case '0':
        case 't':
          break;

        default:
          gcc_unreachable ();
        }
    }

  return 1;
}
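
/* Editorial illustration, not part of GCC (kept out of the build): for
   commutative codes the routine above accepts either operand order.  The
   shape of that test on plain integers, with a hypothetical name:  */
#if 0
static int
toy_commutative_equal (int x0, int x1, int y0, int y1)
{
  return (x0 == y0 && x1 == y1) || (x0 == y1 && x1 == y0);
}
#endif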
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.  */

static bool
cse_rtx_varies_p (const_rtx x, bool from_alias)
{
  /* We need not check for X and the equivalence class being of the same
     mode because if X is equivalent to a constant in some mode, it
     doesn't vary in any mode.  */

  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (GET_MODE (x) == x_ent->mode
          && x_ent->const_rtx != NULL_RTX)
        return 0;
    }

  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1))
      && REG_P (XEXP (x, 0))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
          && x0_ent->const_rtx != NULL_RTX)
        return 0;
    }

  /* This can happen as the result of virtual register instantiation, if
     the initial constant is too large to be a valid address.  This gives
     us a three instruction sequence, load large offset into a register,
     load fp minus a constant into a register, then a MEM which is the
     sum of the two `constant' registers.  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && REG_P (XEXP (x, 1))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];
      struct qty_table_elem *x1_ent = &qty_table[x1_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
          && x0_ent->const_rtx != NULL_RTX
          && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
          && x1_ent->const_rtx != NULL_RTX)
        return 0;
    }

  return rtx_varies_p (x, from_alias);
}
/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
   the result if necessary.  INSN is as for canon_reg.  */

static void
validate_canon_reg (rtx *xloc, rtx insn)
{
  if (*xloc)
    {
      rtx new_rtx = canon_reg (*xloc, insn);

      /* If replacing pseudo with hard reg or vice versa, ensure the
         insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
      gcc_assert (insn && new_rtx);
      validate_change (insn, xloc, new_rtx, 1);
    }
}
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
        int first;
        int q;
        struct qty_table_elem *ent;

        /* Never replace a hard reg, because hard regs can appear
           in more than one machine mode, and we must preserve the mode
           of each occurrence.  Also, some hard regs appear in
           MEMs that are shared and mustn't be altered.  Don't try to
           replace any reg that maps to a reg of class NO_REGS.  */
        if (REGNO (x) < FIRST_PSEUDO_REGISTER
            || ! REGNO_QTY_VALID_P (REGNO (x)))
          return x;

        q = REG_QTY (REGNO (x));
        ent = &qty_table[q];
        first = ent->first_reg;
        return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
                : REGNO_REG_CLASS (first) == NO_REGS ? x
                : gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
        validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   find what values are actually being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
                      enum machine_mode *pmode1, enum machine_mode *pmode2)
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
         On machines with CC0, this is the only case that can occur, since
         fold_rtx will return the COMPARE or item being compared with zero
         when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
        x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
         STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
        {
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          if (code == NE
              || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
              || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
                  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                      REAL_VALUE_NEGATIVE (fsfv)))
#endif
              )
            x = arg1;
          else if (code == EQ
                   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                           REAL_VALUE_NEGATIVE (fsfv)))
#endif
                   )
            x = arg1, reverse_code = 1;
        }

      /* ??? We could also check for

         (ne (and (eq (...) (const_int 1))) (const_int 0))

         and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
        /* Look up ARG1 in the hash table and see if it has an equivalence
           that lets us see what is being compared.  */
        p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
        {
          p = p->first_same_value;

          /* If what we compare is already known to be constant, that is as
             good as it gets.
             We need to break the loop in this case, because otherwise we
             can have an infinite loop when looking at a reg that is known
             to be a constant which is the same as a comparison of a reg
             against zero which appears later in the insn stream, which in
             turn is constant and the same as the comparison of the first reg
             against zero...  */
          if (p->is_const)
            break;
        }

      for (; p; p = p->next_same_value)
        {
          enum machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* If the entry isn't valid, skip it.  */
          if (! exp_equiv_p (p->exp, p->exp, 1, false))
            continue;

          if (GET_CODE (p->exp) == COMPARE
              /* Another possibility is that this machine has a compare insn
                 that includes the comparison code.  In that case, ARG1 would
                 be equivalent to a comparison operation that would set ARG1 to
                 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
                 ORIG_CODE is the actual comparison being done; if it is an EQ,
                 we must reverse ORIG_CODE.  On machine with a negative value
                 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
              || ((code == NE
                   || (code == LT
                       && GET_MODE_CLASS (inner_mode) == MODE_INT
                       && (GET_MODE_BITSIZE (inner_mode)
                           <= HOST_BITS_PER_WIDE_INT)
                       && (STORE_FLAG_VALUE
                           & ((HOST_WIDE_INT) 1
                              << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (code == LT
                       && SCALAR_FLOAT_MODE_P (inner_mode)
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                           REAL_VALUE_NEGATIVE (fsfv)))
#endif
                   )
                  && COMPARISON_P (p->exp)))
            {
              x = p->exp;
              break;
            }
          else if ((code == EQ
                    || (code == GE
                        && GET_MODE_CLASS (inner_mode) == MODE_INT
                        && (GET_MODE_BITSIZE (inner_mode)
                            <= HOST_BITS_PER_WIDE_INT)
                        && (STORE_FLAG_VALUE
                            & ((HOST_WIDE_INT) 1
                               << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                    || (code == GE
                        && SCALAR_FLOAT_MODE_P (inner_mode)
                        && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                            REAL_VALUE_NEGATIVE (fsfv)))
#endif
                    )
                   && COMPARISON_P (p->exp))
            {
              reverse_code = 1;
              x = p->exp;
              break;
            }

          /* If this is a non-trapping address, e.g. fp + constant, the
             equivalent is a better operand since it may let us predict
             the value of the comparison.  */
          else if (!rtx_addr_can_trap_p (p->exp))
            {
              arg1 = p->exp;
              continue;
            }
        }

      /* If we didn't find a useful equivalence for ARG1, we are done.
         Otherwise, set up for the next iteration.  */
      if (x == 0)
        break;

      /* If we need to reverse the comparison, make sure that that is
         possible -- we can't necessarily infer the value of GE from LT
         with floating-point operands.  */
      if (reverse_code)
        {
          enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
          if (reversed == UNKNOWN)
            break;
          else
            code = reversed;
        }
      else if (COMPARISON_P (x))
        code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
/* If X is a nontrivial arithmetic operation on an argument for which
   a constant value can be determined, return the result of operating
   on that value, as a constant.  Otherwise, return X, possibly with
   one or more operands changed to a forward-propagated constant.

   If X is a register whose contents are known, we do NOT return
   those contents here; equiv_constant is called to perform that task.
   For SUBREGs and MEMs, we do that both here and in equiv_constant.

   INSN is the insn that we may be modifying.  If it is 0, make a copy
   of X before modifying it.  */

static rtx
fold_rtx (rtx x, rtx insn)
{
  enum rtx_code code;
  enum machine_mode mode;
  const char *fmt;
  int i;
  rtx new_rtx = 0;
  int changed = 0;

  /* Operands of X.  */
  rtx folded_arg0;
  rtx folded_arg1;

  /* Constant equivalents of first three operands of X;
     0 when no such equivalent is known.  */
  rtx const_arg0;
  rtx const_arg1;
  rtx const_arg2;

  /* The mode of the first operand of X.  We need this for sign and zero
     extends.  */
  enum machine_mode mode_arg0;

  if (x == 0)
    return x;

  /* Try to perform some initial simplifications on X.  */
  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
    case SUBREG:
      if ((new_rtx = equiv_constant (x)) != NULL_RTX)
        return new_rtx;
      return x;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
    case PC:
      /* No use simplifying an EXPR_LIST
         since they are used only for lists of args
         in a function call's REG_EQUAL note.  */
    case EXPR_LIST:
      return x;

#ifdef HAVE_cc0
    case CC0:
      return prev_insn_cc0;
#endif

    case ASM_OPERANDS:
      if (insn)
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
                             fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
        }
      return x;

#ifdef NO_FUNCTION_CSE
    case CALL:
      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
        return x;
      break;
#endif

    /* Anything else goes through the loop below.  */
    default:
      break;
    }

  mode = GET_MODE (x);
  const_arg0 = 0;
  const_arg1 = 0;
  const_arg2 = 0;
  mode_arg0 = VOIDmode;

  /* Try folding our operands.
     Then see which ones have constant values known.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        rtx folded_arg = XEXP (x, i), const_arg;
        enum machine_mode mode_arg = GET_MODE (folded_arg);

        switch (GET_CODE (folded_arg))
          {
          case MEM:
          case REG:
          case SUBREG:
            const_arg = equiv_constant (folded_arg);
            break;

          case CONST:
          case CONST_INT:
          case SYMBOL_REF:
          case LABEL_REF:
          case CONST_DOUBLE:
          case CONST_FIXED:
          case CONST_VECTOR:
            const_arg = folded_arg;
            break;

#ifdef HAVE_cc0
          case CC0:
            folded_arg = prev_insn_cc0;
            mode_arg = prev_insn_cc0_mode;
            const_arg = equiv_constant (folded_arg);
            break;
#endif

          default:
            folded_arg = fold_rtx (folded_arg, insn);
            const_arg = equiv_constant (folded_arg);
            break;
          }

        /* For the first three operands, see if the operand
           is constant or equivalent to a constant.  */
        switch (i)
          {
          case 0:
            folded_arg0 = folded_arg;
            const_arg0 = const_arg;
            mode_arg0 = mode_arg;
            break;
          case 1:
            folded_arg1 = folded_arg;
            const_arg1 = const_arg;
            break;
          case 2:
            const_arg2 = const_arg;
            break;
          }

        /* Pick the least expensive of the argument and an equivalent constant
           argument.  */
        if (const_arg != 0
            && const_arg != folded_arg
            && COST_IN (const_arg, code) <= COST_IN (folded_arg, code)

            /* It's not safe to substitute the operand of a conversion
               operator with a constant, as the conversion's identity
               depends upon the mode of its operand.  This optimization
               is handled by the call to simplify_unary_operation.  */
            && (GET_RTX_CLASS (code) != RTX_UNARY
                || GET_MODE (const_arg) == mode_arg0
                || (code != ZERO_EXTEND
                    && code != SIGN_EXTEND
                    && code != TRUNCATE
                    && code != FLOAT_TRUNCATE
                    && code != FLOAT_EXTEND
                    && code != FLOAT
                    && code != FIX
                    && code != UNSIGNED_FLOAT
                    && code != UNSIGNED_FIX)))
          folded_arg = const_arg;

        if (folded_arg == XEXP (x, i))
          continue;

        if (insn == NULL_RTX && !changed)
          x = copy_rtx (x);
        changed = 1;
        validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
      }

  if (changed)
    {
      /* Canonicalize X if necessary, and keep const_argN and folded_argN
         consistent with the order in X.  */
      if (canonicalize_change_group (insn, x))
        {
          rtx tem;
          tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
          tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
        }

      apply_change_group ();
    }

  /* If X is an arithmetic operation, see if we can simplify it.  */

  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
        /* We can't simplify extension ops unless we know the
           original mode.  */
        if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
            && mode_arg0 == VOIDmode)
          break;

        new_rtx = simplify_unary_operation (code, mode,
                                            const_arg0 ? const_arg0 : folded_arg0,
                                            mode_arg0);
      }
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      /* See what items are actually being compared and set FOLDED_ARG[01]
         to those values and CODE to the actual comparison code.  If any are
         constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
         do anything if both operands are already known to be constant.  */

      /* ??? Vector mode comparisons are not supported yet.  */
      if (VECTOR_MODE_P (mode))
        break;

      if (const_arg0 == 0 || const_arg1 == 0)
        {
          struct table_elt *p0, *p1;
          rtx true_rtx, false_rtx;
          enum machine_mode mode_arg1;

          if (SCALAR_FLOAT_MODE_P (mode))
            {
#ifdef FLOAT_STORE_FLAG_VALUE
              true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
                          (FLOAT_STORE_FLAG_VALUE (mode), mode));
#else
              true_rtx = NULL_RTX;
#endif
              false_rtx = CONST0_RTX (mode);
            }
          else
            {
              true_rtx = const_true_rtx;
              false_rtx = const0_rtx;
            }

          code = find_comparison_args (code, &folded_arg0, &folded_arg1,
                                       &mode_arg0, &mode_arg1);

          /* If the mode is VOIDmode or a MODE_CC mode, we don't know
             what kinds of things are being compared, so we can't do
             anything with this comparison.  */

          if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
            break;

          const_arg0 = equiv_constant (folded_arg0);
          const_arg1 = equiv_constant (folded_arg1);

          /* If we do not now have two constants being compared, see
             if we can nevertheless deduce some things about the
             comparison.  */
          if (const_arg0 == 0 || const_arg1 == 0)
            {
              if (const_arg1 != NULL)
                {
                  rtx cheapest_simplification;
                  int cheapest_cost;
                  rtx simp_result;
                  struct table_elt *p;

                  /* See if we can find an equivalent of folded_arg0
                     that gets us a cheaper expression, possibly a
                     constant through simplifications.  */
                  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
                              mode_arg0);

                  if (p != NULL)
                    {
                      cheapest_simplification = x;
                      cheapest_cost = COST (x);

                      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
                        {
                          int cost;

                          /* If the entry isn't valid, skip it.  */
                          if (! exp_equiv_p (p->exp, p->exp, 1, false))
                            continue;

                          /* Try to simplify using this equivalence.  */
                          simp_result
                            = simplify_relational_operation (code, mode,
                                                             mode_arg0,
                                                             p->exp,
                                                             folded_arg1);

                          if (simp_result == NULL)
                            continue;

                          cost = COST (simp_result);
                          if (cost < cheapest_cost)
                            {
                              cheapest_cost = cost;
                              cheapest_simplification = simp_result;
                            }
                        }

                      /* If we have a cheaper expression now, use that
                         and try folding it further, from the top.  */
                      if (cheapest_simplification != x)
                        return fold_rtx (copy_rtx (cheapest_simplification),
                                         insn);
                    }
                }

              /* See if the two operands are the same.  */

              if ((REG_P (folded_arg0)
                   && REG_P (folded_arg1)
                   && (REG_QTY (REGNO (folded_arg0))
                       == REG_QTY (REGNO (folded_arg1))))
                  || ((p0 = lookup (folded_arg0,
                                    SAFE_HASH (folded_arg0, mode_arg0),
                                    mode_arg0))
                      && (p1 = lookup (folded_arg1,
                                       SAFE_HASH (folded_arg1, mode_arg0),
                                       mode_arg0))
                      && p0->first_same_value == p1->first_same_value))
                folded_arg1 = folded_arg0;

              /* If FOLDED_ARG0 is a register, see if the comparison we are
                 doing now is either the same as we did before or the reverse
                 (we only check the reverse if not floating-point).  */
              else if (REG_P (folded_arg0))
                {
                  int qty = REG_QTY (REGNO (folded_arg0));

                  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
                    {
                      struct qty_table_elem *ent = &qty_table[qty];

                      if ((comparison_dominates_p (ent->comparison_code, code)
                           || (! FLOAT_MODE_P (mode_arg0)
                               && comparison_dominates_p (ent->comparison_code,
                                                          reverse_condition (code))))
                          && (rtx_equal_p (ent->comparison_const, folded_arg1)
                              || (const_arg1
                                  && rtx_equal_p (ent->comparison_const,
                                                  const_arg1))
                              || (REG_P (folded_arg1)
                                  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
                        {
                          if (comparison_dominates_p (ent->comparison_code, code))
                            {
                              if (true_rtx)
                                return true_rtx;
                              else
                                break;
                            }
                          else
                            return false_rtx;
                        }
                    }
                }
            }
        }

      /* If we are comparing against zero, see if the first operand is
         equivalent to an IOR with a constant.  If so, we may be able to
         determine the result of this comparison.  */
      if (const_arg1 == const0_rtx && !const_arg0)
        {
          rtx y = lookup_as_function (folded_arg0, IOR);
          rtx inner_const;

          if (y != 0
              && (inner_const = equiv_constant (XEXP (y, 1))) != 0
              && CONST_INT_P (inner_const)
              && INTVAL (inner_const) != 0)
            folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
        }

      {
        rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
        rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
        new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
      }
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      switch (code)
        {
        case PLUS:
          /* If the second operand is a LABEL_REF, see if the first is a MINUS
             with that LABEL_REF as its second operand.  If so, the result is
             the first operand of that MINUS.  This handles switches with an
             ADDR_DIFF_VEC table.  */
          if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
            {
              rtx y
                = GET_CODE (folded_arg0) == MINUS ? folded_arg0
                : lookup_as_function (folded_arg0, MINUS);

              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
                  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
                return XEXP (y, 0);

              /* Now try for a CONST of a MINUS like the above.  */
              if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
                        : lookup_as_function (folded_arg0, CONST))) != 0
                  && GET_CODE (XEXP (y, 0)) == MINUS
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
                  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
                return XEXP (XEXP (y, 0), 0);
            }

          /* Likewise if the operands are in the other order.  */
          if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
            {
              rtx y
                = GET_CODE (folded_arg1) == MINUS ? folded_arg1
                : lookup_as_function (folded_arg1, MINUS);

              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
                  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
                return XEXP (y, 0);

              /* Now try for a CONST of a MINUS like the above.  */
              if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
                        : lookup_as_function (folded_arg1, CONST))) != 0
                  && GET_CODE (XEXP (y, 0)) == MINUS
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
                  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
                return XEXP (XEXP (y, 0), 0);
            }

          /* If second operand is a register equivalent to a negative
             CONST_INT, see if we can find a register equivalent to the
             positive constant.  Make a MINUS if so.  Don't do this for
             a non-negative constant since we might then alternate between
             choosing positive and negative constants.  Having the positive
             constant previously-used is the more common case.  Be sure
             the resulting constant is non-negative; if const_arg1 were
             the smallest negative number this would overflow: depending
             on the mode, this would either just be the same value (and
             hence not save anything) or be incorrect.  */
          if (const_arg1 != 0 && CONST_INT_P (const_arg1)
              && INTVAL (const_arg1) < 0
              /* This used to test

                 -INTVAL (const_arg1) >= 0

                 But The Sun V5.0 compilers mis-compiled that test.  So
                 instead we test for the problematic value in a more direct
                 manner and hope the Sun compilers get it correct.  */
              && INTVAL (const_arg1) !=
                 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
              && REG_P (folded_arg1))
            {
              rtx new_const = GEN_INT (-INTVAL (const_arg1));
              struct table_elt *p
                = lookup (new_const, SAFE_HASH (new_const, mode), mode);

              if (p)
                for (p = p->first_same_value; p; p = p->next_same_value)
                  if (REG_P (p->exp))
                    return simplify_gen_binary (MINUS, mode, folded_arg0,
                                                canon_reg (p->exp, NULL_RTX));
            }
          goto from_plus;

        case MINUS:
          /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
             If so, produce (PLUS Z C2-C).  */
          if (const_arg1 != 0 && CONST_INT_P (const_arg1))
            {
              rtx y = lookup_as_function (XEXP (x, 0), PLUS);
              if (y && CONST_INT_P (XEXP (y, 1)))
                return fold_rtx (plus_constant (copy_rtx (y),
                                                -INTVAL (const_arg1)),
                                 NULL_RTX);
            }

          /* Fall through.  */

        from_plus:
        case SMIN: case SMAX: case UMIN: case UMAX:
        case IOR: case AND: case XOR:
        case MULT:
        case ASHIFT: case LSHIFTRT: case ASHIFTRT:
          /* If we have (<op> <reg> <const_int>) for an associative OP and REG
             is known to be of similar form, we may be able to replace the
             operation with a combined operation.  This may eliminate the
             intermediate operation if every use is simplified in this way.
             Note that the similar optimization done by combine.c only works
             if the intermediate operation's result has only one reference.  */

          if (REG_P (folded_arg0)
              && const_arg1 && CONST_INT_P (const_arg1))
            {
              int is_shift
                = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
              rtx y, inner_const, new_const;
              rtx canon_const_arg1 = const_arg1;
              enum rtx_code associate_code;

              if (is_shift
                  && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
                      || INTVAL (const_arg1) < 0))
                {
                  if (SHIFT_COUNT_TRUNCATED)
                    canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
                                                & (GET_MODE_BITSIZE (mode)
                                                   - 1));
                  else
                    break;
                }

              y = lookup_as_function (folded_arg0, code);
              if (y == 0)
                break;

              /* If we have compiled a statement like
                 "if (x == (x & mask1))", and now are looking at
                 "x & mask2", we will have a case where the first operand
                 of Y is the same as our first operand.  Unless we detect
                 this case, an infinite loop will result.  */
              if (XEXP (y, 0) == folded_arg0)
                break;

              inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
              if (!inner_const || !CONST_INT_P (inner_const))
                break;

              /* Don't associate these operations if they are a PLUS with the
                 same constant and it is a power of two.  These might be doable
                 with a pre- or post-increment.  Similarly for two subtracts of
                 identical powers of two with post decrement.  */

              if (code == PLUS && const_arg1 == inner_const
                  && ((HAVE_PRE_INCREMENT
                       && exact_log2 (INTVAL (const_arg1)) >= 0)
                      || (HAVE_POST_INCREMENT
                          && exact_log2 (INTVAL (const_arg1)) >= 0)
                      || (HAVE_PRE_DECREMENT
                          && exact_log2 (- INTVAL (const_arg1)) >= 0)
                      || (HAVE_POST_DECREMENT
                          && exact_log2 (- INTVAL (const_arg1)) >= 0)))
                break;

              /* ??? Vector mode shifts by scalar
                 shift operand are not supported yet.  */
              if (is_shift && VECTOR_MODE_P (mode))
                break;

              if (is_shift
                  && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
                      || INTVAL (inner_const) < 0))
                {
                  if (SHIFT_COUNT_TRUNCATED)
                    inner_const = GEN_INT (INTVAL (inner_const)
                                           & (GET_MODE_BITSIZE (mode) - 1));
                  else
                    break;
                }

              /* Compute the code used to compose the constants.  For example,
                 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */

              associate_code = (is_shift || code == MINUS ? PLUS : code);

              new_const = simplify_binary_operation (associate_code, mode,
                                                     canon_const_arg1,
                                                     inner_const);

              if (new_const == 0)
                break;

              /* If we are associating shift operations, don't let this
                 produce a shift of the size of the object or larger.
                 This could occur when we follow a sign-extend by a right
                 shift on a machine that does a sign-extend as a pair
                 of shifts.  */

              if (is_shift
                  && CONST_INT_P (new_const)
                  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
                {
                  /* As an exception, we can turn an ASHIFTRT of this
                     form into a shift of the number of bits - 1.  */
                  if (code == ASHIFTRT)
                    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
                  else if (!side_effects_p (XEXP (y, 0)))
                    return CONST0_RTX (mode);
                  else
                    break;
                }

              y = copy_rtx (XEXP (y, 0));

              /* If Y contains our first operand (the most common way this
                 can happen is if Y is a MEM), we would go into an infinite
                 loop if we tried to fold it.  So don't in that case.  */

              if (! reg_mentioned_p (folded_arg0, y))
                y = fold_rtx (y, insn);

              return simplify_gen_binary (code, mode, y, new_const);
            }
          break;

        case DIV: case UDIV:
          /* ??? The associative optimization performed immediately above is
             also possible for DIV and UDIV using associate_code of MULT.
             However, we would need extra code to verify that the
             multiplication does not overflow, that is, there is no overflow
             in the calculation of new_const.  */
          break;

        default:
          break;
        }

      new_rtx = simplify_binary_operation (code, mode,
                                           const_arg0 ? const_arg0 : folded_arg0,
                                           const_arg1 ? const_arg1 : folded_arg1);
      break;

    case RTX_OBJ:
      /* (lo_sum (high X) X) is simply X.  */
      if (code == LO_SUM && const_arg0 != 0
          && GET_CODE (const_arg0) == HIGH
          && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
        return const_arg1;
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
                                            const_arg0 ? const_arg0 : folded_arg0,
                                            const_arg1 ? const_arg1 : folded_arg1,
                                            const_arg2 ? const_arg2 : XEXP (x, 2));
      break;

    default:
      break;
    }

  return new_rtx ? new_rtx : x;
}
/* Return a constant value currently equivalent to X.
   Return 0 if we don't know one.  */

static rtx
equiv_constant (rtx x)
{
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (x_ent->const_rtx)
        x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
    }

  if (x == 0 || CONSTANT_P (x))
    return x;

  if (GET_CODE (x) == SUBREG)
    {
      enum machine_mode mode = GET_MODE (x);
      enum machine_mode imode = GET_MODE (SUBREG_REG (x));
      rtx new_rtx;

      /* See if we previously assigned a constant value to this SUBREG.  */
      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
          || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
          || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
        return new_rtx;

      /* If we didn't and if doing so makes sense, see if we previously
         assigned a constant value to the enclosing word mode SUBREG.  */
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
          && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
        {
          int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
          if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
            {
              rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
              new_rtx = lookup_as_function (y, CONST_INT);
              if (new_rtx)
                return gen_lowpart (mode, new_rtx);
            }
        }

      /* Otherwise see if we already have a constant for the inner REG.  */
      if (REG_P (SUBREG_REG (x))
          && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
        return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));

      return 0;
    }

  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
     the hash table in case its value was seen before.  */

  if (MEM_P (x))
    {
      struct table_elt *elt;

      x = avoid_constant_pool_reference (x);
      if (CONSTANT_P (x))
        return x;

      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
      if (elt == 0)
        return 0;

      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
        if (elt->is_const && CONSTANT_P (elt->exp))
          return elt->exp;
    }

  return 0;
}
/* Given INSN, a jump insn, TAKEN indicates if we are following the
   "taken" branch.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of
        if (i == 2)
   we can add the fact that `i' and '2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */

static void
record_jump_equiv (rtx insn, bool taken)
{
  int cond_known_true;
  rtx op0, op1;
  rtx set;
  enum machine_mode mode, mode0, mode1;
  int reversed_nonequality = 0;
  enum rtx_code code;

  /* Ensure this is the right kind of insn.  */
  gcc_assert (any_condjump_p (insn));

  set = pc_set (insn);

  /* See if this jump condition is known true or false.  */
  if (taken)
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
  else
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);

  /* Get the type of comparison being done and the operands being compared.
     If we had to reverse a non-equality condition, record that fact so we
     know that it isn't valid for floating-point.  */
  code = GET_CODE (XEXP (SET_SRC (set), 0));
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);

  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
  if (! cond_known_true)
    {
      code = reversed_comparison_code_parts (code, op0, op1, insn);

      /* Don't remember if we can't find the inverse.  */
      if (code == UNKNOWN)
        return;
      reversed_nonequality = (code != EQ && code != NE);
    }

  /* The mode is the mode of the non-constant.  */
  mode = mode0;
  if (mode1 != VOIDmode)
    mode = mode1;

  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}
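
/* Editorial illustration, not part of GCC (kept out of the build): on the
   taken arm of "if (i == 2)" the pass may record that i and 2 are now
   equivalent; on the not-taken arm only the reversed comparison can be
   remembered.  A toy decision helper, with hypothetical names:  */
#if 0
enum toy_cmp { TOY_EQ, TOY_NE, TOY_LT, TOY_GE };

/* Return 1 if following this branch lets us record an equality between
   the two operands, i.e. the condition known true on this arm is EQ.  */
static int
toy_records_equality (enum toy_cmp code, int taken)
{
  enum toy_cmp known_true = taken ? code
                                  : (code == TOY_EQ ? TOY_NE
                                     : code == TOY_NE ? TOY_EQ
                                     : code == TOY_LT ? TOY_GE : TOY_LT);
  return known_true == TOY_EQ;
}
#endif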
/* Yet another form of subreg creation.  In this case, we want something in
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */

static rtx
record_jump_cond_subreg (enum machine_mode mode, rtx op)
{
  enum machine_mode op_mode = GET_MODE (op);
  if (op_mode == mode || op_mode == VOIDmode)
    return op;
  return lowpart_subreg (mode, op, op_mode);
}
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */

static void
record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
                  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && GET_CODE (op0) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op0))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == EQ && GET_CODE (op1) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op1))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Hash both operands.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.  Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
         register, or if OP1 is neither a register nor a constant, we
         can't do anything.  */

      if (!REG_P (op1))
        op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
          || !REG_P (op0) || op1 == 0)
        return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
         new quantity number.  */
      if (op0_elt == 0)
        {
          if (insert_regs (op0, NULL, 0))
            {
              rehash_using_reg (op0);
              op0_hash = HASH (op0, mode);

              /* If OP0 is contained in OP1, this changes its hash code
                 as well.  Faster to rehash than to check, except
                 for the simple case of a constant.  */
              if (! CONSTANT_P (op1))
                op1_hash = HASH (op1, mode);
            }

          op0_elt = insert (op0, NULL, op0_hash, mode);
          op0_elt->in_memory = op0_in_memory;
        }

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
        {
          /* Look it up again--in case op0 and op1 are the same.  */
          op1_elt = lookup (op1, op1_hash, mode);

          /* Put OP1 in the hash table so it gets a new quantity number.  */
          if (op1_elt == 0)
            {
              if (insert_regs (op1, NULL, 0))
                {
                  rehash_using_reg (op1);
                  op1_hash = HASH (op1, mode);
                }

              op1_elt = insert (op1, NULL, op1_hash, mode);
              op1_elt->in_memory = op1_in_memory;
            }

          ent->comparison_const = NULL_RTX;
          ent->comparison_qty = REG_QTY (REGNO (op1));
        }
      else
        {
          ent->comparison_const = op1;
          ent->comparison_qty = -1;
        }

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
        {
          rehash_using_reg (op0);
          op0_hash = HASH (op0, mode);
        }

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
        {
          rehash_using_reg (op1);
          op1_hash = HASH (op1, mode);
        }

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}
4160 /* CSE processing for one instruction.
4161 First simplify sources and addresses of all assignments
4162 in the instruction, using previously-computed equivalents values.
4163 Then install the new sources and destinations in the table
4164 of available values. */
/* Data on one SET contained in the instruction.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.  */
  struct table_elt *dest_addr_elt;
};
static void
cse_insn (rtx insn)
{
  rtx x = PATTERN (insn);
  int i;
  rtx tem;
  int n_sets = 0;

  rtx src_eqv = 0;
  struct table_elt *src_eqv_elt = 0;
  int src_eqv_volatile = 0;
  int src_eqv_in_memory = 0;
  unsigned src_eqv_hash = 0;

  struct set *sets = (struct set *) 0;

  this_insn = insn;
#ifdef HAVE_cc0
  /* Records what this insn does to set CC0.  */
  this_insn_cc0 = 0;
  this_insn_cc0_mode = VOIDmode;
#endif

  /* Find all the SETs and CLOBBERs in this instruction.
     Record all the SETs in the array `set' and count them.
     Also determine whether there is a CLOBBER that invalidates
     all memory references, or all references at varying addresses.  */

  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
        {
          if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
            invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
          XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
        }
    }

  if (GET_CODE (x) == SET)
    {
      sets = XALLOCA (struct set);
      sets[0].rtl = x;

      /* Ignore SETs that are unconditional jumps.
         They never need cse processing, so this does not hurt.
         The reason is not efficiency but rather
         so that we can test at the end for instructions
         that have been simplified to unconditional jumps
         and not be misled by unchanged instructions
         that were unconditional jumps to begin with.  */
      if (SET_DEST (x) == pc_rtx
          && GET_CODE (SET_SRC (x)) == LABEL_REF)
        ;

      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
         The hard function value register is used only once, to copy to
         someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
         Ensure we invalidate the destination register.  On the 80386 no
         other code would invalidate it since it is a fixed_reg.
         We need not check the return of apply_change_group; see canon_reg.  */
      else if (GET_CODE (SET_SRC (x)) == CALL)
        {
          canon_reg (SET_SRC (x), insn);
          apply_change_group ();
          fold_rtx (SET_SRC (x), insn);
          invalidate (SET_DEST (x), VOIDmode);
        }
      else
        n_sets = 1;
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int lim = XVECLEN (x, 0);

      sets = XALLOCAVEC (struct set, lim);

      /* Find all regs explicitly clobbered in this insn,
         and ensure they are not replaced with any other regs
         elsewhere in this insn.
         When a reg that is clobbered is also used for input,
         we should presume that that is for a reason,
         and we should not substitute some other register
         which is not supposed to be clobbered.
         Therefore, this loop cannot be merged into the one below
         because a CALL may precede a CLOBBER and refer to the
         value clobbered.  We must not let a canonicalization do
         anything in that case.  */
      for (i = 0; i < lim; i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == CLOBBER)
            {
              rtx clobbered = XEXP (y, 0);

              if (REG_P (clobbered)
                  || GET_CODE (clobbered) == SUBREG)
                invalidate (clobbered, VOIDmode);
              else if (GET_CODE (clobbered) == STRICT_LOW_PART
                       || GET_CODE (clobbered) == ZERO_EXTRACT)
                invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
            }
        }

      for (i = 0; i < lim; i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == SET)
            {
              /* As above, we ignore unconditional jumps and call-insns and
                 ignore the result of apply_change_group.  */
              if (GET_CODE (SET_SRC (y)) == CALL)
                {
                  canon_reg (SET_SRC (y), insn);
                  apply_change_group ();
                  fold_rtx (SET_SRC (y), insn);
                  invalidate (SET_DEST (y), VOIDmode);
                }
              else if (SET_DEST (y) == pc_rtx
                       && GET_CODE (SET_SRC (y)) == LABEL_REF)
                ;
              else
                sets[n_sets++].rtl = y;
            }
          else if (GET_CODE (y) == CLOBBER)
            {
              /* If we clobber memory, canon the address.
                 This does nothing when a register is clobbered
                 because we have already invalidated the reg.  */
              if (MEM_P (XEXP (y, 0)))
                canon_reg (XEXP (y, 0), insn);
            }
          else if (GET_CODE (y) == USE
                   && ! (REG_P (XEXP (y, 0))
                         && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
            canon_reg (y, insn);
          else if (GET_CODE (y) == CALL)
            {
              /* The result of apply_change_group can be ignored; see
                 canon_reg.  */
              canon_reg (y, insn);
              apply_change_group ();
              fold_rtx (y, insn);
            }
        }
    }
  else if (GET_CODE (x) == CLOBBER)
    {
      if (MEM_P (XEXP (x, 0)))
        canon_reg (XEXP (x, 0), insn);
    }
  /* Canonicalize a USE of a pseudo register or memory location.  */
  else if (GET_CODE (x) == USE
           && ! (REG_P (XEXP (x, 0))
                 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
    canon_reg (XEXP (x, 0), insn);
  else if (GET_CODE (x) == CALL)
    {
      /* The result of apply_change_group can be ignored; see canon_reg.  */
      canon_reg (x, insn);
      apply_change_group ();
      fold_rtx (x, insn);
    }
  else if (DEBUG_INSN_P (insn))
    canon_reg (PATTERN (insn), insn);
  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
     is handled specially for this case, and if it isn't set, then there will
     be no equivalence for the destination.  */
  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
          || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
    {
      /* The result of apply_change_group can be ignored; see canon_reg.  */
      canon_reg (XEXP (tem, 0), insn);
      apply_change_group ();
      src_eqv = fold_rtx (XEXP (tem, 0), insn);
      XEXP (tem, 0) = copy_rtx (src_eqv);
      df_notes_rescan (insn);
    }
  /* Canonicalize sources and addresses of destinations.
     We do this in a separate pass to avoid problems when a MATCH_DUP is
     present in the insn pattern.  In that case, we want to ensure that
     we don't break the duplicate nature of the pattern.  So we will replace
     both operands at the same time.  Otherwise, we would fail to find an
     equivalent substitution in the loop calling validate_change below.

     We used to suppress canonicalization of DEST if it appears in SRC,
     but we don't do this any more.  */

  for (i = 0; i < n_sets; i++)
    {
      rtx dest = SET_DEST (sets[i].rtl);
      rtx src = SET_SRC (sets[i].rtl);
      rtx new_rtx = canon_reg (src, insn);

      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);

      if (GET_CODE (dest) == ZERO_EXTRACT)
        {
          validate_change (insn, &XEXP (dest, 1),
                           canon_reg (XEXP (dest, 1), insn), 1);
          validate_change (insn, &XEXP (dest, 2),
                           canon_reg (XEXP (dest, 2), insn), 1);
        }

      while (GET_CODE (dest) == SUBREG
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      if (MEM_P (dest))
        canon_reg (dest, insn);
    }

  /* Now that we have done all the replacements, we can apply the change
     group and see if they all work.  Note that this will cause some
     canonicalizations that would have worked individually not to be applied
     because some other canonicalization didn't work, but this should not
     occur often.

     The result of apply_change_group can be ignored; see canon_reg.  */

  apply_change_group ();
  /* Set sets[i].src_elt to the class each source belongs to.
     Detect assignments from or to volatile things
     and set set[i] to zero so they will be ignored
     in the rest of this function.

     Nothing in this loop changes the hash table or the register chains.  */

  for (i = 0; i < n_sets; i++)
    {
      bool repeat = false;
      rtx src, dest;
      rtx src_folded;
      struct table_elt *elt = 0, *p;
      enum machine_mode mode;
      rtx src_eqv_here;
      rtx src_const = 0;
      rtx src_related = 0;
      bool src_related_is_const_anchor = false;
      struct table_elt *src_const_elt = 0;
      int src_cost = MAX_COST;
      int src_eqv_cost = MAX_COST;
      int src_folded_cost = MAX_COST;
      int src_related_cost = MAX_COST;
      int src_elt_cost = MAX_COST;
      int src_regcost = MAX_COST;
      int src_eqv_regcost = MAX_COST;
      int src_folded_regcost = MAX_COST;
      int src_related_regcost = MAX_COST;
      int src_elt_regcost = MAX_COST;
      /* Set nonzero if we need to call force_const_mem on the
         contents of src_folded before using it.  */
      int src_folded_force_flag = 0;
      dest = SET_DEST (sets[i].rtl);
      src = SET_SRC (sets[i].rtl);

      /* If SRC is a constant that has no machine mode,
         hash it with the destination's machine mode.
         This way we can keep different modes separate.  */

      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
      sets[i].mode = mode;

      if (src_eqv)
        {
          enum machine_mode eqvmode = mode;

          if (GET_CODE (dest) == STRICT_LOW_PART)
            eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
          do_not_record = 0;
          hash_arg_in_memory = 0;
          src_eqv_hash = HASH (src_eqv, eqvmode);

          /* Find the equivalence class for the equivalent expression.  */

          if (!do_not_record)
            src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);

          src_eqv_volatile = do_not_record;
          src_eqv_in_memory = hash_arg_in_memory;
        }

      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
         value of the INNER register, not the destination.  So it is not
         a valid substitution for the source.  But save it for later.  */
      if (GET_CODE (dest) == STRICT_LOW_PART)
        src_eqv_here = 0;
      else
        src_eqv_here = src_eqv;

      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
         simplified result, which may not necessarily be valid.  */
      src_folded = fold_rtx (src, insn);

      /* ??? This caused bad code to be generated for the m68k port with -O2.
         Suppose src is (CONST_INT -1), and that after truncation src_folded
         is (CONST_INT 3).  Suppose src_folded is then used for src_const.
         At the end we will add src and src_const to the same equivalence
         class.  We now have 3 and -1 on the same equivalence class.  This
         causes later instructions to be mis-optimized.  */
      /* If storing a constant in a bitfield, pre-truncate the constant
         so we will be able to record it later.  */
      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
        {
          rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

          if (CONST_INT_P (src)
              && CONST_INT_P (width)
              && INTVAL (width) < HOST_BITS_PER_WIDE_INT
              && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
            src_folded
              = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
                                          << INTVAL (width)) - 1));
        }

      /* Compute SRC's hash code, and also notice if it
         should not be recorded at all.  In that case,
         prevent any further processing of this assignment.  */
      do_not_record = 0;
      hash_arg_in_memory = 0;

      sets[i].src = src;
      sets[i].src_hash = HASH (src, mode);
      sets[i].src_volatile = do_not_record;
      sets[i].src_in_memory = hash_arg_in_memory;

      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
         a pseudo, do not record SRC.  Using SRC as a replacement for
         anything else will be incorrect in that situation.  Note that
         this usually occurs only for stack slots, in which case all the
         RTL would be referring to SRC, so we don't lose any optimization
         opportunities by not having SRC in the hash table.  */

      if (MEM_P (src)
          && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
          && REG_P (dest)
          && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
        sets[i].src_volatile = 1;

#if 0
      /* It is no longer clear why we used to do this, but it doesn't
         appear to still be needed.  So let's try without it since this
         code hurts cse'ing widened ops.  */
      /* If source is a paradoxical subreg (such as QI treated as an SI),
         treat it as volatile.  It may do the work of an SI in one context
         where the extra bits are not being used, but cannot replace an SI
         in general.  */
      if (GET_CODE (src) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (src))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
        sets[i].src_volatile = 1;
#endif
      /* Locate all possible equivalent forms for SRC.  Try to replace
         SRC in the insn with each cheaper equivalent.

         We have the following types of equivalents: SRC itself, a folded
         version, a value given in a REG_EQUAL note, or a value related
         to a constant.

         Each of these equivalents may be part of an additional class
         of equivalents (if more than one is in the table, they must be in
         the same class; we check for this).

         If the source is volatile, we don't do any table lookups.

         We note any constant equivalent for possible later use in a
         REG_NOTE.  */

      if (!sets[i].src_volatile)
        elt = lookup (src, sets[i].src_hash, mode);

      sets[i].src_elt = elt;

      if (elt && src_eqv_here && src_eqv_elt)
        {
          if (elt->first_same_value != src_eqv_elt->first_same_value)
            {
              /* The REG_EQUAL is indicating that two formerly distinct
                 classes are now equivalent.  So merge them.  */
              merge_equiv_classes (elt, src_eqv_elt);
              src_eqv_hash = HASH (src_eqv, elt->mode);
              src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
            }

          src_eqv_here = 0;
        }

      else if (src_eqv_elt)
        elt = src_eqv_elt;

      /* Try to find a constant somewhere and record it in `src_const'.
         Record its table element, if any, in `src_const_elt'.  Look in
         any known equivalences first.  (If the constant is not in the
         table, also set `sets[i].src_const_hash').  */
      if (elt)
        for (p = elt->first_same_value; p; p = p->next_same_value)
          if (p->is_const)
            {
              src_const = p->exp;
              src_const_elt = elt;
              break;
            }

      if (src_const == 0
          && (CONSTANT_P (src_folded)
              /* Consider (minus (label_ref L1) (label_ref L2)) as
                 "constant" here so we will record it.  This allows us
                 to fold switch statements when an ADDR_DIFF_VEC is used.  */
              || (GET_CODE (src_folded) == MINUS
                  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
                  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
        src_const = src_folded, src_const_elt = elt;
      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
        src_const = src_eqv_here, src_const_elt = src_eqv_elt;

      /* If we don't know if the constant is in the table, get its
         hash code and look it up.  */
      if (src_const && src_const_elt == 0)
        {
          sets[i].src_const_hash = HASH (src_const, mode);
          src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
        }

      sets[i].src_const = src_const;
      sets[i].src_const_elt = src_const_elt;

      /* If the constant and our source are both in the table, mark them as
         equivalent.  Otherwise, if a constant is in the table but the source
         isn't, set ELT to it.  */
      if (src_const_elt && elt
          && src_const_elt->first_same_value != elt->first_same_value)
        merge_equiv_classes (elt, src_const_elt);
      else if (src_const_elt && elt == 0)
        elt = src_const_elt;
      /* See if there is a register linearly related to a constant
         equivalent of SRC.  */

      if (src_const
          && (GET_CODE (src_const) == CONST
              || (src_const_elt && src_const_elt->related_value != 0)))
        {
          src_related = use_related_value (src_const, src_const_elt);
          if (src_related)
            {
              struct table_elt *src_related_elt
                = lookup (src_related, HASH (src_related, mode), mode);
              if (src_related_elt && elt)
                {
                  if (elt->first_same_value
                      != src_related_elt->first_same_value)
                    /* This can occur when we previously saw a CONST
                       involving a SYMBOL_REF and then see the SYMBOL_REF
                       twice.  Merge the involved classes.  */
                    merge_equiv_classes (elt, src_related_elt);

                  src_related = 0;
                  src_related_elt = 0;
                }
              else if (src_related_elt && elt == 0)
                elt = src_related_elt;
            }
        }
      /* See if we have a CONST_INT that is already in a register in a
         wider mode.  */

      if (src_const && src_related == 0 && CONST_INT_P (src_const)
          && GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
        {
          enum machine_mode wider_mode;

          for (wider_mode = GET_MODE_WIDER_MODE (mode);
               wider_mode != VOIDmode
               && GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
               && src_related == 0;
               wider_mode = GET_MODE_WIDER_MODE (wider_mode))
            {
              struct table_elt *const_elt
                = lookup (src_const, HASH (src_const, wider_mode), wider_mode);

              if (const_elt == 0)
                continue;

              for (const_elt = const_elt->first_same_value;
                   const_elt; const_elt = const_elt->next_same_value)
                if (REG_P (const_elt->exp))
                  {
                    src_related = gen_lowpart (mode, const_elt->exp);
                    break;
                  }
            }
        }
      /* Another possibility is that we have an AND with a constant in
         a mode narrower than a word.  If so, it might have been generated
         as part of an "if" which would narrow the AND.  If we already
         have done the AND in a wider mode, we can use a SUBREG of that
         value.  */

      if (flag_expensive_optimizations && ! src_related
          && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
          && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
        {
          enum machine_mode tmode;
          rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));

          for (tmode = GET_MODE_WIDER_MODE (mode);
               GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
               tmode = GET_MODE_WIDER_MODE (tmode))
            {
              rtx inner = gen_lowpart (tmode, XEXP (src, 0));
              struct table_elt *larger_elt;

              if (inner)
                {
                  PUT_MODE (new_and, tmode);
                  XEXP (new_and, 0) = inner;
                  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
                  if (larger_elt == 0)
                    continue;

                  for (larger_elt = larger_elt->first_same_value;
                       larger_elt; larger_elt = larger_elt->next_same_value)
                    if (REG_P (larger_elt->exp))
                      {
                        src_related
                          = gen_lowpart (mode, larger_elt->exp);
                        break;
                      }

                  if (src_related)
                    break;
                }
            }
        }
#ifdef LOAD_EXTEND_OP
      /* See if a MEM has already been loaded with a widening operation;
         if it has, we can use a subreg of that.  Many CISC machines
         also have such operations, but this is only likely to be
         beneficial on these machines.  */

      if (flag_expensive_optimizations && src_related == 0
          && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
          && GET_MODE_CLASS (mode) == MODE_INT
          && MEM_P (src) && ! do_not_record
          && LOAD_EXTEND_OP (mode) != UNKNOWN)
        {
          struct rtx_def memory_extend_buf;
          rtx memory_extend_rtx = &memory_extend_buf;
          enum machine_mode tmode;

          /* Set what we are trying to extend and the operation it might
             have been extended with.  */
          memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
          PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
          XEXP (memory_extend_rtx, 0) = src;

          for (tmode = GET_MODE_WIDER_MODE (mode);
               GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
               tmode = GET_MODE_WIDER_MODE (tmode))
            {
              struct table_elt *larger_elt;

              PUT_MODE (memory_extend_rtx, tmode);
              larger_elt = lookup (memory_extend_rtx,
                                   HASH (memory_extend_rtx, tmode), tmode);
              if (larger_elt == 0)
                continue;

              for (larger_elt = larger_elt->first_same_value;
                   larger_elt; larger_elt = larger_elt->next_same_value)
                if (REG_P (larger_elt->exp))
                  {
                    src_related = gen_lowpart (mode, larger_elt->exp);
                    break;
                  }

              if (src_related)
                break;
            }
        }
#endif /* LOAD_EXTEND_OP */
      /* Try to express the constant using a register+offset expression
         derived from a constant anchor.  */
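      /* Illustrative example only (hypothetical target parameters and
         values): with an anchor alignment of 0x8000, if the insn wants
         (const_int 0x12345688) and (reg 70) is already known to hold the
         anchor value 0x12340000, the constant can be synthesized as
         (plus (reg 70) (const_int 0x5688)) instead of being built from
         scratch.  */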
      if (targetm.const_anchor
          && !src_related
          && src_const
          && GET_CODE (src_const) == CONST_INT)
        {
          src_related = try_const_anchors (src_const, mode);
          src_related_is_const_anchor = src_related != NULL_RTX;
        }

      if (src == src_folded)
        src_folded = 0;
      /* At this point, ELT, if nonzero, points to a class of expressions
         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
         and SRC_RELATED, if nonzero, each contain additional equivalent
         expressions.  Prune these latter expressions by deleting expressions
         already in the equivalence class.

         Check for an equivalent identical to the destination.  If found,
         this is the preferred equivalent since it will likely lead to
         elimination of the insn.  Indicate this by placing it in
         `src_related'.  */

      if (elt)
        elt = elt->first_same_value;
      for (p = elt; p; p = p->next_same_value)
        {
          enum rtx_code code = GET_CODE (p->exp);

          /* If the expression is not valid, ignore it.  Then we do not
             have to check for validity below.  In most cases, we can use
             `rtx_equal_p', since canonicalization has already been done.  */
          if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
            continue;

          /* Also skip paradoxical subregs, unless that's what we're
             looking for.  */
          if (code == SUBREG
              && (GET_MODE_SIZE (GET_MODE (p->exp))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
              && ! (src != 0
                    && GET_CODE (src) == SUBREG
                    && GET_MODE (src) == GET_MODE (p->exp)
                    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
                        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
            continue;

          if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
            src = 0;
          else if (src_folded && GET_CODE (src_folded) == code
                   && rtx_equal_p (src_folded, p->exp))
            src_folded = 0;
          else if (src_eqv_here && GET_CODE (src_eqv_here) == code
                   && rtx_equal_p (src_eqv_here, p->exp))
            src_eqv_here = 0;
          else if (src_related && GET_CODE (src_related) == code
                   && rtx_equal_p (src_related, p->exp))
            src_related = 0;

          /* This is the same as the destination of the insn, we want
             to prefer it.  Copy it to src_related.  The code below will
             then give it a negative cost.  */
          if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
            src_related = dest;
        }

      /* Find the cheapest valid equivalent, trying all the available
         possibilities.  Prefer items not in the hash table to ones
         that are when they are equal cost.  Note that we can never
         worsen an insn as the current contents will also succeed.
         If we find an equivalent identical to the destination, use it as best,
         since this insn will probably be eliminated in that case.  */
      if (src)
        {
          if (rtx_equal_p (src, dest))
            src_cost = src_regcost = -1;
          else
            {
              src_cost = COST (src);
              src_regcost = approx_reg_cost (src);
            }
        }

      if (src_eqv_here)
        {
          if (rtx_equal_p (src_eqv_here, dest))
            src_eqv_cost = src_eqv_regcost = -1;
          else
            {
              src_eqv_cost = COST (src_eqv_here);
              src_eqv_regcost = approx_reg_cost (src_eqv_here);
            }
        }

      if (src_folded)
        {
          if (rtx_equal_p (src_folded, dest))
            src_folded_cost = src_folded_regcost = -1;
          else
            {
              src_folded_cost = COST (src_folded);
              src_folded_regcost = approx_reg_cost (src_folded);
            }
        }

      if (src_related)
        {
          if (rtx_equal_p (src_related, dest))
            src_related_cost = src_related_regcost = -1;
          else
            {
              src_related_cost = COST (src_related);
              src_related_regcost = approx_reg_cost (src_related);
            }

          /* If a const-anchor is used to synthesize a constant that
             normally requires multiple instructions then slightly prefer
             it over the original sequence.  These instructions are likely
             to become redundant now.  We can't compare against the cost
             of src_eqv_here because, on MIPS for example, multi-insn
             constants have zero cost; they are assumed to be hoisted from
             loops.  */
          if (src_related_is_const_anchor
              && src_related_cost == src_cost
              && src_eqv_here)
            src_related_cost--;
        }

      /* If this was an indirect jump insn, a known label will really be
         cheaper even though it looks more expensive.  */
      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
        src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
      /* Terminate loop when replacement made.  This must terminate since
         the current contents will be tested and will always be valid.  */
      while (1)
        {
          rtx trial;

          /* Skip invalid entries.  */
          while (elt && !REG_P (elt->exp)
                 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
            elt = elt->next_same_value;

          /* A paradoxical subreg would be bad here: it'll be the right
             size, but later may be adjusted so that the upper bits aren't
             what we want.  So reject it.  */
          if (elt != 0
              && GET_CODE (elt->exp) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (elt->exp))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
              /* It is okay, though, if the rtx we're trying to match
                 will ignore any of the bits we can't predict.  */
              && ! (src != 0
                    && GET_CODE (src) == SUBREG
                    && GET_MODE (src) == GET_MODE (elt->exp)
                    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
                        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
            {
              elt = elt->next_same_value;
              continue;
            }

          if (elt)
            {
              src_elt_cost = elt->cost;
              src_elt_regcost = elt->regcost;
            }

          /* Find cheapest and skip it for the next time.  For items
             of equal cost, use this order:
             src_folded, src, src_eqv, src_related and hash table entry.  */
          if (src_folded
              && preferable (src_folded_cost, src_folded_regcost,
                             src_cost, src_regcost) <= 0
              && preferable (src_folded_cost, src_folded_regcost,
                             src_eqv_cost, src_eqv_regcost) <= 0
              && preferable (src_folded_cost, src_folded_regcost,
                             src_related_cost, src_related_regcost) <= 0
              && preferable (src_folded_cost, src_folded_regcost,
                             src_elt_cost, src_elt_regcost) <= 0)
            {
              trial = src_folded, src_folded_cost = MAX_COST;
              if (src_folded_force_flag)
                {
                  rtx forced = force_const_mem (mode, trial);
                  if (forced)
                    trial = forced;
                }
            }
          else if (src
                   && preferable (src_cost, src_regcost,
                                  src_eqv_cost, src_eqv_regcost) <= 0
                   && preferable (src_cost, src_regcost,
                                  src_related_cost, src_related_regcost) <= 0
                   && preferable (src_cost, src_regcost,
                                  src_elt_cost, src_elt_regcost) <= 0)
            trial = src, src_cost = MAX_COST;
          else if (src_eqv_here
                   && preferable (src_eqv_cost, src_eqv_regcost,
                                  src_related_cost, src_related_regcost) <= 0
                   && preferable (src_eqv_cost, src_eqv_regcost,
                                  src_elt_cost, src_elt_regcost) <= 0)
            trial = src_eqv_here, src_eqv_cost = MAX_COST;
          else if (src_related
                   && preferable (src_related_cost, src_related_regcost,
                                  src_elt_cost, src_elt_regcost) <= 0)
            trial = src_related, src_related_cost = MAX_COST;
          else
            {
              trial = elt->exp;
              elt = elt->next_same_value;
              src_elt_cost = MAX_COST;
            }
          /* Avoid creation of overlapping memory moves.  */
          if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
            {
              rtx src, dest;

              /* BLKmode moves are not handled by cse anyway.  */
              if (GET_MODE (trial) == BLKmode)
                break;

              src = canon_rtx (trial);
              dest = canon_rtx (SET_DEST (sets[i].rtl));

              if (!MEM_P (src) || !MEM_P (dest)
                  || !nonoverlapping_memrefs_p (src, dest))
                break;
            }
          /* Try to optimize
             (set (reg:M N) (const_int A))
             (set (reg:M2 O) (const_int B))
             (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
                  (reg:M2 O)).  */
          if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
              && CONST_INT_P (trial)
              && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
              && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
              && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
              && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (sets[i].rtl)))
                  >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
              && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
                  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
                  <= HOST_BITS_PER_WIDE_INT))
            {
              rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
              rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
              rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
              unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
              struct table_elt *dest_elt
                = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
              rtx dest_cst = NULL;

              if (dest_elt)
                for (p = dest_elt->first_same_value; p; p = p->next_same_value)
                  if (p->is_const && CONST_INT_P (p->exp))
                    {
                      dest_cst = p->exp;
                      break;
                    }
              if (dest_cst)
                {
                  HOST_WIDE_INT val = INTVAL (dest_cst);
                  HOST_WIDE_INT mask;
                  unsigned int shift;

                  if (BITS_BIG_ENDIAN)
                    shift = GET_MODE_BITSIZE (GET_MODE (dest_reg))
                            - INTVAL (pos) - INTVAL (width);
                  else
                    shift = INTVAL (pos);
                  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
                    mask = ~(HOST_WIDE_INT) 0;
                  else
                    mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
                  val &= ~(mask << shift);
                  val |= (INTVAL (trial) & mask) << shift;
                  val = trunc_int_for_mode (val, GET_MODE (dest_reg));
                  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
                                           dest_reg, 1);
                  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
                                           GEN_INT (val), 1);
                  if (apply_change_group ())
                    {
                      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
                      if (note)
                        {
                          remove_note (insn, note);
                          df_notes_rescan (insn);
                        }
                      src_eqv = NULL_RTX;
                      src_eqv_elt = NULL;
                      src_eqv_volatile = 0;
                      src_eqv_in_memory = 0;
                      src_eqv_hash = 0;
                      repeat = true;
                      break;
                    }
                }
            }
          /* We don't normally have an insn matching (set (pc) (pc)), so
             check for this separately here.  We will delete such an
             insn below.

             For other cases such as a table jump or conditional jump
             where we know the ultimate target, go ahead and replace the
             operand.  While that may not make a valid insn, we will
             reemit the jump below (and also insert any necessary
             barriers).  */
          if (n_sets == 1 && dest == pc_rtx
              && (trial == pc_rtx
                  || (GET_CODE (trial) == LABEL_REF
                      && ! condjump_p (insn))))
            {
              /* Don't substitute non-local labels, this confuses CFG.  */
              if (GET_CODE (trial) == LABEL_REF
                  && LABEL_REF_NONLOCAL_P (trial))
                break;

              SET_SRC (sets[i].rtl) = trial;
              cse_jumps_altered = true;
              break;
            }

          /* Reject certain invalid forms of CONST that we create.  */
          else if (CONSTANT_P (trial)
                   && GET_CODE (trial) == CONST
                   /* Reject cases that will cause decode_rtx_const to
                      die.  On the alpha when simplifying a switch, we
                      get (const (truncate (minus (label_ref)
                      (label_ref)))).  */
                   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
                       /* Likewise on IA-64, except without the
                          truncate.  */
                       || (GET_CODE (XEXP (trial, 0)) == MINUS
                           && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
                           && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
            /* Do nothing for this case.  */
            ;

          /* Look for a substitution that makes a valid insn.  */
          else if (validate_unshare_change
                     (insn, &SET_SRC (sets[i].rtl), trial, 0))
            {
              rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);

              /* The result of apply_change_group can be ignored; see
                 canon_reg.  */

              validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
              apply_change_group ();

              break;
            }

          /* If we previously found constant pool entries for
             constants and this is a constant, try making a
             pool entry.  Put it in src_folded unless we already have done
             this since that is where it likely came from.  */

          else if (constant_pool_entries_cost
                   && CONSTANT_P (trial)
                   && (src_folded == 0
                       || (!MEM_P (src_folded)
                           && ! src_folded_force_flag))
                   && GET_MODE_CLASS (mode) != MODE_CC
                   && mode != VOIDmode)
            {
              src_folded_force_flag = 1;
              src_folded = trial;
              src_folded_cost = constant_pool_entries_cost;
              src_folded_regcost = constant_pool_entries_regcost;
            }
        }
      /* If we changed the insn too much, handle this set from scratch.  */
      if (repeat)
        {
          i--;
          continue;
        }

      src = SET_SRC (sets[i].rtl);

      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
         However, there is an important exception: If both are registers
         that are not the head of their equivalence class, replace SET_SRC
         with the head of the class.  If we do not do this, we will have
         both registers live over a portion of the basic block.  This way,
         their lifetimes will likely abut instead of overlapping.  */
      if (REG_P (dest)
          && REGNO_QTY_VALID_P (REGNO (dest)))
        {
          int dest_q = REG_QTY (REGNO (dest));
          struct qty_table_elem *dest_ent = &qty_table[dest_q];

          if (dest_ent->mode == GET_MODE (dest)
              && dest_ent->first_reg != REGNO (dest)
              && REG_P (src) && REGNO (src) == REGNO (dest)
              /* Don't do this if the original insn had a hard reg as
                 SET_SRC or SET_DEST.  */
              && (!REG_P (sets[i].src)
                  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
              && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
            /* We can't call canon_reg here because it won't do anything if
               SRC is a hard register.  */
            {
              int src_q = REG_QTY (REGNO (src));
              struct qty_table_elem *src_ent = &qty_table[src_q];
              int first = src_ent->first_reg;
              rtx new_src
                = (first >= FIRST_PSEUDO_REGISTER
                   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));

              /* We must use validate-change even for this, because this
                 might be a special no-op instruction, suitable only to
                 tag notes onto.  */
              if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
                {
                  src = new_src;

                  /* If we had a constant that is cheaper than what we are now
                     setting SRC to, use that constant.  We ignored it when we
                     thought we could make this into a no-op.  */
                  if (src_const && COST (src_const) < COST (src)
                      && validate_change (insn, &SET_SRC (sets[i].rtl),
                                          src_const, 0))
                    src = src_const;
                }
            }
        }
      /* If we made a change, recompute SRC values.  */
      if (src != sets[i].src)
        {
          do_not_record = 0;
          hash_arg_in_memory = 0;

          sets[i].src = src;
          sets[i].src_hash = HASH (src, mode);
          sets[i].src_volatile = do_not_record;
          sets[i].src_in_memory = hash_arg_in_memory;
          sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
        }
      /* If this is a single SET, we are setting a register, and we have an
         equivalent constant, we want to add a REG_NOTE.  We don't want
         to write a REG_EQUAL note for a constant pseudo since verifying that
         that pseudo hasn't been eliminated is a pain.  Such a note also
         won't help anything.

         Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
         which can be created for a reference to a compile time computable
         entry in a jump table.  */

      if (n_sets == 1 && src_const && REG_P (dest)
          && !REG_P (src_const)
          && ! (GET_CODE (src_const) == CONST
                && GET_CODE (XEXP (src_const, 0)) == MINUS
                && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
                && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
        {
          /* We only want a REG_EQUAL note if src_const != src.  */
          if (! rtx_equal_p (src, src_const))
            {
              /* Make sure that the rtx is not shared.  */
              src_const = copy_rtx (src_const);

              /* Record the actual constant value in a REG_EQUAL note,
                 making a new one if one does not already exist.  */
              set_unique_reg_note (insn, REG_EQUAL, src_const);
              df_notes_rescan (insn);
            }
        }
      /* Now deal with the destination.  */
      do_not_record = 0;

      /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
      while (GET_CODE (dest) == SUBREG
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      sets[i].inner_dest = dest;

      if (MEM_P (dest))
        {
#ifdef PUSH_ROUNDING
          /* Stack pushes invalidate the stack pointer.  */
          rtx addr = XEXP (dest, 0);
          if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
              && XEXP (addr, 0) == stack_pointer_rtx)
            invalidate (stack_pointer_rtx, VOIDmode);
#endif
          dest = fold_rtx (dest, insn);
        }

      /* Compute the hash code of the destination now,
         before the effects of this instruction are recorded,
         since the register values used in the address computation
         are those before this instruction.  */
      sets[i].dest_hash = HASH (dest, mode);

      /* Don't enter a bit-field in the hash table
         because the value in it after the store
         may not equal what was stored, due to truncation.  */

      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
        {
          rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

          if (src_const != 0 && CONST_INT_P (src_const)
              && CONST_INT_P (width)
              && INTVAL (width) < HOST_BITS_PER_WIDE_INT
              && ! (INTVAL (src_const)
                    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
            /* Exception: if the value is constant,
               and it won't be truncated, record it.  */
            ;
          else
            {
              /* This is chosen so that the destination will be invalidated
                 but no new value will be recorded.
                 We must invalidate because sometimes constant
                 values can be recorded for bitfields.  */
              sets[i].src_elt = 0;
              sets[i].src_volatile = 1;
              src_eqv = 0;
              src_eqv_elt = 0;
            }
        }
      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
         the insn.  */
      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
        {
          /* One less use of the label this insn used to jump to.  */
          delete_insn_and_edges (insn);
          cse_jumps_altered = true;
          /* No more processing for this set.  */
          sets[i].rtl = 0;
        }

      /* If this SET is now setting PC to a label, we know it used to
         be a conditional or computed branch.  */
      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
               && !LABEL_REF_NONLOCAL_P (src))
        {
          /* We reemit the jump in as many cases as possible just in
             case the form of an unconditional jump is significantly
             different than a computed jump or conditional jump.

             If this insn has multiple sets, then reemitting the
             jump is nontrivial.  So instead we just force rerecognition
             and hope for the best.  */
          if (n_sets == 1)
            {
              rtx new_rtx, note;

              new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
              JUMP_LABEL (new_rtx) = XEXP (src, 0);
              LABEL_NUSES (XEXP (src, 0))++;

              /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
              note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
              if (note)
                {
                  XEXP (note, 1) = NULL_RTX;
                  REG_NOTES (new_rtx) = note;
                }

              delete_insn_and_edges (insn);
              insn = new_rtx;
            }
          else
            INSN_CODE (insn) = -1;

          /* Do not bother deleting any unreachable code, let jump do it.  */
          cse_jumps_altered = true;
          sets[i].rtl = 0;
        }
      /* If destination is volatile, invalidate it and then do no further
         processing for this assignment.  */

      else if (do_not_record)
        {
          if (REG_P (dest) || GET_CODE (dest) == SUBREG)
            invalidate (dest, VOIDmode);
          else if (MEM_P (dest))
            invalidate (dest, VOIDmode);
          else if (GET_CODE (dest) == STRICT_LOW_PART
                   || GET_CODE (dest) == ZERO_EXTRACT)
            invalidate (XEXP (dest, 0), GET_MODE (dest));
          sets[i].rtl = 0;
        }

      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
        sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
#ifdef HAVE_cc0
      /* If setting CC0, record what it was set to, or a constant, if it
         is equivalent to a constant.  If it is being set to a floating-point
         value, make a COMPARE with the appropriate constant of 0.  If we
         don't do this, later code can interpret this as a test against
         const0_rtx, which can cause problems if we try to put it into an
         insn as a floating-point operand.  */
      if (dest == cc0_rtx)
        {
          this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
          this_insn_cc0_mode = mode;
          if (FLOAT_MODE_P (mode))
            this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
                                             CONST0_RTX (mode));
        }
#endif
    }
  /* Now enter all non-volatile source expressions in the hash table
     if they are not already present.
     Record their equivalence classes in src_elt.
     This way we can insert the corresponding destinations into
     the same classes even if the actual sources are no longer in them
     (having been invalidated).  */

  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
    {
      struct table_elt *elt;
      struct table_elt *classp = sets[0].src_elt;
      rtx dest = SET_DEST (sets[0].rtl);
      enum machine_mode eqvmode = GET_MODE (dest);

      if (GET_CODE (dest) == STRICT_LOW_PART)
        {
          eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
          classp = 0;
        }
      if (insert_regs (src_eqv, classp, 0))
        {
          rehash_using_reg (src_eqv);
          src_eqv_hash = HASH (src_eqv, eqvmode);
        }
      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
      elt->in_memory = src_eqv_in_memory;
      src_eqv_elt = elt;

      /* Check to see if src_eqv_elt is the same as a set source which
         does not yet have an elt, and if so set the elt of the set source
         to src_eqv_elt.  */
      for (i = 0; i < n_sets; i++)
        if (sets[i].rtl && sets[i].src_elt == 0
            && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
          sets[i].src_elt = src_eqv_elt;
    }
  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl && ! sets[i].src_volatile
        && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
      {
        if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
          {
            /* REG_EQUAL in setting a STRICT_LOW_PART
               gives an equivalent for the entire destination register,
               not just for the subreg being stored in now.
               This is a more interesting equivalence, so we arrange later
               to treat the entire reg as the destination.  */
            sets[i].src_elt = src_eqv_elt;
            sets[i].src_hash = src_eqv_hash;
          }
        else
          {
            /* Insert source and constant equivalent into hash table, if not
               already present.  */
            struct table_elt *classp = src_eqv_elt;
            rtx src = sets[i].src;
            rtx dest = SET_DEST (sets[i].rtl);
            enum machine_mode mode
              = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);

            /* It's possible that we have a source value known to be
               constant but don't have a REG_EQUAL note on the insn.
               Lack of a note will mean src_eqv_elt will be NULL.  This
               can happen where we've generated a SUBREG to access a
               CONST_INT that is already in a register in a wider mode.
               Ensure that the source expression is put in the proper
               constant class.  */
            if (!classp)
              classp = sets[i].src_const_elt;

            if (sets[i].src_elt == 0)
              {
                struct table_elt *elt;

                /* Note that these insert_regs calls cannot remove
                   any of the src_elt's, because they would have failed to
                   match if not still valid.  */
                if (insert_regs (src, classp, 0))
                  {
                    rehash_using_reg (src);
                    sets[i].src_hash = HASH (src, mode);
                  }
                elt = insert (src, classp, sets[i].src_hash, mode);
                elt->in_memory = sets[i].src_in_memory;
                sets[i].src_elt = classp = elt;
              }
            if (sets[i].src_const && sets[i].src_const_elt == 0
                && src != sets[i].src_const
                && ! rtx_equal_p (sets[i].src_const, src))
              sets[i].src_elt = insert (sets[i].src_const, classp,
                                        sets[i].src_const_hash, mode);
          }
      }
    else if (sets[i].src_elt == 0)
      /* If we did not insert the source into the hash table (e.g., it was
         volatile), note the equivalence class for the REG_EQUAL value, if any,
         so that the destination goes into that class.  */
      sets[i].src_elt = src_eqv_elt;
  /* Record destination addresses in the hash table.  This allows us to
     check if they are invalidated by other sets.  */
  for (i = 0; i < n_sets; i++)
    {
      if (sets[i].rtl)
        {
          rtx x = sets[i].inner_dest;
          struct table_elt *elt;
          enum machine_mode mode;
          unsigned hash;

          if (MEM_P (x))
            {
              x = XEXP (x, 0);
              mode = GET_MODE (x);
              hash = HASH (x, mode);
              elt = lookup (x, hash, mode);
              if (!elt)
                {
                  if (insert_regs (x, NULL, 0))
                    {
                      rtx dest = SET_DEST (sets[i].rtl);

                      rehash_using_reg (x);
                      hash = HASH (x, mode);
                      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
                    }
                  elt = insert (x, NULL, hash, mode);
                }

              sets[i].dest_addr_elt = elt;
            }
          else
            sets[i].dest_addr_elt = NULL;
        }
    }
  invalidate_from_clobbers (x);

  /* Some registers are invalidated by subroutine calls.  Memory is
     invalidated by non-constant calls.  */

  if (CALL_P (insn))
    {
      if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
        invalidate_memory ();
      invalidate_for_call ();
    }

  /* Now invalidate everything set by this instruction.
     If a SUBREG or other funny destination is being set,
     sets[i].rtl is still nonzero, so here we invalidate the reg
     a part of which is being set.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        /* We can't use the inner dest, because the mode associated with
           a ZERO_EXTRACT is significant.  */
        rtx dest = SET_DEST (sets[i].rtl);

        /* Needed for registers to remove the register from its
           previous quantity's chain.
           Needed for memory if this is a nonvarying address, unless
           we have just done an invalidate_memory that covers even those.  */
        if (REG_P (dest) || GET_CODE (dest) == SUBREG)
          invalidate (dest, VOIDmode);
        else if (MEM_P (dest))
          invalidate (dest, VOIDmode);
        else if (GET_CODE (dest) == STRICT_LOW_PART
                 || GET_CODE (dest) == ZERO_EXTRACT)
          invalidate (XEXP (dest, 0), GET_MODE (dest));
      }

  /* A volatile ASM invalidates everything.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
      && MEM_VOLATILE_P (PATTERN (insn)))
    flush_hash_table ();

  /* Don't cse over a call to setjmp; on some machines (eg VAX)
     the regs restored by the longjmp come from a later time
     than the setjmp.  */
  if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
    {
      flush_hash_table ();
    }
  /* Make sure registers mentioned in destinations
     are safe for use in an expression to be inserted.
     This removes from the hash table
     any invalid entry that refers to one of these registers.

     We don't care about the return value from mention_regs because
     we are going to hash the SET_DEST values unconditionally.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        rtx x = SET_DEST (sets[i].rtl);

        if (!REG_P (x))
          mention_regs (x);
        else
          {
            /* We used to rely on all references to a register becoming
               inaccessible when a register changes to a new quantity,
               since that changes the hash code.  However, that is not
               safe, since after HASH_SIZE new quantities we get a
               hash 'collision' of a register with its own invalid
               entries.  And since SUBREGs have been changed not to
               change their hash code with the hash code of the register,
               it wouldn't work any longer at all.  So we have to check
               for any invalid references lying around now.
               This code is similar to the REG case in mention_regs,
               but it knows that reg_tick has been incremented, and
               it leaves reg_in_table as -1 .  */
            unsigned int regno = REGNO (x);
            unsigned int endregno = END_REGNO (x);
            unsigned int i;

            for (i = regno; i < endregno; i++)
              {
                if (REG_IN_TABLE (i) >= 0)
                  {
                    remove_invalid_refs (i);
                    REG_IN_TABLE (i) = -1;
                  }
              }
          }
      }
  /* We may have just removed some of the src_elt's from the hash table.
     So replace each one with the current head of the same class.
     Also check if destination addresses have been removed.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        if (sets[i].dest_addr_elt
            && sets[i].dest_addr_elt->first_same_value == 0)
          {
            /* The elt was removed, which means this destination is not
               valid after this instruction.  */
            sets[i].rtl = NULL_RTX;
          }
        else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
          /* If elt was removed, find current head of same class,
             or 0 if nothing remains of that class.  */
          {
            struct table_elt *elt = sets[i].src_elt;

            while (elt && elt->prev_same_value)
              elt = elt->prev_same_value;

            while (elt && elt->first_same_value == 0)
              elt = elt->next_same_value;
            sets[i].src_elt = elt ? elt->first_same_value : 0;
          }
      }
  /* Now insert the destinations into their equivalence classes.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        rtx dest = SET_DEST (sets[i].rtl);
        struct table_elt *elt;

        /* Don't record value if we are not supposed to risk allocating
           floating-point values in registers that might be wider than
           memory.  */
        if ((flag_float_store
             && MEM_P (dest)
             && FLOAT_MODE_P (GET_MODE (dest)))
            /* Don't record BLKmode values, because we don't know the
               size of it, and can't be sure that other BLKmode values
               have the same or smaller size.  */
            || GET_MODE (dest) == BLKmode
            /* If we didn't put a REG_EQUAL value or a source into the hash
               table, there is no point in recording DEST.  */
            || sets[i].src_elt == 0
            /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
               or SIGN_EXTEND, don't record DEST since it can cause
               some tracking to be wrong.

               ??? Think about this more later.  */
            || (GET_CODE (dest) == SUBREG
                && (GET_MODE_SIZE (GET_MODE (dest))
                    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
                && (GET_CODE (sets[i].src) == SIGN_EXTEND
                    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
          continue;

        /* STRICT_LOW_PART isn't part of the value BEING set,
           and neither is the SUBREG inside it.
           Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
        if (GET_CODE (dest) == STRICT_LOW_PART)
          dest = SUBREG_REG (XEXP (dest, 0));

        if (REG_P (dest) || GET_CODE (dest) == SUBREG)
          /* Registers must also be inserted into chains for quantities.  */
          if (insert_regs (dest, sets[i].src_elt, 1))
            {
              /* If `insert_regs' changes something, the hash code must be
                 recalculated.  */
              rehash_using_reg (dest);
              sets[i].dest_hash = HASH (dest, GET_MODE (dest));
            }

        elt = insert (dest, sets[i].src_elt,
                      sets[i].dest_hash, GET_MODE (dest));

        /* If this is a constant, insert the constant anchors with the
           equivalent register-offset expressions using register DEST.  */
        if (targetm.const_anchor
            && REG_P (dest)
            && SCALAR_INT_MODE_P (GET_MODE (dest))
            && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
          insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));

        elt->in_memory = (MEM_P (sets[i].inner_dest)
                          && !MEM_READONLY_P (sets[i].inner_dest));
        /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
           narrower than M2, and both M1 and M2 are the same number of words,
           we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
           make that equivalence as well.

           However, BAR may have equivalences for which gen_lowpart
           will produce a simpler value than gen_lowpart applied to
           BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
           BAR's equivalences.  If we don't get a simplified form, make
           the SUBREG.  It will not be used in an equivalence, but will
           cause two similar assignments to be detected.

           Note the loop below will find SUBREG_REG (DEST) since we have
           already entered SRC and DEST of the SET in the table.  */

        if (GET_CODE (dest) == SUBREG
            && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
                 / UNITS_PER_WORD)
                == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
            && (GET_MODE_SIZE (GET_MODE (dest))
                >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
            && sets[i].src_elt != 0)
          {
            enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
            struct table_elt *elt, *classp = 0;

            for (elt = sets[i].src_elt->first_same_value; elt;
                 elt = elt->next_same_value)
              {
                rtx new_src = 0;
                unsigned src_hash;
                struct table_elt *src_elt;
                int byte = 0;

                /* Ignore invalid entries.  */
                if (!REG_P (elt->exp)
                    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
                  continue;

                /* We may have already been playing subreg games.  If the
                   mode is already correct for the destination, use it.  */
                if (GET_MODE (elt->exp) == new_mode)
                  new_src = elt->exp;
                else
                  {
                    /* Calculate big endian correction for the SUBREG_BYTE.
                       We have already checked that M1 (GET_MODE (dest))
                       is not narrower than M2 (new_mode).  */
                    if (BYTES_BIG_ENDIAN)
                      byte = (GET_MODE_SIZE (GET_MODE (dest))
                              - GET_MODE_SIZE (new_mode));

                    new_src = simplify_gen_subreg (new_mode, elt->exp,
                                                   GET_MODE (dest), byte);
                  }

                /* The call to simplify_gen_subreg fails if the value
                   is VOIDmode, yet we can't do any simplification, e.g.
                   for EXPR_LISTs denoting function call results.
                   It is invalid to construct a SUBREG with a VOIDmode
                   SUBREG_REG, hence a zero new_src means we can't do
                   this substitution.  */
                if (new_src == 0)
                  continue;

                src_hash = HASH (new_src, new_mode);
                src_elt = lookup (new_src, src_hash, new_mode);

                /* Put the new source in the hash table if it isn't
                   already.  */
                if (src_elt == 0)
                  {
                    if (insert_regs (new_src, classp, 0))
                      {
                        rehash_using_reg (new_src);
                        src_hash = HASH (new_src, new_mode);
                      }
                    src_elt = insert (new_src, classp, src_hash, new_mode);
                    src_elt->in_memory = elt->in_memory;
                  }
                else if (classp && classp != src_elt->first_same_value)
                  /* Show that two things that we've seen before are
                     actually the same.  */
                  merge_equiv_classes (src_elt, classp);

                classp = src_elt->first_same_value;
                /* Ignore invalid entries.  */
                while (classp
                       && !REG_P (classp->exp)
                       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
                  classp = classp->next_same_value;
              }
          }
      }
  /* Special handling for (set REG0 REG1) where REG0 is the
     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
     be used in the sequel, so (if easily done) change this insn to
     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
     that computed their value.  Then REG1 will become a dead store
     and won't cloud the situation for later optimizations.

     Do not make this change if REG1 is a hard register, because it will
     then be used in the sequel and we may be changing a two-operand insn
     into a three-operand insn.

     Also do not do this if we are operating on a copy of INSN.  */
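
  /* Illustrative example only (hypothetical pseudo register numbers):

         (set (reg 101) (plus (reg 60) (reg 61)))
         (set (reg 100) (reg 101))

     where reg 100 is the head of reg 101's equivalence class becomes

         (set (reg 100) (plus (reg 60) (reg 61)))
         (set (reg 101) (reg 100))

     so that the copy into reg 101 is likely to end up as a dead store.  */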
  if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
      && NEXT_INSN (PREV_INSN (insn)) == insn
      && REG_P (SET_SRC (sets[0].rtl))
      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
    {
      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
      struct qty_table_elem *src_ent = &qty_table[src_q];

      if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
        {
          /* Scan for the previous nonnote insn, but stop at a basic
             block boundary.  */
          rtx prev = insn;
          rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
          do
            {
              prev = PREV_INSN (prev);
            }
          while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));

          /* Do not swap the registers around if the previous instruction
             attaches a REG_EQUIV note to REG1.

             ??? It's not entirely clear whether we can transfer a REG_EQUIV
             from the pseudo that originally shadowed an incoming argument
             to another register.  Some uses of REG_EQUIV might rely on it
             being attached to REG1 rather than REG2.

             This section previously turned the REG_EQUIV into a REG_EQUAL
             note.  We cannot do that because REG_EQUIV may provide an
             uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
          if (NONJUMP_INSN_P (prev)
              && GET_CODE (PATTERN (prev)) == SET
              && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
              && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
            {
              rtx dest = SET_DEST (sets[0].rtl);
              rtx src = SET_SRC (sets[0].rtl);
              rtx note;

              validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
              validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
              validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
              apply_change_group ();

              /* If INSN has a REG_EQUAL note, and this note mentions
                 REG0, then we must delete it, because the value in
                 REG0 has changed.  If the note's value is REG1, we must
                 also delete it because that is now this insn's dest.  */
              note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
              if (note != 0
                  && (reg_mentioned_p (dest, XEXP (note, 0))
                      || rtx_equal_p (src, XEXP (note, 0))))
                remove_note (insn, note);
            }
        }
    }
}
/* Remove from the hash table all expressions that reference memory.  */

static void
invalidate_memory (void)
{
  int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (p->in_memory)
          remove_from_table (p, i);
      }
}
/* Perform invalidation on the basis of everything about an insn
   except for invalidating the actual places that are SET in it.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.

   X is the pattern of the insn.  */

static void
invalidate_from_clobbers (rtx x)
{
  if (GET_CODE (x) == CLOBBER)
    {
      rtx ref = XEXP (x, 0);

      if (ref)
        {
          if (REG_P (ref) || GET_CODE (ref) == SUBREG
              || MEM_P (ref))
            invalidate (ref, VOIDmode);
          else if (GET_CODE (ref) == STRICT_LOW_PART
                   || GET_CODE (ref) == ZERO_EXTRACT)
            invalidate (XEXP (ref, 0), GET_MODE (ref));
        }
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == CLOBBER)
            {
              rtx ref = XEXP (y, 0);
              if (REG_P (ref) || GET_CODE (ref) == SUBREG
                  || MEM_P (ref))
                invalidate (ref, VOIDmode);
              else if (GET_CODE (ref) == STRICT_LOW_PART
                       || GET_CODE (ref) == ZERO_EXTRACT)
                invalidate (XEXP (ref, 0), GET_MODE (ref));
            }
        }
    }
}
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */

static rtx
cse_process_notes_1 (rtx x, rtx object, bool *changed)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case LO_SUM:
      return x;

    case MEM:
      validate_change (x, &XEXP (x, 0),
                       cse_process_notes (XEXP (x, 0), x, changed), 0);
      return x;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL)
        XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
      if (XEXP (x, 1))
        XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case SUBREG:
      {
        rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
        /* We don't substitute VOIDmode constants into these rtx,
           since they would impede folding.  */
        if (GET_MODE (new_rtx) != VOIDmode)
          validate_change (object, &XEXP (x, 0), new_rtx, 0);
        return x;
      }

    case REG:
      i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x)))
        {
          struct qty_table_elem *ent = &qty_table[i];

          if (ent->const_rtx != NULL_RTX
              && (CONSTANT_P (ent->const_rtx)
                  || REG_P (ent->const_rtx)))
            {
              rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
              if (new_rtx)
                return copy_rtx (new_rtx);
            }
        }

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL_RTX);

    default:
      break;
    }

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
                       cse_process_notes (XEXP (x, i), object, changed), 0);

  return x;
}
6068 cse_process_notes (rtx x, rtx object, bool *changed)
6070 rtx new_rtx = cse_process_notes_1 (x, object, changed);
6077 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6079 DATA is a pointer to a struct cse_basic_block_data, that is used to
6081 It is filled with a queue of basic blocks, starting with FIRST_BB
6082 and following a trace through the CFG.
6084 If all paths starting at FIRST_BB have been followed, or no new path
6085 starting at FIRST_BB can be constructed, this function returns FALSE.
6086 Otherwise, DATA->path is filled and the function returns TRUE indicating
6087 that a path to follow was found.
6089 If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6090 block in the path will be FIRST_BB. */
6093 cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6100 SET_BIT (cse_visited_basic_blocks, first_bb->index);
6102 /* See if there is a previous path. */
6103 path_size = data->path_size;
6105 /* There is a previous path. Make sure it started with FIRST_BB. */
6107 gcc_assert (data->path[0].bb == first_bb);
6109 /* There was only one basic block in the last path. Clear the path and
6110 return, so that paths starting at another basic block can be tried. */
6117 /* If the path was empty from the beginning, construct a new path. */
6119 data->path[path_size++].bb = first_bb;
6122 /* Otherwise, path_size must be equal to or greater than 2, because
6123 a previous path exists that is at least two basic blocks long.
6125 Update the previous branch path, if any. If the last branch was
6126 previously along the branch edge, take the fallthrough edge now. */
6127 while (path_size >= 2)
6129 basic_block last_bb_in_path, previous_bb_in_path;
6133 last_bb_in_path = data->path[path_size].bb;
6134 previous_bb_in_path = data->path[path_size - 1].bb;
6136 /* If we previously followed a path along the branch edge, try
6137 the fallthru edge now. */
6138 if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6139 && any_condjump_p (BB_END (previous_bb_in_path))
6140 && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6141 && e == BRANCH_EDGE (previous_bb_in_path))
6143 bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6144 if (bb != EXIT_BLOCK_PTR
6145 && single_pred_p (bb)
6146 /* We used to assert here that we would only see blocks
6147 that we have not visited yet. But we may end up
6148 visiting basic blocks twice if the CFG has changed
6149 in this run of cse_main, because when the CFG changes
6150 the topological sort of the CFG also changes. A basic
6151 block that previously had more than two predecessors
6152 may now have a single predecessor, and become part of
6153 a path that starts at another basic block.
6155 We still want to visit each basic block only once, so
6156 halt the path here if we have already visited BB. */
6157 && !TEST_BIT (cse_visited_basic_blocks, bb->index))
6159 SET_BIT (cse_visited_basic_blocks, bb->index);
6160 data->path[path_size++].bb = bb;
6165 data->path[path_size].bb = NULL;
6168 /* If only one block remains in the path, bail. */
6176 /* Extend the path if possible. */
6179 bb = data->path[path_size - 1].bb;
6180 while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6182 if (single_succ_p (bb))
6183 e = single_succ_edge (bb);
6184 else if (EDGE_COUNT (bb->succs) == 2
6185 && any_condjump_p (BB_END (bb)))
6187 /* First try to follow the branch. If that doesn't lead
6188 to a useful path, follow the fallthru edge. */
6189 e = BRANCH_EDGE (bb);
6190 if (!single_pred_p (e->dest))
6191 e = FALLTHRU_EDGE (bb);
6196 if (e && e->dest != EXIT_BLOCK_PTR
6197 && single_pred_p (e->dest)
6198 /* Avoid visiting basic blocks twice. The large comment
6199 above explains why this can happen. */
6200 && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
6202 basic_block bb2 = e->dest;
6203 SET_BIT (cse_visited_basic_blocks, bb2->index);
6204 data->path[path_size++].bb = bb2;
6213 data->path_size = path_size;
6214 return path_size != 0;
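/* cse_find_path above only grows the trace into a successor that has exactly
   one predecessor and has not been visited yet, so every block belongs to at
   most one extended basic block.  Below is a minimal standalone sketch of
   that extension rule on a toy CFG; the arrays (succ, nsucc, npreds) and the
   function find_path are illustrative assumptions, and the branch/fallthru
   retry logic of the real routine is omitted.  */

#include <stdbool.h>
#include <stdio.h>

#define MAX_BLOCKS 8
#define MAX_PATH 4

/* succ[b][0..nsucc[b]-1] are the successors of block b.  */
static int succ[MAX_BLOCKS][2];
static int nsucc[MAX_BLOCKS];
static int npreds[MAX_BLOCKS];
static bool visited[MAX_BLOCKS];

/* Grow a path starting at FIRST along successors that have a single
   predecessor and were not visited before.  Returns the path length.  */
static int
find_path (int first, int *path)
{
  int size = 0;
  int bb = first;
  visited[bb] = true;
  path[size++] = bb;
  while (size < MAX_PATH && nsucc[bb] > 0)
    {
      int next = succ[bb][0];
      /* With two successors, fall back to the second if the first does
         not qualify.  */
      if (nsucc[bb] == 2 && npreds[next] != 1)
        next = succ[bb][1];
      if (npreds[next] != 1 || visited[next])
        break;
      visited[next] = true;
      path[size++] = next;
      bb = next;
    }
  return size;
}

int
main (void)
{
  /* 0 -> 1 -> 2, and 3 -> 2, so block 2 has two predecessors.  */
  succ[0][0] = 1; nsucc[0] = 1; npreds[1] = 1;
  succ[1][0] = 2; nsucc[1] = 1; npreds[2] = 2;
  succ[3][0] = 2; nsucc[3] = 1;

  int path[MAX_PATH];
  int n = find_path (0, path);
  for (int i = 0; i < n; i++)
    printf ("block %d\n", path[i]);   /* Prints blocks 0 and 1 only.  */
  return 0;
}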
6217 /* Dump the path in DATA to file F. NSETS is the number of sets
6221 cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6225 fprintf (f, ";; Following path with %d sets: ", nsets);
6226 for (path_entry = 0; path_entry < data->path_size; path_entry++)
6227 fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6228 fputc ('\n', dump_file);
6233 /* Return true if BB has exception handling successor edges. */
6236 have_eh_succ_edges (basic_block bb)
6241 FOR_EACH_EDGE (e, ei, bb->succs)
6242 if (e->flags & EDGE_EH)
6249 /* Scan to the end of the path described by DATA. Return an estimate of
6250 the total number of SETs of all insns in the path. */
6253 cse_prescan_path (struct cse_basic_block_data *data)
6256 int path_size = data->path_size;
6259 /* Scan to end of each basic block in the path. */
6260 for (path_entry = 0; path_entry < path_size; path_entry++)
6265 bb = data->path[path_entry].bb;
6267 FOR_BB_INSNS (bb, insn)
6272 /* A PARALLEL can have lots of SETs in it,
6273 especially if it is really an ASM_OPERANDS. */
6274 if (GET_CODE (PATTERN (insn)) == PARALLEL)
6275 nsets += XVECLEN (PATTERN (insn), 0);
6281 data->nsets = nsets;
6284 /* Process a single extended basic block described by EBB_DATA. */
6287 cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6289 int path_size = ebb_data->path_size;
6293 /* Allocate the space needed by qty_table. */
6294 qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6297 cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6298 cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6299 for (path_entry = 0; path_entry < path_size; path_entry++)
6304 bb = ebb_data->path[path_entry].bb;
6306 /* Invalidate recorded information for eh regs if there is an EH
6307 edge pointing to that bb. */
6308 if (bb_has_eh_pred (bb))
6312 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
6314 df_ref def = *def_rec;
6315 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6316 invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6320 FOR_BB_INSNS (bb, insn)
6322 optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6323 /* If we have processed 1,000 insns, flush the hash table to
6324 avoid extreme quadratic behavior. We must not include NOTEs
6325 in the count since there may be more of them when generating
6326 debugging information. If we clear the table at different
6327 times, code generated with -g -O might be different than code
6328 generated with -O but not -g.
6330 FIXME: This is a real kludge and needs to be done some other
6332 if (NONDEBUG_INSN_P (insn)
6333 && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6335 flush_hash_table ();
6341 /* Process notes first so we have all notes in canonical forms
6342 when looking for duplicate operations. */
6343 if (REG_NOTES (insn))
6345 bool changed = false;
6346 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6347 NULL_RTX, &changed);
6349 df_notes_rescan (insn);
6354 /* If we haven't already found an insn where we added a LABEL_REF,
6356 if (INSN_P (insn) && !recorded_label_ref
6357 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6359 recorded_label_ref = true;
6362 /* If the previous insn set CC0 and this insn no longer
6363 references CC0, delete the previous insn. Here we use
6364 the fact that nothing expects CC0 to be valid over an insn,
6365 which is true until the final pass. */
6369 prev_insn = PREV_INSN (insn);
6370 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6371 && (tem = single_set (prev_insn)) != 0
6372 && SET_DEST (tem) == cc0_rtx
6373 && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6374 delete_insn (prev_insn);
6377 /* If this insn is not the last insn in the basic block,
6378 it will be PREV_INSN(insn) in the next iteration. If
6379 we recorded any CC0-related information for this insn,
6381 if (insn != BB_END (bb))
6383 prev_insn_cc0 = this_insn_cc0;
6384 prev_insn_cc0_mode = this_insn_cc0_mode;
6390 /* With non-call exceptions, we are not always able to update
6391 the CFG properly inside cse_insn. So clean up possibly
6392 redundant EH edges here. */
6393 if (flag_non_call_exceptions && have_eh_succ_edges (bb))
6394 cse_cfg_altered |= purge_dead_edges (bb);
6396 /* If we changed a conditional jump, we may have terminated
6397 the path we are following. Check that by verifying that
6398 the edge we would take still exists. If the edge does
6399 not exist anymore, purge the remainder of the path.
6400 Note that this will cause us to return to the caller. */
6401 if (path_entry < path_size - 1)
6403 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6404 if (!find_edge (bb, next_bb))
6410 /* If we truncate the path, we must also reset the
6411 visited bit on the remaining blocks in the path,
6412 or we will never visit them at all. */
6413 RESET_BIT (cse_visited_basic_blocks,
6414 ebb_data->path[path_size].bb->index);
6415 ebb_data->path[path_size].bb = NULL;
6417 while (path_size - 1 != path_entry);
6418 ebb_data->path_size = path_size;
6422 /* If this is a conditional jump insn, record any known
6423 equivalences due to the condition being tested. */
6425 if (path_entry < path_size - 1
6427 && single_set (insn)
6428 && any_condjump_p (insn))
6430 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6431 bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6432 record_jump_equiv (insn, taken);
6436 /* Clear the CC0-tracking related insns, they can't provide
6437 useful information across basic block boundaries. */
6442 gcc_assert (next_qty <= max_qty);
6448 /* Perform cse on the instructions of a function.
6449 F is the first instruction.
6450 NREGS is one plus the highest pseudo-reg number used in the function.
6452 Return 2 if jump optimizations should be redone due to simplifications
6453 in conditional jump instructions.
6454 Return 1 if the CFG should be cleaned up because it has been modified.
6455 Return 0 otherwise. */
6458 cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
6460 struct cse_basic_block_data ebb_data;
6462 int *rc_order = XNEWVEC (int, last_basic_block);
6465 df_set_flags (DF_LR_RUN_DCE);
6467 df_set_flags (DF_DEFER_INSN_RESCAN);
6469 reg_scan (get_insns (), max_reg_num ());
6470 init_cse_reg_info (nregs);
6472 ebb_data.path = XNEWVEC (struct branch_path,
6473 PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6475 cse_cfg_altered = false;
6476 cse_jumps_altered = false;
6477 recorded_label_ref = false;
6478 constant_pool_entries_cost = 0;
6479 constant_pool_entries_regcost = 0;
6480 ebb_data.path_size = 0;
6482 rtl_hooks = cse_rtl_hooks;
6485 init_alias_analysis ();
6487 reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6489 /* Set up the table of already visited basic blocks. */
6490 cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
6491 sbitmap_zero (cse_visited_basic_blocks);
6493 /* Loop over basic blocks in reverse completion order (RPO),
6494 excluding the ENTRY and EXIT blocks. */
6495 n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6497 while (i < n_blocks)
6499 /* Find the first block in the RPO queue that we have not yet
6500 processed before. */
6503 bb = BASIC_BLOCK (rc_order[i++]);
6505 while (TEST_BIT (cse_visited_basic_blocks, bb->index)
6508 /* Find all paths starting with BB, and process them. */
6509 while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6511 /* Pre-scan the path. */
6512 cse_prescan_path (&ebb_data);
6514 /* If this basic block has no sets, skip it. */
6515 if (ebb_data.nsets == 0)
6518 /* Get a reasonable estimate for the maximum number of qty's
6519 needed for this path. For this, we take the number of sets
6520 and multiply that by MAX_RECOG_OPERANDS. */
6521 max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6523 /* Dump the path we're about to process. */
6525 cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6527 cse_extended_basic_block (&ebb_data);
6532 end_alias_analysis ();
6533 free (reg_eqv_table);
6534 free (ebb_data.path);
6535 sbitmap_free (cse_visited_basic_blocks);
6537 rtl_hooks = general_rtl_hooks;
6539 if (cse_jumps_altered || recorded_label_ref)
6541 else if (cse_cfg_altered)
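/* cse_main above walks the blocks in the order produced by
   pre_and_rev_post_order_compute, i.e. reverse post-order, so a block is
   normally reached before its successors.  The sketch below computes such an
   order for a toy CFG with a depth-first search; the arrays and the dfs
   function are illustrative assumptions, not the GCC implementation.  */

#include <stdbool.h>
#include <stdio.h>

#define N 5

static int succ[N][2];
static int nsucc[N];
static bool seen[N];
static int post[N];     /* Blocks in completion (post) order.  */
static int post_len;

/* Depth-first search recording each block once all its successors are done.  */
static void
dfs (int b)
{
  seen[b] = true;
  for (int i = 0; i < nsucc[b]; i++)
    if (!seen[succ[b][i]])
      dfs (succ[b][i]);
  post[post_len++] = b;
}

int
main (void)
{
  /* 0 -> {1,2}, 1 -> 3, 2 -> 3, 3 -> 4.  */
  succ[0][0] = 1; succ[0][1] = 2; nsucc[0] = 2;
  succ[1][0] = 3; nsucc[1] = 1;
  succ[2][0] = 3; nsucc[2] = 1;
  succ[3][0] = 4; nsucc[3] = 1;

  dfs (0);
  /* Reverse post-order: read the post-order array backwards.  */
  for (int i = post_len - 1; i >= 0; i--)
    printf ("%d ", post[i]);
  printf ("\n");   /* Prints "0 2 1 3 4", a valid RPO for this CFG.  */
  return 0;
}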
6547 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for
6548 which there isn't a REG_LABEL_OPERAND note.
6549 Return one if so. DATA is the insn. */
6552 check_for_label_ref (rtx *rtl, void *data)
6554 rtx insn = (rtx) data;
6556 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6557 note for it, we must rerun jump since it needs to place the note. If
6558 this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6559 don't do this since no REG_LABEL_OPERAND will be added. */
6560 return (GET_CODE (*rtl) == LABEL_REF
6561 && ! LABEL_REF_NONLOCAL_P (*rtl)
6563 || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
6564 && LABEL_P (XEXP (*rtl, 0))
6565 && INSN_UID (XEXP (*rtl, 0)) != 0
6566 && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
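/* check_for_label_ref above is invoked through for_each_rtx, which applies a
   callback to an expression and every subexpression and stops as soon as the
   callback returns nonzero.  A minimal standalone sketch of that walker
   pattern follows; struct node, for_each_node and the toy codes are
   illustrative assumptions, not the rtl representation.  */

#include <stddef.h>
#include <stdio.h>

/* A toy expression node: a code and up to two operands.  */
struct node { int code; struct node *op[2]; };

/* Apply FN to X and every subexpression of X, stopping as soon as FN
   returns nonzero.  */
static int
for_each_node (struct node *x, int (*fn) (struct node *, void *), void *data)
{
  if (!x)
    return 0;
  int r = fn (x, data);
  if (r)
    return r;
  for (size_t i = 0; i < 2; i++)
    if ((r = for_each_node (x->op[i], fn, data)) != 0)
      return r;
  return 0;
}

enum { CODE_PLUS, CODE_REG, CODE_LABEL };

static int
is_label (struct node *x, void *data)
{
  (void) data;
  return x->code == CODE_LABEL;
}

int
main (void)
{
  struct node lab = { CODE_LABEL, { NULL, NULL } };
  struct node reg = { CODE_REG, { NULL, NULL } };
  struct node sum = { CODE_PLUS, { &reg, &lab } };
  printf ("contains label: %d\n", for_each_node (&sum, is_label, NULL));
  return 0;
}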
6569 /* Count the number of times registers are used (not set) in X.
6570 COUNTS is an array in which we accumulate the count, INCR is how much
6571 we count each register usage.
6573 Don't count a usage of DEST, which is the SET_DEST of a SET which
6574 contains X in its SET_SRC. This is because such a SET does not
6575 modify the liveness of DEST.
6576 DEST is set to pc_rtx for a trapping insn, which means that we must count
6577 uses of a SET_DEST regardless because the insn can't be deleted here. */
6580 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6590 switch (code = GET_CODE (x))
6594 counts[REGNO (x)] += incr;
6609 /* If we are clobbering a MEM, mark any registers inside the address
6611 if (MEM_P (XEXP (x, 0)))
6612 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6616 /* Unless we are setting a REG, count everything in SET_DEST. */
6617 if (!REG_P (SET_DEST (x)))
6618 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6619 count_reg_usage (SET_SRC (x), counts,
6620 dest ? dest : SET_DEST (x),
6630 /* We expect dest to be NULL_RTX here. If the insn may trap, mark
6631 this fact by setting DEST to pc_rtx. */
6632 if (insn_could_throw_p (x))
6634 if (code == CALL_INSN)
6635 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6636 count_reg_usage (PATTERN (x), counts, dest, incr);
6638 /* Things used in a REG_EQUAL note aren't dead since loop may try to
6641 note = find_reg_equal_equiv_note (x);
6644 rtx eqv = XEXP (note, 0);
6646 if (GET_CODE (eqv) == EXPR_LIST)
6647 /* This REG_EQUAL note describes the result of a function call.
6648 Process all the arguments. */
6651 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6652 eqv = XEXP (eqv, 1);
6654 while (eqv && GET_CODE (eqv) == EXPR_LIST);
6656 count_reg_usage (eqv, counts, dest, incr);
6661 if (REG_NOTE_KIND (x) == REG_EQUAL
6662 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
6663 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6664 involving registers in the address. */
6665 || GET_CODE (XEXP (x, 0)) == CLOBBER)
6666 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6668 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6672 /* If the asm is volatile, then this insn cannot be deleted,
6673 and so the inputs *must* be live. */
6674 if (MEM_VOLATILE_P (x))
6676 /* Iterate over just the inputs, not the constraints as well. */
6677 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6678 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6688 fmt = GET_RTX_FORMAT (code);
6689 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6692 count_reg_usage (XEXP (x, i), counts, dest, incr);
6693 else if (fmt[i] == 'E')
6694 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6695 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
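/* count_reg_usage above accumulates, per register number, how many times the
   register is used, and the same walk with INCR = -1 later subtracts those
   uses when an insn is deleted.  The standalone sketch below shows that
   counting scheme on a toy expression tree; struct expr and the encoding of
   registers as negative codes are illustrative assumptions.  */

#include <stdio.h>

#define MAX_REGS 8

/* A toy expression: a negative code means "register number -code - 1",
   anything else is an operator with two operands.  */
struct expr { int code; struct expr *op0, *op1; };

/* Add INCR to the use count of every register mentioned in X.  Passing
   INCR = -1 undoes a previous count, as the dead-insn pass does when it
   deletes an instruction.  */
static void
count_uses (struct expr *x, int *counts, int incr)
{
  if (!x)
    return;
  if (x->code < 0)
    {
      counts[-x->code - 1] += incr;
      return;
    }
  count_uses (x->op0, counts, incr);
  count_uses (x->op1, counts, incr);
}

int
main (void)
{
  struct expr r2 = { -3, 0, 0 };         /* register 2 */
  struct expr r5 = { -6, 0, 0 };         /* register 5 */
  struct expr plus = { 1, &r2, &r5 };    /* r2 + r5 */
  int counts[MAX_REGS] = { 0 };
  count_uses (&plus, counts, 1);
  printf ("reg2=%d reg5=%d\n", counts[2], counts[5]);
  return 0;
}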
6699 /* Return true if a register is dead. Can be used in for_each_rtx. */
6702 is_dead_reg (rtx *loc, void *data)
6705 int *counts = (int *)data;
6708 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6709 && counts[REGNO (x)] == 0);
6712 /* Return true if set is live. */
6714 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
6721 if (set_noop_p (set))
6725 else if (GET_CODE (SET_DEST (set)) == CC0
6726 && !side_effects_p (SET_SRC (set))
6727 && ((tem = next_nonnote_insn (insn)) == 0
6729 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6732 else if (!is_dead_reg (&SET_DEST (set), counts)
6733 || side_effects_p (SET_SRC (set)))
6738 /* Return true if insn is live. */
6741 insn_live_p (rtx insn, int *counts)
6744 if (insn_could_throw_p (insn))
6746 else if (GET_CODE (PATTERN (insn)) == SET)
6747 return set_live_p (PATTERN (insn), insn, counts);
6748 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6750 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6752 rtx elt = XVECEXP (PATTERN (insn), 0, i);
6754 if (GET_CODE (elt) == SET)
6756 if (set_live_p (elt, insn, counts))
6759 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6764 else if (DEBUG_INSN_P (insn))
6768 for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
6771 else if (!DEBUG_INSN_P (next))
6773 else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
6776 /* If this debug insn references a dead register, drop the
6777 location expression for now. ??? We could try to find the
6778 def and see if propagation is possible. */
6779 if (for_each_rtx (&INSN_VAR_LOCATION_LOC (insn), is_dead_reg, counts))
6781 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
6782 df_insn_rescan (insn);
6791 /* Scan all the insns and delete any that are dead; i.e., they store a register
6792 that is never used or they copy a register to itself.
6794 This is used to remove insns made obviously dead by cse, loop or other
6795 optimizations. It improves the heuristics in loop since it won't try to
6796 move dead invariants out of loops or make givs for dead quantities. The
6797 remaining passes of the compilation are also sped up. */
6800 delete_trivially_dead_insns (rtx insns, int nreg)
6806 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
6807 /* First count the number of times each register is used. */
6808 counts = XCNEWVEC (int, nreg);
6809 for (insn = insns; insn; insn = NEXT_INSN (insn))
6811 count_reg_usage (insn, counts, NULL_RTX, 1);
6813 /* Go from the last insn to the first and delete insns that only set unused
6814 registers or copy a register to itself. As we delete an insn, remove
6815 usage counts for registers it uses.
6817 The first jump optimization pass may leave a real insn as the last
6818 insn in the function. We must not skip that insn or we may end
6819 up deleting code that is not really dead. */
6820 for (insn = get_last_insn (); insn; insn = prev)
6824 prev = PREV_INSN (insn);
6828 live_insn = insn_live_p (insn, counts);
6830 /* If this is a dead insn, delete it and show registers in it aren't
6833 if (! live_insn && dbg_cnt (delete_trivial_dead))
6835 count_reg_usage (insn, counts, NULL_RTX, -1);
6836 delete_insn_and_edges (insn);
6841 if (dump_file && ndead)
6842 fprintf (dump_file, "Deleted %i trivially dead insns\n",
6846 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
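/* delete_trivially_dead_insns above scans from the last insn backwards so
   that removing one dead insn (and subtracting its register uses) can expose
   the insn that fed it as dead too.  The standalone sketch below shows that
   cascade on a toy list of single-set "insns"; struct toy_insn and the setup
   are illustrative assumptions.  */

#include <stdbool.h>
#include <stdio.h>

#define NREGS 8

/* A toy insn: sets register DEST using USE0/USE1 (-1 means no operand).  */
struct toy_insn { int dest; int use0, use1; bool deleted; };

static void
count_uses (const struct toy_insn *insn, int *counts, int incr)
{
  if (insn->use0 >= 0) counts[insn->use0] += incr;
  if (insn->use1 >= 0) counts[insn->use1] += incr;
}

int
main (void)
{
  /* r1 = r0; r2 = r1; r3 = r2; r4 = r0.  Only r4 is used afterwards,
     so the whole r1/r2/r3 chain is dead.  */
  struct toy_insn insns[] = {
    { 1, 0, -1, false },
    { 2, 1, -1, false },
    { 3, 2, -1, false },
    { 4, 0, -1, false },
  };
  int n = 4;
  int counts[NREGS] = { 0 };

  for (int i = 0; i < n; i++)
    count_uses (&insns[i], counts, 1);
  counts[4] = 1;                  /* Pretend r4 escapes this fragment.  */

  /* Walk backwards: deleting "r3 = r2" drops the last use of r2, which in
     turn makes "r2 = r1" dead, and so on up the chain.  */
  for (int i = n - 1; i >= 0; i--)
    if (counts[insns[i].dest] == 0)
      {
        insns[i].deleted = true;
        count_uses (&insns[i], counts, -1);
      }

  for (int i = 0; i < n; i++)
    printf ("insn %d: %s\n", i, insns[i].deleted ? "deleted" : "kept");
  return 0;
}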
6850 /* This function is called via for_each_rtx. The argument, NEWREG, is
6851 a condition code register with the desired mode. If we are looking
6852 at the same register in a different mode, replace it with
6856 cse_change_cc_mode (rtx *loc, void *data)
6858 struct change_cc_mode_args *args = (struct change_cc_mode_args *) data;
6862 && REGNO (*loc) == REGNO (args->newreg)
6863 && GET_MODE (*loc) != GET_MODE (args->newreg))
6865 validate_change (args->insn, loc, args->newreg, 1);
6872 /* Change the mode of any reference to the register REGNO (NEWREG) to
6873 GET_MODE (NEWREG) in INSN. */
6876 cse_change_cc_mode_insn (rtx insn, rtx newreg)
6878 struct change_cc_mode_args args;
6885 args.newreg = newreg;
6887 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
6888 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
6890 /* If the following assertion was triggered, there is most probably
6891 something wrong with the cc_modes_compatible back end function.
6892 CC modes only can be considered compatible if the insn - with the mode
6893 replaced by any of the compatible modes - can still be recognized. */
6894 success = apply_change_group ();
6895 gcc_assert (success);
6898 /* Change the mode of any reference to the register REGNO (NEWREG) to
6899 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
6900 any instruction which modifies NEWREG. */
6903 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
6907 for (insn = start; insn != end; insn = NEXT_INSN (insn))
6909 if (! INSN_P (insn))
6912 if (reg_set_p (newreg, insn))
6915 cse_change_cc_mode_insn (insn, newreg);
6919 /* BB is a basic block which finishes with CC_REG as a condition code
6920 register which is set to CC_SRC. Look through the successors of BB
6921 to find blocks which have a single predecessor (i.e., this one),
6922 and look through those blocks for an assignment to CC_REG which is
6923 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
6924 permitted to change the mode of CC_SRC to a compatible mode. This
6925 returns VOIDmode if no equivalent assignments were found.
6926 Otherwise it returns the mode which CC_SRC should wind up with.
6927 ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
6928 but is passed unmodified down to recursive calls in order to prevent
6931 The main complexity in this function is handling the mode issues.
6932 We may have more than one duplicate which we can eliminate, and we
6933 try to find a mode which will work for multiple duplicates. */
6935 static enum machine_mode
6936 cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
6937 bool can_change_mode)
6940 enum machine_mode mode;
6941 unsigned int insn_count;
6944 enum machine_mode modes[2];
6950 /* We expect to have two successors. Look at both before picking
6951 the final mode for the comparison. If we have more successors
6952 (i.e., some sort of table jump, although that seems unlikely),
6953 then we require all beyond the first two to use the same
6956 found_equiv = false;
6957 mode = GET_MODE (cc_src);
6959 FOR_EACH_EDGE (e, ei, bb->succs)
6964 if (e->flags & EDGE_COMPLEX)
6967 if (EDGE_COUNT (e->dest->preds) != 1
6968 || e->dest == EXIT_BLOCK_PTR
6969 /* Avoid endless recursion on unreachable blocks. */
6970 || e->dest == orig_bb)
6973 end = NEXT_INSN (BB_END (e->dest));
6974 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
6978 if (! INSN_P (insn))
6981 /* If CC_SRC is modified, we have to stop looking for
6982 something which uses it. */
6983 if (modified_in_p (cc_src, insn))
6986 /* Check whether INSN sets CC_REG to CC_SRC. */
6987 set = single_set (insn);
6989 && REG_P (SET_DEST (set))
6990 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
6993 enum machine_mode set_mode;
6994 enum machine_mode comp_mode;
6997 set_mode = GET_MODE (SET_SRC (set));
6998 comp_mode = set_mode;
6999 if (rtx_equal_p (cc_src, SET_SRC (set)))
7001 else if (GET_CODE (cc_src) == COMPARE
7002 && GET_CODE (SET_SRC (set)) == COMPARE
7004 && rtx_equal_p (XEXP (cc_src, 0),
7005 XEXP (SET_SRC (set), 0))
7006 && rtx_equal_p (XEXP (cc_src, 1),
7007 XEXP (SET_SRC (set), 1)))
7010 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7011 if (comp_mode != VOIDmode
7012 && (can_change_mode || comp_mode == mode))
7019 if (insn_count < ARRAY_SIZE (insns))
7021 insns[insn_count] = insn;
7022 modes[insn_count] = set_mode;
7023 last_insns[insn_count] = end;
7026 if (mode != comp_mode)
7028 gcc_assert (can_change_mode);
7031 /* The modified insn will be re-recognized later. */
7032 PUT_MODE (cc_src, mode);
7037 if (set_mode != mode)
7039 /* We found a matching expression in the
7040 wrong mode, but we don't have room to
7041 store it in the array. Punt. This case
7045 /* INSN sets CC_REG to a value equal to CC_SRC
7046 with the right mode. We can simply delete
7051 /* We found an instruction to delete. Keep looking,
7052 in the hopes of finding a three-way jump. */
7056 /* We found an instruction which sets the condition
7057 code, so don't look any farther. */
7061 /* If INSN sets CC_REG in some other way, don't look any
7063 if (reg_set_p (cc_reg, insn))
7067 /* If we fell off the bottom of the block, we can keep looking
7068 through successors. We pass CAN_CHANGE_MODE as false because
7069 we aren't prepared to handle compatibility between the
7070 further blocks and this block. */
7073 enum machine_mode submode;
7075 submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
7076 if (submode != VOIDmode)
7078 gcc_assert (submode == mode);
7080 can_change_mode = false;
7088 /* Now INSN_COUNT is the number of instructions we found which set
7089 CC_REG to a value equivalent to CC_SRC. The instructions are in
7090 INSNS. The modes used by those instructions are in MODES. */
7093 for (i = 0; i < insn_count; ++i)
7095 if (modes[i] != mode)
7097 /* We need to change the mode of CC_REG in INSNS[i] and
7098 subsequent instructions. */
7101 if (GET_MODE (cc_reg) == mode)
7104 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7106 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7110 delete_insn_and_edges (insns[i]);
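/* cse_cc_succs above looks through a successor block for an insn that sets
   the condition-code register to a value equal to CC_SRC, stops as soon as
   CC_SRC's inputs or the register itself change, and deletes the redundant
   recomputation.  Below is a heavily simplified standalone sketch of that
   scan on straight-line toy insns; the representation (source ids standing
   in for structural equality, src_inputs marking the registers feeding
   CC_SRC) is an illustrative assumption and ignores the mode-compatibility
   handling of the real code.  */

#include <stdbool.h>
#include <stdio.h>

#define CC_REG 0

/* A toy insn: DEST = SRC, where equal SRC ids mean structurally equal
   right-hand sides.  */
struct toy_insn { int dest; int src; bool deleted; };

/* The predecessor ends with CC_REG = CC_SRC.  If the successor block
   recomputes the same value into CC_REG before CC_REG or anything feeding
   CC_SRC changes, that recomputation is redundant.  */
static void
remove_redundant_cc_set (struct toy_insn *block, int n,
                         int cc_src, const bool *src_inputs)
{
  for (int i = 0; i < n; i++)
    {
      /* Writing one of CC_SRC's inputs invalidates the equivalence.  */
      if (block[i].dest != CC_REG && src_inputs[block[i].dest])
        return;
      if (block[i].dest == CC_REG)
        {
          if (block[i].src == cc_src)
            block[i].deleted = true;  /* Same value: drop the duplicate.  */
          return;                     /* Either way, stop at the first set.  */
        }
    }
}

int
main (void)
{
  /* CC_SRC is "compare r1, r2" (id 42); its inputs are r1 and r2.  */
  bool src_inputs[4] = { false, true, true, false };
  struct toy_insn block[] = {
    { 3, 7, false },         /* r3 = something unrelated */
    { CC_REG, 42, false },   /* cc = compare r1, r2 again */
  };
  remove_redundant_cc_set (block, 2, 42, src_inputs);
  printf ("second cc set deleted: %d\n", block[1].deleted);
  return 0;
}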
7116 /* If we have a fixed condition code register (or two), walk through
7117 the instructions and try to eliminate duplicate assignments. */
7120 cse_condition_code_reg (void)
7122 unsigned int cc_regno_1;
7123 unsigned int cc_regno_2;
7128 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7131 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7132 if (cc_regno_2 != INVALID_REGNUM)
7133 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7135 cc_reg_2 = NULL_RTX;
7144 enum machine_mode mode;
7145 enum machine_mode orig_mode;
7147 /* Look for blocks which end with a conditional jump based on a
7148 condition code register. Then look for the instruction which
7149 sets the condition code register. Then look through the
7150 successor blocks for instructions which set the condition
7151 code register to the same value. There are other possible
7152 uses of the condition code register, but these are by far the
7153 most common and the ones which we are most likely to be able
7156 last_insn = BB_END (bb);
7157 if (!JUMP_P (last_insn))
7160 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7162 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7167 cc_src_insn = NULL_RTX;
7169 for (insn = PREV_INSN (last_insn);
7170 insn && insn != PREV_INSN (BB_HEAD (bb));
7171 insn = PREV_INSN (insn))
7175 if (! INSN_P (insn))
7177 set = single_set (insn);
7179 && REG_P (SET_DEST (set))
7180 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7183 cc_src = SET_SRC (set);
7186 else if (reg_set_p (cc_reg, insn))
7193 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7196 /* Now CC_REG is a condition code register used for a
7197 conditional jump at the end of the block, and CC_SRC, in
7198 CC_SRC_INSN, is the value to which that condition code
7199 register is set, and CC_SRC is still meaningful at the end of
7202 orig_mode = GET_MODE (cc_src);
7203 mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7204 if (mode != VOIDmode)
7206 gcc_assert (mode == GET_MODE (cc_src));
7207 if (mode != orig_mode)
7209 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7211 cse_change_cc_mode_insn (cc_src_insn, newreg);
7213 /* Do the same in the following insns that use the
7214 current value of CC_REG within BB. */
7215 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7216 NEXT_INSN (last_insn),
7224 /* Perform common subexpression elimination. Nonzero value from
7225 `cse_main' means that jumps were simplified and some code may now
7226 be unreachable, so do jump optimization again. */
7228 gate_handle_cse (void)
7230 return optimize > 0;
7234 rest_of_handle_cse (void)
7239 dump_flow_info (dump_file, dump_flags);
7241 tem = cse_main (get_insns (), max_reg_num ());
7243 /* If we are not running more CSE passes, then we are no longer
7244 expecting CSE to be run. But always rerun it in a cheap mode. */
7245 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7249 timevar_push (TV_JUMP);
7250 rebuild_jump_labels (get_insns ());
7252 timevar_pop (TV_JUMP);
7254 else if (tem == 1 || optimize > 1)
7260 struct rtl_opt_pass pass_cse =
7265 gate_handle_cse, /* gate */
7266 rest_of_handle_cse, /* execute */
7269 0, /* static_pass_number */
7271 0, /* properties_required */
7272 0, /* properties_provided */
7273 0, /* properties_destroyed */
7274 0, /* todo_flags_start */
7275 TODO_df_finish | TODO_verify_rtl_sharing |
7278 TODO_verify_flow, /* todo_flags_finish */
7284 gate_handle_cse2 (void)
7286 return optimize > 0 && flag_rerun_cse_after_loop;
7289 /* Run second CSE pass after loop optimizations. */
7291 rest_of_handle_cse2 (void)
7296 dump_flow_info (dump_file, dump_flags);
7298 tem = cse_main (get_insns (), max_reg_num ());
7300 /* Run a pass to eliminate duplicated assignments to condition code
7301 registers. We have to run this after bypass_jumps, because it
7302 makes it harder for that pass to determine whether a jump can be
7304 cse_condition_code_reg ();
7306 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7310 timevar_push (TV_JUMP);
7311 rebuild_jump_labels (get_insns ());
7313 timevar_pop (TV_JUMP);
7318 cse_not_expected = 1;
7323 struct rtl_opt_pass pass_cse2 =
7328 gate_handle_cse2, /* gate */
7329 rest_of_handle_cse2, /* execute */
7332 0, /* static_pass_number */
7333 TV_CSE2
, /* tv_id */
7334 0, /* properties_required */
7335 0, /* properties_provided */
7336 0, /* properties_destroyed */
7337 0, /* todo_flags_start */
7338 TODO_df_finish | TODO_verify_rtl_sharing |
7341 TODO_verify_flow /* todo_flags_finish */
7346 gate_handle_cse_after_global_opts (void)
7348 return optimize > 0 && flag_rerun_cse_after_global_opts;
7351 /* Run second CSE pass after loop optimizations. */
7353 rest_of_handle_cse_after_global_opts (void)
7358 /* We only want to do local CSE, so don't follow jumps. */
7359 save_cfj = flag_cse_follow_jumps;
7360 flag_cse_follow_jumps = 0;
7362 rebuild_jump_labels (get_insns ());
7363 tem = cse_main (get_insns (), max_reg_num ());
7364 purge_all_dead_edges ();
7365 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7367 cse_not_expected = !flag_rerun_cse_after_loop;
7369 /* If cse altered any jumps, rerun jump opts to clean things up. */
7372 timevar_push (TV_JUMP);
7373 rebuild_jump_labels (get_insns ());
7375 timevar_pop (TV_JUMP);
7380 flag_cse_follow_jumps = save_cfj;
7384 struct rtl_opt_pass pass_cse_after_global_opts =
7388 "cse_local", /* name */
7389 gate_handle_cse_after_global_opts, /* gate */
7390 rest_of_handle_cse_after_global_opts, /* execute */
7393 0, /* static_pass_number */
7395 0, /* properties_required */
7396 0, /* properties_provided */
7397 0, /* properties_destroyed */
7398 0, /* todo_flags_start */
7399 TODO_df_finish | TODO_verify_rtl_sharing |
7402 TODO_verify_flow /* todo_flags_finish */