/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "insn-config.h"
#include "statistics.h"
#include "double-int.h"
#include "fixed-value.h"
#include "diagnostic-core.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.
   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.
   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.
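   As an illustrative sketch (not code from this file), the copy rule
   plays out like this on a small RTL sequence, using pseudo registers
   100 and 102:

	(set (reg:SI 100) (mem:SI (reg:SI 101)))  ;; new quantity for 100
	(set (reg:SI 102) (reg:SI 100))           ;; 102 copies 100's quantity
	(set (reg:SI 100) (const_int 9))          ;; 100 gets a fresh quantity;
						  ;; 102 keeps the old one

   After the third insn, replacing a use of (reg:SI 102) by (reg:SI 100)
   would be wrong, and the differing quantity numbers record that.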
   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions
   with the qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.
   Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
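   For example (an illustrative sketch, not code from this file), after

	(set (reg:SI 100) (const_int 7))
	(set (reg:HI 101) (const_int 7))

   the same RTL constant (const_int 7) is entered once recorded with
   SImode, taken from the first destination, and once with HImode, so
   the two uses never match each other in the table.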
   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.
   1. If the value changing is in memory, except in special cases
   ANYTHING referring to memory could be changed.  That is because
   nobody knows where a pointer does not point.
   The function `invalidate_memory' removes what is necessary.

   The special cases are when the address is constant or is
   a constant plus a fixed register such as the frame pointer
   or a static chain pointer.  When such addresses are stored in,
   we can tell exactly which other such addresses must be invalidated
   due to overlap.  `invalidate' does this.
   All expressions that refer to non-constant
   memory addresses are also invalidated.  `invalidate_memory' does this.

   2. If the value changing is a register, all expressions
   containing references to that register, and only those,
   must be removed from the hash table.
   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match makes the entries be
   ignored if anyone tries to match them.
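   As an illustrative trace: suppose REG_TICK (i) is 5 when an
   expression mentioning register i is entered, so REG_IN_TABLE (i)
   becomes 5.  A later store into register i bumps REG_TICK (i) to 6,
   and nothing is scanned at that point.  Only when some new expression
   referring to register i is about to be entered do we notice that
   5 != 6 and remove the stale references; until then the mismatch
   alone keeps them from being matched.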
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.
   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
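/* For example (an illustrative sketch, not code from this file): when
   (plus (symbol_ref "tbl") (const_int 4)) is entered, the related
   expression with no constant term, (symbol_ref "tbl"), is entered as
   well and the two are linked through their `related_value' fields.
   If some register is later known to hold (symbol_ref "tbl"), that
   register plus 4 can stand in for the original expression.  */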
/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;
/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */
struct qty_table_elem
{
  rtx const_rtx;
  rtx_insn *const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
/* Insn being scanned.  */

static rtx_insn *this_insn;
static bool optimize_this_for_speed_p;
/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;
/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning of
   the current run of cse_extended_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;
/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; we must then rerun jump after CSE
   to put in the note.  */
static bool recorded_label_ref;
/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;
/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a CONST_INT).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */
struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)
/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)
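/* A usage sketch (illustration only): HASH is not side-effect free,
   since canon_hash may set do_not_record and hash_arg_in_memory, and
   REG_QTY can lazily initialize a cse_reg_info entry.  Pure queries
   therefore use SAFE_HASH, as lookup_as_function does below:

     struct table_elt *p
       = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));  */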
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)						\
  (REGNO_PTR_FRAME_P (N)					\
   || (HARD_REGISTER_NUM_P (N)					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET, 1))
#define COST_IN(X, OUTER, OPNO) (REG_P (X) ? 0 : notreg_cost (X, OUTER, OPNO))
/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;
/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};
/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;
static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code, int);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, machine_mode);
static inline unsigned safe_hash (rtx, machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx_insn *);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   machine_mode *,
					   machine_mode *);
static rtx fold_rtx (rtx, rtx_insn *);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx_insn *, bool);
static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx_insn *);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx_insn *);
static void invalidate_from_sets_and_clobbers (rtx_insn *);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
extern void dump_class (struct table_elt *);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);

static void flush_hash_table (void);
static bool insn_live_p (rtx_insn *, int *);
static bool set_live_p (rtx, rtx_insn *, int *);
static void cse_change_cc_mode_insn (rtx_insn *, rtx);
static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				  bool);

#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
/* Nonzero if X has the form (PLUS frame-pointer integer).  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
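/* For example (illustration only), fixed_base_plus_p accepts
   (plus (plus (reg fp) (const_int 8)) (const_int 4)), peeling one
   CONST_INT at each PLUS level and recursing down to the frame
   pointer, but rejects (plus (reg fp) (reg 100)) because the second
   operand is not a CONST_INT.  */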
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
DEBUG_FUNCTION void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (const_rtx x)
{
  int cost = 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  if (!CHEAP_REGNO (regno))
	    {
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
		    return MAX_COST;
		  cost += 2;
		}
	      else
		cost += 1;
	    }
	}
    }
  return cost;
}
/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
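/* Some sample outcomes (illustration only):

     preferable (1, 0, 2, 0) < 0          A wins: lower rtx cost.
     preferable (MAX_COST, 0, 2, 5) > 0   A loses outright: its cost is
					  MAX_COST.
     preferable (2, MAX_COST, 2, 0) > 0   equal costs, but A's MAX_COST
					  regcost (hard reg lifetime)
					  disqualifies it.
     preferable (2, 1, 2, 3) < 0          equal costs; lower regcost wins.  */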
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer, int opno)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
					     GET_MODE (SUBREG_REG (x))))
	  ? 0
	  : rtx_cost (x, outer, opno, optimize_this_for_speed_p) * 2);
}
/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}
831 /* Find a cse_reg_info entry for REGNO. */
833 static inline struct cse_reg_info
*
834 get_cse_reg_info (unsigned int regno
)
836 struct cse_reg_info
*p
= &cse_reg_info_table
[regno
];
838 /* If this entry has not been initialized, go ahead and initialize
840 if (p
->timestamp
!= cse_reg_info_timestamp
)
841 get_cse_reg_info_1 (regno
);
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER
	  || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS
		 || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}
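/* Worked example (illustration only): with targetm.const_anchor == 256
   and CST == (const_int 621):

     *lower_base = 621 & ~255           = 512,  *lower_offs =  109
     *upper_base = (621 + 255) & ~255   = 768,  *upper_offs = -147

   so 512 + 109 == 768 + (-147) == 621 and the function returns true.
   For CST == (const_int 512) it returns false: CST is itself an
   anchor.  */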
/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
		     machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (mode, reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression therefore it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
}
/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}
/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}
/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful or NULL_RTX,
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  /* CONST_INT is used for CC modes, but we should leave those alone.  */
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return NULL_RTX;

  gcc_assert (SCALAR_INT_MODE_P (mode));
  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}
/* Insert X in the hash table, assuming HASH is its hash code and
   CLASSP is an element of the class it should go in (or 0 if a new
   class should be made).  COST is the cost of X and REG_COST is the
   cost of registers in X.  It is inserted at the proper position to
   keep the class in the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */
static struct table_elt *
insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
		   machine_mode mode, int cost, int reg_cost)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = XNEW (struct table_elt);

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = cost;
  elt->regcost = reg_cost;
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp;
	       (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;
  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = SAFE_HASH (subexp, mode);
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
/* Wrap insert_with_costs by passing the default costs.  */

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash,
	machine_mode mode)
{
  return
    insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
}
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new_elt;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
	{
	  bool need_rehash = false;

	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (REG_P (exp))
	    {
	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
	      delete_reg_equiv (REGNO (exp));
	    }

	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
	    remove_pseudo_from_table (exp, hash);
	  else
	    remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0) || need_rehash)
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new_elt = insert (exp, class1, hash, mode);
	  new_elt->in_memory = hash_arg_in_memory;
	  if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
	    new_elt->cost = MAX_COST;
	}
    }
}
/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (REG_P (p->exp))
	  invalidate (p->exp, VOIDmode);
	else
	  remove_from_table (p, i);
      }
}
/* Check whether an anti dependence exists between X and EXP.  MODE and
   ADDR are as for canon_anti_dependence.  */

static bool
check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
	return true;
    }
  return false;
}
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
	/* If X is a register, dependencies on its contents are recorded
	   through the qty number mechanism.  Just change the qty number of
	   the register, mark it as invalid for expressions that refer to it,
	   and remove it itself.  */
	unsigned int regno = REGNO (x);
	unsigned int hash = HASH (x, GET_MODE (x));

	/* Remove REGNO from any quantity list it might be on and indicate
	   that its value might have changed.  If it is a pseudo, remove its
	   entry from the hash table.

	   For a hard register, we do the first two actions above for any
	   additional hard registers corresponding to X.  Then, if any of these
	   registers are in the table, we must remove any REG entries that
	   overlap these registers.  */

	delete_reg_equiv (regno);
	REG_TICK (regno)++;
	SUBREG_TICKED (regno) = -1;

	if (regno >= FIRST_PSEUDO_REGISTER)
	  remove_pseudo_from_table (x, hash);
	else
	  {
	    HOST_WIDE_INT in_table
	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	    unsigned int endregno = END_HARD_REGNO (x);
	    unsigned int tregno, tendregno, rn;
	    struct table_elt *p, *next;

	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	    for (rn = regno + 1; rn < endregno; rn++)
	      {
		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
		delete_reg_equiv (rn);
		REG_TICK (rn)++;
		SUBREG_TICKED (rn) = -1;
	      }

	    if (in_table)
	      for (hash = 0; hash < HASH_SIZE; hash++)
		for (p = table[hash]; p; p = next)
		  {
		    next = p->next_same_hash;

		    if (!REG_P (p->exp)
			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		      continue;

		    tregno = REGNO (p->exp);
		    tendregno = END_HARD_REGNO (p->exp);
		    if (tendregno > regno && tregno < endregno)
		      remove_from_table (p, hash);
		  }
	  }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
	 question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
	 true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
	 memory.  */
      if (full_mode == VOIDmode)
	full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
	{
	  struct table_elt *next;

	  for (p = table[i]; p; p = next)
	    {
	      next = p->next_same_hash;
	      if (p->in_memory)
		{
		  /* Just canonicalize the expression once;
		     otherwise each time we call invalidate
		     true_dependence will canonicalize the
		     expression again.  */
		  if (!p->canon_exp)
		    p->canon_exp = canon_rtx (p->exp);
		  if (check_dependence (p->canon_exp, x, full_mode, addr))
		    remove_from_table (p, i);
		}
	    }
	}
      return;

    default:
      gcc_unreachable ();
    }
}
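
/* An illustrative sketch (hypothetical RTL, not from any particular
   test case): after processing

       (set (mem:SI (plus:SI (reg/f:SI fp) (const_int -4))) (reg:SI 60))

   any recorded MEM whose address may overlap that stack slot is removed
   by the check_dependence walk above; REG entries and MEMs proven
   disjoint by alias analysis survive.  */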
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
	  remove_from_table (p, i);
      }
}
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */

static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
			    machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	rtx exp = p->exp;
	next = p->next_same_hash;

	if (!REG_P (exp)
	    && (GET_CODE (exp) != SUBREG
		|| !REG_P (SUBREG_REG (exp))
		|| REGNO (SUBREG_REG (exp)) != regno
		|| (((SUBREG_BYTE (exp)
		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
		    && SUBREG_BYTE (exp) <= end))
	    && refers_to_regno_p (regno, p->exp))
	  remove_from_table (p, i);
      }
}
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, false)
	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
	  {
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call (void)
{
  unsigned int regno, endregno;
  unsigned int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;
  hard_reg_set_iterator hrsi;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */
  EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
    {
      delete_reg_equiv (regno);
      if (REG_TICK (regno) >= 0)
	{
	  REG_TICK (regno)++;
	  SUBREG_TICKED (regno) = -1;
	}
      in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
    }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
	{
	  next = p->next_same_hash;

	  if (!REG_P (p->exp)
	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
	    continue;

	  regno = REGNO (p->exp);
	  endregno = END_HARD_REGNO (p->exp);

	  for (i = regno; i < endregno; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	      {
		remove_from_table (p, hash);
		break;
	      }
	}
}
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, we must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       SAFE_HASH (subexp, GET_MODE (subexp)),
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (REG_P (q->exp))
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->mode, q->exp, offset);
}
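
/* An illustrative sketch (hypothetical pseudo numbers): if (reg 65) is
   known to hold (const (plus (symbol_ref "tbl") (const_int 4))) and we
   need (const (plus (symbol_ref "tbl") (const_int 12))), this returns
   (plus (reg 65) (const_int 8)), trading a constant load for a cheap
   addition off an already-loaded register.  */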
/* Hash a string.  Just add its bytes up.  */
static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
   When the callback returns true, we continue with the new rtx.  */

unsigned
hash_rtx_cb (const_rtx x, machine_mode mode,
	     int *do_not_record_p, int *hash_arg_in_memory_p,
	     bool have_reg_qty, hash_rtx_callback_function cb)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;
  machine_mode newmode;
  rtx newx;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  /* Invoke the callback first.  */
  if (cb != NULL
      && ((*cb) (x, mode, &newx, &newmode)))
    {
      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
			   hash_arg_in_memory_p, have_reg_qty, cb);
      return hash;
    }

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	unsigned int regno = REGNO (x);

	if (do_not_record_p && !reload_completed)
	  {
	    /* On some machines, we can't record any non-fixed hard register,
	       because extending its life will cause reload problems.  We
	       consider ap, fp, sp, gp to be fixed for this purpose.

	       We also consider CCmode registers to be fixed for this purpose;
	       failure to do so leads to failure to simplify 0<100 type of
	       conditionals.

	       On all machines, we can't record any global registers.
	       Nor should we record any register that is in a small
	       class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
	    bool record;

	    if (regno >= FIRST_PSEUDO_REGISTER)
	      record = true;
	    else if (x == frame_pointer_rtx
		     || x == hard_frame_pointer_rtx
		     || x == arg_pointer_rtx
		     || x == stack_pointer_rtx
		     || x == pic_offset_table_rtx)
	      record = true;
	    else if (global_regs[regno])
	      record = false;
	    else if (fixed_regs[regno])
	      record = true;
	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
	      record = true;
	    else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
	      record = false;
	    else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
	      record = false;
	    else
	      record = true;

	    if (!record)
	      {
		*do_not_record_p = 1;
		return 0;
	      }
	  }

	hash += ((unsigned int) REG << 7);
	hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
	return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
	if (REG_P (SUBREG_REG (x)))
	  {
	    hash += (((unsigned int) SUBREG << 7)
		     + REGNO (SUBREG_REG (x))
		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
	    return hash;
	  }
	break;
      }

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      else
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += hash_rtx_cb (elt, GET_MODE (elt),
				 do_not_record_p, hash_arg_in_memory_p,
				 have_reg_qty, cb);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (LABEL_REF_LABEL (x)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
	 know the size of the move.  */
      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	*hash_arg_in_memory_p = 1;

      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
	 handling since the MEM may be BLKmode which normally
	 prevents an entry from being made.  Pure calls are
	 marked by a USE which mentions BLKmode memory.
	 See calls.c:emit_call_1.  */
      if (MEM_P (XEXP (x, 0))
	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  hash += (unsigned) USE;
	  x = XEXP (x, 0);

	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	    *hash_arg_in_memory_p = 1;

	  /* Now that we have already found this special case,
	     might as well speed it up as much as possible.  */
	  hash += (unsigned) MEM;
	  x = XEXP (x, 0);
	  goto repeat;
	}
      break;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      if (do_not_record_p)
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	return hash;
      break;

    case ASM_OPERANDS:
      if (do_not_record_p && MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
					GET_MODE (ASM_OPERANDS_INPUT (x, i)),
					do_not_record_p, hash_arg_in_memory_p,
					have_reg_qty, cb)
			   + hash_rtx_string
			   (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
		}

	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
			       hash_arg_in_memory_p,
			       have_reg_qty, cb);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
				 hash_arg_in_memory_p,
				 have_reg_qty, cb);
	  break;

	case 's':
	  hash += hash_rtx_string (XSTR (x, i));
	  break;

	case 'i':
	  hash += (unsigned int) XINT (x, i);
	  break;

	case '0': case 't':
	  /* Unused.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash;
}
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which does not have the MEM_READONLY_P flag set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
	  int *hash_arg_in_memory_p, bool have_reg_qty)
{
  return hash_rtx_cb (x, mode, do_not_record_p,
		      hash_arg_in_memory_p, have_reg_qty, NULL);
}
/* Hash an rtx X for cse via hash_rtx.
   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   does not have the MEM_READONLY_P flag set.  */

static inline unsigned
canon_hash (rtx x, machine_mode mode)
{
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}
/* Like canon_hash but with no side effects, i.e. do_not_record
   and hash_arg_in_memory are not changed.  */

static inline unsigned
safe_hash (rtx x, machine_mode mode)
{
  int dummy_do_not_record;
  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
}
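
/* An illustrative sketch of how these hash functions are used by the
   lookup paths in this file (assuming the usual lookup/table_elt
   machinery defined earlier):

       unsigned h = SAFE_HASH (x, GET_MODE (x));
       struct table_elt *elt = lookup (x, h, GET_MODE (x));

   canon_hash is used instead when the caller also wants the
   do_not_record and hash_arg_in_memory side effects recorded.  */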
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */

int
exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    CASE_CONST_UNIQUE:
      return x == y;

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
	return REGNO (x) == REGNO (y);
      else
	{
	  unsigned int regno = REGNO (y);
	  unsigned int i;
	  unsigned int endregno = END_REGNO (y);

	  /* If the quantities are not the same, the expressions are not
	     equivalent.  If they are the same and we are not validating,
	     the expressions are equivalent.  Otherwise, ensure all regs
	     are up-to-date.  */

	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
	    return 0;

	  if (! validate)
	    return 1;

	  for (i = regno; i < endregno; i++)
	    if (REG_IN_TABLE (i) != REG_TICK (i))
	      return 0;

	  return 1;
	}

    case MEM:
      if (for_gcse)
	{
	  /* A volatile mem should not be considered equivalent to any
	     other.  */
	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	    return 0;

	  /* Can't merge two expressions in different alias sets, since we
	     can decide that the expression is transparent in a block when
	     it isn't, due to it being set with the different alias set.

	     Also, can't merge two expressions with different MEM_ATTRS.
	     They could e.g. be two different entities allocated into the
	     same space on the stack (see e.g. PR25130).  In that case, the
	     MEM addresses can be the same, even though the two MEMs are
	     absolutely not equivalent.

	     But because really all MEM attributes should be the same for
	     equivalent MEMs, we just use the invariant that MEMs that have
	     the same attributes share the same mem_attrs data structure.  */
	  if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
	    return 0;

	  /* If we are handling exceptions, we cannot consider two expressions
	     with different trapping status as equivalent, because simple_mem
	     might accept one and reject the other.  */
	  if (cfun->can_throw_non_call_exceptions
	      && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
	    return 0;
	}
      break;

    /*  For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
			    validate, for_gcse)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
			       validate, for_gcse))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
			       validate, for_gcse)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
			       ASM_OPERANDS_INPUT (y, i),
			       validate, for_gcse)
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
			     validate, for_gcse))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
			       validate, for_gcse))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	case 't':
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return 1;
}
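
/* An illustrative sketch: (plus:SI (reg 60) (reg 61)) and
   (plus:SI (reg 61) (reg 60)) compare equal here, because PLUS is one
   of the commutative codes for which both operand orders are tried
   above.  (minus:SI (reg 60) (reg 61)) and its swapped form do not.  */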
/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
   the result if necessary.  INSN is as for canon_reg.  */

static void
validate_canon_reg (rtx *xloc, rtx_insn *insn)
{
  if (*xloc)
    {
      rtx new_rtx = canon_reg (*xloc, insn);

      /* If replacing pseudo with hard reg or vice versa, ensure the
	 insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
      gcc_assert (insn && new_rtx);
      validate_change (insn, xloc, new_rtx, 1);
    }
}
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx_insn *insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
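
/* An illustrative sketch (hypothetical pseudo numbers): if pseudos 66
   and 70 share a quantity whose first_reg is 66, canon_reg rewrites
   (plus:SI (reg 70) (const_int 4)) into (plus:SI (reg 66) (const_int 4)),
   so that both uses hash to the same table entry.  */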
/* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   determine what values are actually being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      machine_mode *pmode1, machine_mode *pmode2)
{
  rtx arg1, arg2;
  hash_set<rtx> *visited = NULL;
  /* Set nonzero when we find something of interest.  */
  rtx x = 0;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* Remember state from previous iteration.  */
      if (x)
	{
	  if (!visited)
	    visited = new hash_set<rtx>;
	  visited->add (x);
	  x = 0;
	}

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
	{
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
		      REAL_VALUE_NEGATIVE (fsfv)))
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
	{
	  p = p->first_same_value;

	  /* If what we compare is already known to be constant, that is as
	     good as it gets.
	     We need to break the loop in this case, because otherwise we
	     can have an infinite loop when looking at a reg that is known
	     to be a constant which is the same as a comparison of a reg
	     against zero which appears later in the insn stream, which in
	     turn is constant and the same as the comparison of the first reg
	     against zero...  */
	  if (p->is_const)
	    break;
	}

      for (; p; p = p->next_same_value)
	{
	  machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  /* If it's a comparison we've used before, skip it.  */
	  if (visited && visited->contains (p->exp))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machines with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && val_signbit_known_set_p (inner_mode,
						   STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && SCALAR_FLOAT_MODE_P (inner_mode)
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
		  && COMPARISON_P (p->exp)))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& val_signbit_known_set_p (inner_mode,
						    STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& SCALAR_FLOAT_MODE_P (inner_mode)
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    )
		   && COMPARISON_P (p->exp))
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
	  else if (!rtx_addr_can_trap_p (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      /* If we need to reverse the comparison, make sure that is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
      if (reverse_code)
	{
	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
	  if (reversed == UNKNOWN)
	    break;
	  else
	    code = reversed;
	}
      else if (COMPARISON_P (x))
	code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  if (visited)
    delete visited;
  return code;
}
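
/* An illustrative sketch (hypothetical pseudo numbers): given

       (set (reg 70) (gt:SI (reg 65) (reg 66)))
       ... (ne (reg 70) (const_int 0)) ...

   this follows (reg 70) back to the GT and returns GT with
   *PARG1 = (reg 65) and *PARG2 = (reg 66); for an EQ test of (reg 70)
   it would return the reversed code, LE.  */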
/* If X is a nontrivial arithmetic operation on an argument for which
   a constant value can be determined, return the result of operating
   on that value, as a constant.  Otherwise, return X, possibly with
   one or more operands changed to a forward-propagated constant.

   If X is a register whose contents are known, we do NOT return
   those contents here; equiv_constant is called to perform that task.
   For SUBREGs and MEMs, we do that both here and in equiv_constant.

   INSN is the insn that we may be modifying.  If it is 0, make a copy
   of X before modifying it.  */

static rtx
fold_rtx (rtx x, rtx_insn *insn)
{
  enum rtx_code code;
  machine_mode mode;
  const char *fmt;
  int i;
  rtx new_rtx = 0;
  int changed = 0;

  /* Operands of X.  */
  /* Workaround -Wmaybe-uninitialized false positive during
     profiledbootstrap by initializing them.  */
  rtx folded_arg0 = NULL_RTX;
  rtx folded_arg1 = NULL_RTX;

  /* Constant equivalents of first three operands of X;
     0 when no such equivalent is known.  */
  rtx const_arg0;
  rtx const_arg1;
  rtx const_arg2;

  /* The mode of the first operand of X.  We need this for sign and zero
     extends.  */
  machine_mode mode_arg0;

  if (x == 0)
    return x;

  /* Try to perform some initial simplifications on X.  */
  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
    case SUBREG:
      if ((new_rtx = equiv_constant (x)) != NULL_RTX)
	return new_rtx;
      return x;

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
    case PC:
      /* No use simplifying an EXPR_LIST
	 since they are used only for lists of args
	 in a function call's REG_EQUAL note.  */
    case EXPR_LIST:
      return x;

#ifdef HAVE_cc0
    case CC0:
      return prev_insn_cc0;
#endif

    case ASM_OPERANDS:
      if (insn)
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
	}
      return x;

#ifdef NO_FUNCTION_CSE
    case CALL:
      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
	return x;
      break;
#endif

    /* Anything else goes through the loop below.  */
    default:
      break;
    }

  mode = GET_MODE (x);
  const_arg0 = 0;
  const_arg1 = 0;
  const_arg2 = 0;
  mode_arg0 = VOIDmode;

  /* Try folding our operands.
     Then see which ones have constant values known.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	rtx folded_arg = XEXP (x, i), const_arg;
	machine_mode mode_arg = GET_MODE (folded_arg);

	switch (GET_CODE (folded_arg))
	  {
	  case MEM:
	  case REG:
	  case SUBREG:
	    const_arg = equiv_constant (folded_arg);
	    break;

	  case CONST:
	  CASE_CONST_ANY:
	  case SYMBOL_REF:
	  case LABEL_REF:
	    const_arg = folded_arg;
	    break;

#ifdef HAVE_cc0
	  case CC0:
	    /* The cc0-user and cc0-setter may be in different blocks if
	       the cc0-setter potentially traps.  In that case PREV_INSN_CC0
	       will have been cleared as we exited the block with the
	       setter.

	       While we could potentially track cc0 in this case, it just
	       doesn't seem to be worth it given that cc0 targets are not
	       terribly common or important these days and trapping math
	       is rarely used.  The combination of those two conditions
	       necessary to trip this situation is exceedingly rare in the
	       real world.  */
	    if (!prev_insn_cc0)
	      {
		const_arg = NULL_RTX;
	      }
	    else
	      {
		folded_arg = prev_insn_cc0;
		mode_arg = prev_insn_cc0_mode;
		const_arg = equiv_constant (folded_arg);
	      }
	    break;
#endif

	  default:
	    folded_arg = fold_rtx (folded_arg, insn);
	    const_arg = equiv_constant (folded_arg);
	    break;
	  }

	/* For the first three operands, see if the operand
	   is constant or equivalent to a constant.  */
	switch (i)
	  {
	  case 0:
	    folded_arg0 = folded_arg;
	    const_arg0 = const_arg;
	    mode_arg0 = mode_arg;
	    break;
	  case 1:
	    folded_arg1 = folded_arg;
	    const_arg1 = const_arg;
	    break;
	  case 2:
	    const_arg2 = const_arg;
	    break;
	  }

	/* Pick the least expensive of the argument and an equivalent constant
	   argument.  */
	if (const_arg != 0
	    && const_arg != folded_arg
	    && COST_IN (const_arg, code, i) <= COST_IN (folded_arg, code, i)

	    /* It's not safe to substitute the operand of a conversion
	       operator with a constant, as the conversion's identity
	       depends upon the mode of its operand.  This optimization
	       is handled by the call to simplify_unary_operation.  */
	    && (GET_RTX_CLASS (code) != RTX_UNARY
		|| GET_MODE (const_arg) == mode_arg0
		|| (code != ZERO_EXTEND
		    && code != SIGN_EXTEND
		    && code != TRUNCATE
		    && code != FLOAT_TRUNCATE
		    && code != FLOAT_EXTEND
		    && code != FLOAT
		    && code != FIX
		    && code != UNSIGNED_FLOAT
		    && code != UNSIGNED_FIX)))
	  folded_arg = const_arg;

	if (folded_arg == XEXP (x, i))
	  continue;

	if (insn == NULL_RTX && !changed)
	  x = copy_rtx (x);
	changed = 1;
	validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
      }

  if (changed)
    {
      /* Canonicalize X if necessary, and keep const_argN and folded_argN
	 consistent with the order in X.  */
      if (canonicalize_change_group (insn, x))
	{
	  rtx tem;
	  tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
	  tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
	}

      apply_change_group ();
    }

  /* If X is an arithmetic operation, see if we can simplify it.  */

  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
	/* We can't simplify extension ops unless we know the
	   original mode.  */
	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
	    && mode_arg0 == VOIDmode)
	  break;

	new_rtx = simplify_unary_operation (code, mode,
					    const_arg0 ? const_arg0 : folded_arg0,
					    mode_arg0);
      }
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      /* See what items are actually being compared and set FOLDED_ARG[01]
	 to those values and CODE to the actual comparison code.  If any are
	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
	 do anything if both operands are already known to be constant.  */

      /* ??? Vector mode comparisons are not supported yet.  */
      if (VECTOR_MODE_P (mode))
	break;

      if (const_arg0 == 0 || const_arg1 == 0)
	{
	  struct table_elt *p0, *p1;
	  rtx true_rtx, false_rtx;
	  machine_mode mode_arg1;

	  if (SCALAR_FLOAT_MODE_P (mode))
	    {
#ifdef FLOAT_STORE_FLAG_VALUE
	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
#else
	      true_rtx = NULL_RTX;
#endif
	      false_rtx = CONST0_RTX (mode);
	    }
	  else
	    {
	      true_rtx = const_true_rtx;
	      false_rtx = const0_rtx;
	    }

	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
				       &mode_arg0, &mode_arg1);

	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
	     what kinds of things are being compared, so we can't do
	     anything with this comparison.  */

	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
	    break;

	  const_arg0 = equiv_constant (folded_arg0);
	  const_arg1 = equiv_constant (folded_arg1);

	  /* If we do not now have two constants being compared, see
	     if we can nevertheless deduce some things about the
	     comparison.  */
	  if (const_arg0 == 0 || const_arg1 == 0)
	    {
	      if (const_arg1 != NULL)
		{
		  rtx cheapest_simplification;
		  int cheapest_cost;
		  rtx simp_result;
		  struct table_elt *p;

		  /* See if we can find an equivalent of folded_arg0
		     that gets us a cheaper expression, possibly a
		     constant through simplifications.  */
		  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
			      mode_arg0);

		  if (p != NULL)
		    {
		      cheapest_simplification = x;
		      cheapest_cost = COST (x);

		      for (p = p->first_same_value; p != NULL;
			   p = p->next_same_value)
			{
			  int cost;

			  /* If the entry isn't valid, skip it.  */
			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
			    continue;

			  /* Try to simplify using this equivalence.  */
			  simp_result
			    = simplify_relational_operation (code, mode,
							     mode_arg0,
							     p->exp,
							     const_arg1);

			  if (simp_result == NULL)
			    continue;

			  cost = COST (simp_result);
			  if (cost < cheapest_cost)
			    {
			      cheapest_cost = cost;
			      cheapest_simplification = simp_result;
			    }
			}

		      /* If we have a cheaper expression now, use that
			 and try folding it further, from the top.  */
		      if (cheapest_simplification != x)
			return fold_rtx (copy_rtx (cheapest_simplification),
					 insn);
		    }
		}

	      /* See if the two operands are the same.  */

	      if ((REG_P (folded_arg0)
		   && REG_P (folded_arg1)
		   && (REG_QTY (REGNO (folded_arg0))
		       == REG_QTY (REGNO (folded_arg1))))
		  || ((p0 = lookup (folded_arg0,
				    SAFE_HASH (folded_arg0, mode_arg0),
				    mode_arg0))
		      && (p1 = lookup (folded_arg1,
				       SAFE_HASH (folded_arg1, mode_arg0),
				       mode_arg0))
		      && p0->first_same_value == p1->first_same_value))
		folded_arg1 = folded_arg0;

	      /* If FOLDED_ARG0 is a register, see if the comparison we are
		 doing now is either the same as we did before or the reverse
		 (we only check the reverse if not floating-point).  */
	      else if (REG_P (folded_arg0))
		{
		  int qty = REG_QTY (REGNO (folded_arg0));

		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
		    {
		      struct qty_table_elem *ent = &qty_table[qty];

		      if ((comparison_dominates_p (ent->comparison_code, code)
			   || (! FLOAT_MODE_P (mode_arg0)
			       && comparison_dominates_p (ent->comparison_code,
							  reverse_condition (code))))
			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
			      || (const_arg1
				  && rtx_equal_p (ent->comparison_const,
						  const_arg1))
			      || (REG_P (folded_arg1)
				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
			{
			  if (comparison_dominates_p (ent->comparison_code, code))
			    {
			      if (true_rtx)
				return true_rtx;
			      else
				break;
			    }
			  else
			    return false_rtx;
			}
		    }
		}
	    }
	}

      /* If we are comparing against zero, see if the first operand is
	 equivalent to an IOR with a constant.  If so, we may be able to
	 determine the result of this comparison.  */
      if (const_arg1 == const0_rtx && !const_arg0)
	{
	  rtx y = lookup_as_function (folded_arg0, IOR);
	  rtx inner_const;

	  if (y != 0
	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
	      && CONST_INT_P (inner_const)
	      && INTVAL (inner_const) != 0)
	    folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
	}

      {
	rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
	rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
	new_rtx = simplify_relational_operation (code, mode, mode_arg0,
						 op0, op1);
      }
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      switch (code)
	{
	case PLUS:
	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
	     with that LABEL_REF as its second operand.  If so, the result is
	     the first operand of that MINUS.  This handles switches with an
	     ADDR_DIFF_VEC table.  */
	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
	    {
	      rtx y
		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
		: lookup_as_function (folded_arg0, MINUS);

	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
		  && LABEL_REF_LABEL (XEXP (y, 1)) == LABEL_REF_LABEL (const_arg1))
		return XEXP (y, 0);

	      /* Now try for a CONST of a MINUS like the above.  */
	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
			: lookup_as_function (folded_arg0, CONST))) != 0
		  && GET_CODE (XEXP (y, 0)) == MINUS
		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && LABEL_REF_LABEL (XEXP (XEXP (y, 0), 1)) == LABEL_REF_LABEL (const_arg1))
		return XEXP (XEXP (y, 0), 0);
	    }

	  /* Likewise if the operands are in the other order.  */
	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
	    {
	      rtx y
		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
		: lookup_as_function (folded_arg1, MINUS);

	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
		  && LABEL_REF_LABEL (XEXP (y, 1)) == LABEL_REF_LABEL (const_arg0))
		return XEXP (y, 0);

	      /* Now try for a CONST of a MINUS like the above.  */
	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
			: lookup_as_function (folded_arg1, CONST))) != 0
		  && GET_CODE (XEXP (y, 0)) == MINUS
		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && LABEL_REF_LABEL (XEXP (XEXP (y, 0), 1)) == LABEL_REF_LABEL (const_arg0))
		return XEXP (XEXP (y, 0), 0);
	    }

	  /* If second operand is a register equivalent to a negative
	     CONST_INT, see if we can find a register equivalent to the
	     positive constant.  Make a MINUS if so.  Don't do this for
	     a non-negative constant since we might then alternate between
	     choosing positive and negative constants.  Having the positive
	     constant previously-used is the more common case.  Be sure
	     the resulting constant is non-negative; if const_arg1 were
	     the smallest negative number this would overflow: depending
	     on the mode, this would either just be the same value (and
	     hence not save anything) or be incorrect.  */
	  if (const_arg1 != 0 && CONST_INT_P (const_arg1)
	      && INTVAL (const_arg1) < 0
	      /* This used to test

		 -INTVAL (const_arg1) >= 0

		 But The Sun V5.0 compilers mis-compiled that test.  So
		 instead we test for the problematic value in a more direct
		 manner and hope the Sun compilers get it correct.  */
	      && INTVAL (const_arg1) !=
		((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
	      && REG_P (folded_arg1))
	    {
	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
	      struct table_elt *p
		= lookup (new_const, SAFE_HASH (new_const, mode), mode);

	      if (p)
		for (p = p->first_same_value; p; p = p->next_same_value)
		  if (REG_P (p->exp))
		    return simplify_gen_binary (MINUS, mode, folded_arg0,
						canon_reg (p->exp, NULL));
	    }
	  goto from_plus;

	case MINUS:
	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
	     If so, produce (PLUS Z C2-C).  */
	  if (const_arg1 != 0 && CONST_INT_P (const_arg1))
	    {
	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
	      if (y && CONST_INT_P (XEXP (y, 1)))
		return fold_rtx (plus_constant (mode, copy_rtx (y),
						-INTVAL (const_arg1)),
				 NULL);
	    }

	  /* Fall through.  */

	from_plus:
	case SMIN:  case SMAX:  case UMIN:  case UMAX:
	case IOR:   case AND:   case XOR:
	case MULT:
	case ASHIFT: case LSHIFTRT: case ASHIFTRT:
	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
	     is known to be of similar form, we may be able to replace the
	     operation with a combined operation.  This may eliminate the
	     intermediate operation if every use is simplified in this way.
	     Note that the similar optimization done by combine.c only works
	     if the intermediate operation's result has only one reference.  */

	  if (REG_P (folded_arg0)
	      && const_arg1 && CONST_INT_P (const_arg1))
	    {
	      int is_shift
		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
	      rtx y, inner_const, new_const;
	      rtx canon_const_arg1 = const_arg1;
	      enum rtx_code associate_code;

	      if (is_shift
		  && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
		      || INTVAL (const_arg1) < 0))
		{
		  if (SHIFT_COUNT_TRUNCATED)
		    canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
						& (GET_MODE_BITSIZE (mode)
						   - 1));
		  else
		    break;
		}

	      y = lookup_as_function (folded_arg0, code);
	      if (y == 0)
		break;

	      /* If we have compiled a statement like
		 "if (x == (x & mask1))", and now are looking at
		 "x & mask2", we will have a case where the first operand
		 of Y is the same as our first operand.  Unless we detect
		 this case, an infinite loop will result.  */
	      if (XEXP (y, 0) == folded_arg0)
		break;

	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
	      if (!inner_const || !CONST_INT_P (inner_const))
		break;

	      /* Don't associate these operations if they are a PLUS with the
		 same constant and it is a power of two.  These might be doable
		 with a pre- or post-increment.  Similarly for two subtracts of
		 identical powers of two with post decrement.  */

	      if (code == PLUS && const_arg1 == inner_const
		  && ((HAVE_PRE_INCREMENT
		       && exact_log2 (INTVAL (const_arg1)) >= 0)
		      || (HAVE_POST_INCREMENT
			  && exact_log2 (INTVAL (const_arg1)) >= 0)
		      || (HAVE_PRE_DECREMENT
			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
		      || (HAVE_POST_DECREMENT
			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
		break;

	      /* ??? Vector mode shifts by scalar
		 shift operand are not supported yet.  */
	      if (is_shift && VECTOR_MODE_P (mode))
		break;

	      if (is_shift
		  && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
		      || INTVAL (inner_const) < 0))
		{
		  if (SHIFT_COUNT_TRUNCATED)
		    inner_const = GEN_INT (INTVAL (inner_const)
					   & (GET_MODE_BITSIZE (mode) - 1));
		  else
		    break;
		}

	      /* Compute the code used to compose the constants.  For example,
		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */

	      associate_code = (is_shift || code == MINUS ? PLUS : code);

	      new_const = simplify_binary_operation (associate_code, mode,
						     canon_const_arg1,
						     inner_const);

	      if (new_const == 0)
		break;

	      /* If we are associating shift operations, don't let this
		 produce a shift of the size of the object or larger.
		 This could occur when we follow a sign-extend by a right
		 shift on a machine that does a sign-extend as a pair
		 of shifts.  */

	      if (is_shift
		  && CONST_INT_P (new_const)
		  && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
		{
		  /* As an exception, we can turn an ASHIFTRT of this
		     form into a shift of the number of bits - 1.  */
		  if (code == ASHIFTRT)
		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
		  else if (!side_effects_p (XEXP (y, 0)))
		    return CONST0_RTX (mode);
		  else
		    break;
		}

	      y = copy_rtx (XEXP (y, 0));

	      /* If Y contains our first operand (the most common way this
		 can happen is if Y is a MEM), we would go into an infinite
		 loop if we tried to fold it.  So don't in that case.  */

	      if (! reg_mentioned_p (folded_arg0, y))
		y = fold_rtx (y, insn);

	      return simplify_gen_binary (code, mode, y, new_const);
	    }
	  break;

	case DIV:       case UDIV:
	  /* ??? The associative optimization performed immediately above is
	     also possible for DIV and UDIV using associate_code of MULT.
	     However, we would need extra code to verify that the
	     multiplication does not overflow, that is, there is no overflow
	     in the calculation of new_const.  */
	  break;

	default:
	  break;
	}

      new_rtx = simplify_binary_operation (code, mode,
					   const_arg0 ? const_arg0 : folded_arg0,
					   const_arg1 ? const_arg1 : folded_arg1);
      break;

    case RTX_OBJ:
      /* (lo_sum (high X) X) is simply X.  */
      if (code == LO_SUM && const_arg0 != 0
	  && GET_CODE (const_arg0) == HIGH
	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
	return const_arg1;
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
					    const_arg0 ? const_arg0 : folded_arg0,
					    const_arg1 ? const_arg1 : folded_arg1,
					    const_arg2 ? const_arg2 : XEXP (x, 2));
      break;

    default:
      break;
    }

  return new_rtx ? new_rtx : x;
}
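
/* An illustrative sketch (hypothetical pseudo numbers): with (reg 65)
   known to hold (const_int 4), fold_rtx turns
       (ashift:SI (reg 65) (const_int 1))
   into (const_int 8); the associative rule above likewise combines
       (plus:SI (reg 70) (const_int 8))
   into (plus:SI (reg 60) (const_int 12)) when (reg 70) is known to be
   (plus:SI (reg 60) (const_int 4)).  */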
/* Return a constant value currently equivalent to X.
   Return 0 if we don't know one.  */

static rtx
equiv_constant (rtx x)
{
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (x_ent->const_rtx)
	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
    }

  if (x == 0 || CONSTANT_P (x))
    return x;

  if (GET_CODE (x) == SUBREG)
    {
      machine_mode mode = GET_MODE (x);
      machine_mode imode = GET_MODE (SUBREG_REG (x));
      rtx new_rtx;

      /* See if we previously assigned a constant value to this SUBREG.  */
      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
	  || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
	  || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
	  || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
	return new_rtx;

      /* If we didn't and if doing so makes sense, see if we previously
	 assigned a constant value to the enclosing word mode SUBREG.  */
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
	  && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
	{
	  int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
	  if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
	    {
	      rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
	      new_rtx = lookup_as_function (y, CONST_INT);
	      if (new_rtx)
		return gen_lowpart (mode, new_rtx);
	    }
	}

      /* Otherwise see if we already have a constant for the inner REG,
	 and if that is enough to calculate an equivalent constant for
	 the subreg.  Note that the upper bits of paradoxical subregs
	 are undefined, so they cannot be said to equal anything.  */
      if (REG_P (SUBREG_REG (x))
	  && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (imode)
	  && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
	return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));

      return 0;
    }

  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
     the hash table in case its value was seen before.  */

  if (MEM_P (x))
    {
      struct table_elt *elt;

      x = avoid_constant_pool_reference (x);
      if (CONSTANT_P (x))
	return x;

      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
      if (elt == 0)
	return 0;

      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
	if (elt->is_const && CONSTANT_P (elt->exp))
	  return elt->exp;
    }

  return 0;
}
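
/* An illustrative sketch (hypothetical pseudo numbers): if the quantity
   of (reg:SI 70) records const_rtx (const_int 42), equiv_constant
   returns (const_int 42); for (subreg:HI (reg:SI 70) 0) it recurses on
   the inner REG and returns the simplified HImode lowpart.  */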
/* Given INSN, a jump insn, TAKEN indicates if we are following the
   "taken" branch.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of
	if (i == 2)
   we can add the fact that `i' and '2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */

static void
record_jump_equiv (rtx_insn *insn, bool taken)
{
  int cond_known_true;
  rtx op0, op1;
  rtx set;
  machine_mode mode, mode0, mode1;
  int reversed_nonequality = 0;
  enum rtx_code code;

  /* Ensure this is the right kind of insn.  */
  gcc_assert (any_condjump_p (insn));

  set = pc_set (insn);

  /* See if this jump condition is known true or false.  */
  if (taken)
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
  else
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);

  /* Get the type of comparison being done and the operands being compared.
     If we had to reverse a non-equality condition, record that fact so we
     know that it isn't valid for floating-point.  */
  code = GET_CODE (XEXP (SET_SRC (set), 0));
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);

  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
  if (! cond_known_true)
    {
      code = reversed_comparison_code_parts (code, op0, op1, insn);

      /* Don't remember if we can't find the inverse.  */
      if (code == UNKNOWN)
	return;

      reversed_nonequality = (code != NE && code != EQ);
    }

  /* The mode is the mode of the non-constant.  */
  mode = mode0;
  if (mode1 != VOIDmode)
    mode = mode1;

  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}
/* Yet another form of subreg creation.  In this case, we want something in
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */

static rtx
record_jump_cond_subreg (machine_mode mode, rtx op)
{
  machine_mode op_mode = GET_MODE (op);
  if (op_mode == mode || op_mode == VOIDmode)
    return op;
  return lowpart_subreg (mode, op, op_mode);
}
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */

static void
record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
		  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && paradoxical_subreg_p (op0))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
			  reversed_nonequality);
    }

  if (code == EQ && paradoxical_subreg_p (op1))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
			  reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
			  reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
			  reversed_nonequality);
    }

  /* Hash both operands.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal, all we can do is save this
     comparison.  Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register nor a constant, we
	 can't do anything.  */

      if (!REG_P (op1))
	op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
	  || !REG_P (op0) || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, NULL, 0))
	    {
	      rehash_using_reg (op0);
	      op0_hash = HASH (op0, mode);

	      /* If OP0 is contained in OP1, this changes its hash code
		 as well.  Faster to rehash than to check, except
		 for the simple case of a constant.  */
	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1, mode);
	    }

	  op0_elt = insert (op0, NULL, op0_hash, mode);
	  op0_elt->in_memory = op0_in_memory;
	}

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
	{
	  /* Look it up again--in case op0 and op1 are the same.  */
	  op1_elt = lookup (op1, op1_hash, mode);

	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, NULL, 0))
		{
		  rehash_using_reg (op1);
		  op1_hash = HASH (op1, mode);
		}

	      op1_elt = insert (op1, NULL, op1_hash, mode);
	      op1_elt->in_memory = op1_in_memory;
	    }

	  ent->comparison_const = NULL_RTX;
	  ent->comparison_qty = REG_QTY (REGNO (op1));
	}
      else
	{
	  ent->comparison_const = op1;
	  ent->comparison_qty = -1;
	}

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
	{
	  rehash_using_reg (op0);
	  op0_hash = HASH (op0, mode);
	}

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
	{
	  rehash_using_reg (op1);
	  op1_hash = HASH (op1, mode);
	}

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}
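
/* An illustrative sketch (hypothetical pseudo numbers): after following
   the taken edge of a jump on (eq (reg 65) (const_int 2)), the EQ path
   above merges the classes of the two operands, so later uses of
   (reg 65) in the extended basic block can fold to 2.  For GT, LT and
   the like, only the comparison itself is remembered in qty_table.  */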
/* CSE processing for one instruction.

   Most "true" common subexpressions are mostly optimized away in GIMPLE,
   but the few that "leak through" are cleaned up by cse_insn, and complex
   addressing modes are often formed here.

   The main function is cse_insn, and between here and that function
   a couple of helper functions are defined to keep the size of cse_insn
   within reasonable proportions.

   Data is shared between the main and helper functions via STRUCT SET,
   which contains all data related to every set in the instruction that
   is being processed.

   Note that cse_main processes all sets in the instruction.  Most
   passes in GCC only process simple SET insns or single_set insns, but
   CSE processes insns with multiple sets as well.  */

/* Data on one SET contained in the instruction.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.  */
  struct table_elt *dest_addr_elt;
};
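
/* An illustrative sketch (hypothetical RTL): for

       (parallel [(set (reg 60) (mem (reg 70)))
		  (set (reg 61) (const_int 0))])

   find_sets_in_insn below fills two struct set entries, one per SET,
   and cse_insn then runs both through the same hash-table machinery.  */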
/* Special handling for (set REG0 REG1) where REG0 is the
   "cheapest", cheaper than REG1.  After cse, REG1 will probably not
   be used in the sequel, so (if easily done) change this insn to
   (set REG1 REG0) and replace REG1 with REG0 in the previous insn
   that computed their value.  Then REG1 will become a dead store
   and won't cloud the situation for later optimizations.

   Do not make this change if REG1 is a hard register, because it will
   then be used in the sequel and we may be changing a two-operand insn
   into a three-operand insn.

   This is the last transformation that cse_insn will try to do.  */

static void
try_back_substitute_reg (rtx set, rtx_insn *insn)
{
  rtx dest = SET_DEST (set);
  rtx src = SET_SRC (set);

  if (REG_P (dest)
      && REG_P (src) && ! HARD_REGISTER_P (src)
      && REGNO_QTY_VALID_P (REGNO (src)))
    {
      int src_q = REG_QTY (REGNO (src));
      struct qty_table_elem *src_ent = &qty_table[src_q];

      if (src_ent->first_reg == REGNO (dest))
	{
	  /* Scan for the previous nonnote insn, but stop at a basic
	     block boundary.  */
	  rtx_insn *prev = insn;
	  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
	  do
	    {
	      prev = PREV_INSN (prev);
	    }
	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));

	  /* Do not swap the registers around if the previous instruction
	     attaches a REG_EQUIV note to REG1.

	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
	     from the pseudo that originally shadowed an incoming argument
	     to another register.  Some uses of REG_EQUIV might rely on it
	     being attached to REG1 rather than REG2.

	     This section previously turned the REG_EQUIV into a REG_EQUAL
	     note.  We cannot do that because REG_EQUIV may provide an
	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
	  if (NONJUMP_INSN_P (prev)
	      && GET_CODE (PATTERN (prev)) == SET
	      && SET_DEST (PATTERN (prev)) == src
	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
	    {
	      rtx note;

	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
	      validate_change (insn, &SET_DEST (set), src, 1);
	      validate_change (insn, &SET_SRC (set), dest, 1);
	      apply_change_group ();

	      /* If INSN has a REG_EQUAL note, and this note mentions
		 REG0, then we must delete it, because the value in
		 REG0 has changed.  If the note's value is REG1, we must
		 also delete it because that is now this insn's dest.  */
	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	      if (note != 0
		  && (reg_mentioned_p (dest, XEXP (note, 0))
		      || rtx_equal_p (src, XEXP (note, 0))))
		remove_note (insn, note);
	    }
	}
    }
}
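/* Editorial example of the transformation above (register numbers are
   hypothetical, illustration only):

       (set (reg 101) (plus (reg 102) (reg 103)))
       (set (reg 100) (reg 101))       ; reg 100 heads the quantity chain

   becomes

       (set (reg 100) (plus (reg 102) (reg 103)))
       (set (reg 101) (reg 100))       ; now likely a dead store  */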
/* Record all the SETs in this instruction into SETS_PTR,
   and return the number of recorded sets.  */
static int
find_sets_in_insn (rtx_insn *insn, struct set **psets)
{
  struct set *sets = *psets;
  int n_sets = 0;
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == SET)
    {
      /* Ignore SETs that are unconditional jumps.
	 They never need cse processing, so this does not hurt.
	 The reason is not efficiency but rather
	 so that we can test at the end for instructions
	 that have been simplified to unconditional jumps
	 and not be misled by unchanged instructions
	 that were unconditional jumps to begin with.  */
      if (SET_DEST (x) == pc_rtx
	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
	;
      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
	 The hard function value register is used only once, to copy to
	 someplace else, so it isn't worth cse'ing.  */
      else if (GET_CODE (SET_SRC (x)) == CALL)
	;
      else
	sets[n_sets++].rtl = x;
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i, lim = XVECLEN (x, 0);

      /* Go over the expressions of the PARALLEL in forward order, to
	 put them in the same order in the SETS array.  */
      for (i = 0; i < lim; i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == SET)
	    {
	      /* As above, we ignore unconditional jumps and call-insns and
		 ignore the result of apply_change_group.  */
	      if (SET_DEST (y) == pc_rtx
		  && GET_CODE (SET_SRC (y)) == LABEL_REF)
		;
	      else if (GET_CODE (SET_SRC (y)) == CALL)
		;
	      else
		sets[n_sets++].rtl = y;
	    }
	}
    }

  return n_sets;
}
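/* Editorial example (illustration only, hypothetical register numbers):
   a PARALLEL such as

       (parallel [(set (reg 100) (reg 101))
		  (set (reg 102) (reg 103))
		  (clobber (reg:CC 17))])

   yields n_sets == 2; the CLOBBER is not recorded, and any
   (set (pc) (label_ref ...)) member would likewise be skipped.  */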
/* Where possible, substitute every register reference in the N_SETS
   number of SETS in INSN with the canonical register.

   Register canonicalization propagates the earliest register (i.e.
   one that is set before INSN) with the same value.  This is a very
   useful, simple form of CSE, to clean up warts from expanding GIMPLE
   to RTL.  For instance, a CONST for an address is usually expanded
   multiple times to loads into different registers, thus creating many
   subexpressions of the form:

     (set (reg1) (some_const))
     (set (mem (... reg1 ...)) (thing))
     (set (reg2) (some_const))
     (set (mem (... reg2 ...)) (thing))

   After canonicalizing, the code takes the following form:

     (set (reg1) (some_const))
     (set (mem (... reg1 ...)) (thing))
     (set (reg2) (some_const))
     (set (mem (... reg1 ...)) (thing))

   The set to reg2 is now trivially dead, and the memory reference (or
   address, or whatever) may be a candidate for further CSEing.

   In this function, the result of apply_change_group can be ignored;
   see canon_reg.  */
static void
canonicalize_insn (rtx_insn *insn, struct set **psets, int n_sets)
{
  struct set *sets = *psets;
  rtx tem;
  rtx x = PATTERN (insn);
  int i;

  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	if (GET_CODE (XEXP (tem, 0)) != SET)
	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
    }

  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
    {
      canon_reg (SET_SRC (x), insn);
      apply_change_group ();
      fold_rtx (SET_SRC (x), insn);
    }
  else if (GET_CODE (x) == CLOBBER)
    {
      /* If we clobber memory, canon the address.
	 This does nothing when a register is clobbered
	 because we have already invalidated the reg.  */
      if (MEM_P (XEXP (x, 0)))
	canon_reg (XEXP (x, 0), insn);
    }
  else if (GET_CODE (x) == USE
	   && ! (REG_P (XEXP (x, 0))
		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
    /* Canonicalize a USE of a pseudo register or memory location.  */
    canon_reg (x, insn);
  else if (GET_CODE (x) == ASM_OPERANDS)
    {
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	{
	  rtx input = ASM_OPERANDS_INPUT (x, i);
	  if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER))
	    {
	      input = canon_reg (input, insn);
	      validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
	    }
	}
    }
  else if (GET_CODE (x) == CALL)
    {
      canon_reg (x, insn);
      apply_change_group ();
      fold_rtx (x, insn);
    }
  else if (DEBUG_INSN_P (insn))
    canon_reg (PATTERN (insn), insn);
  else if (GET_CODE (x) == PARALLEL)
    {
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
	    {
	      canon_reg (SET_SRC (y), insn);
	      apply_change_group ();
	      fold_rtx (SET_SRC (y), insn);
	    }
	  else if (GET_CODE (y) == CLOBBER)
	    {
	      if (MEM_P (XEXP (y, 0)))
		canon_reg (XEXP (y, 0), insn);
	    }
	  else if (GET_CODE (y) == USE
		   && ! (REG_P (XEXP (y, 0))
			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
	    canon_reg (y, insn);
	  else if (GET_CODE (y) == CALL)
	    {
	      canon_reg (y, insn);
	      apply_change_group ();
	      fold_rtx (y, insn);
	    }
	}
    }

  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
    {
      /* We potentially will process this insn many times.  Therefore,
	 drop the REG_EQUAL note if it is equal to the SET_SRC of the
	 single set insn.

	 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
	 because cse_insn handles those specially.  */
      if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
	  && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
	remove_note (insn, tem);
      else
	{
	  canon_reg (XEXP (tem, 0), insn);
	  apply_change_group ();
	  XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
	  df_notes_rescan (insn);
	}
    }

  /* Canonicalize sources and addresses of destinations.
     We do this in a separate pass to avoid problems when a MATCH_DUP is
     present in the insn pattern.  In that case, we want to ensure that
     we don't break the duplicate nature of the pattern.  So we will replace
     both operands at the same time.  Otherwise, we would fail to find an
     equivalent substitution in the loop calling validate_change below.

     We used to suppress canonicalization of DEST if it appears in SRC,
     but we don't do this any more.  */

  for (i = 0; i < n_sets; i++)
    {
      rtx dest = SET_DEST (sets[i].rtl);
      rtx src = SET_SRC (sets[i].rtl);
      rtx new_rtx = canon_reg (src, insn);

      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);

      if (GET_CODE (dest) == ZERO_EXTRACT)
	{
	  validate_change (insn, &XEXP (dest, 1),
			   canon_reg (XEXP (dest, 1), insn), 1);
	  validate_change (insn, &XEXP (dest, 2),
			   canon_reg (XEXP (dest, 2), insn), 1);
	}

      while (GET_CODE (dest) == SUBREG
	     || GET_CODE (dest) == ZERO_EXTRACT
	     || GET_CODE (dest) == STRICT_LOW_PART)
	dest = XEXP (dest, 0);

      if (MEM_P (dest))
	canon_reg (dest, insn);
    }

  /* Now that we have done all the replacements, we can apply the change
     group and see if they all work.  Note that this will cause some
     canonicalizations that would have worked individually not to be applied
     because some other canonicalization didn't work, but this should not
     occur often.

     The result of apply_change_group can be ignored; see canon_reg.  */

  apply_change_group ();
}
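/* Editorial note (illustration only): deferring apply_change_group until
   all operands have been queued is what keeps MATCH_DUP patterns
   consistent.  Each validate_change call above passes 1 as its final
   in_group argument, so every copy of a duplicated operand is rewritten
   in the same change group and the pattern's duplicate nature survives
   the substitution.  */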
/* Main function of CSE.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.  */
static void
cse_insn (rtx_insn *insn)
{
  rtx x = PATTERN (insn);
  int i;
  rtx tem;
  int n_sets = 0;

  rtx src_eqv = 0;
  struct table_elt *src_eqv_elt = 0;
  int src_eqv_volatile = 0;
  int src_eqv_in_memory = 0;
  unsigned src_eqv_hash = 0;

  struct set *sets = (struct set *) 0;

  if (GET_CODE (x) == SET)
    sets = XALLOCA (struct set);
  else if (GET_CODE (x) == PARALLEL)
    sets = XALLOCAVEC (struct set, XVECLEN (x, 0));

  this_insn = insn;
#ifdef HAVE_cc0
  /* Records what this insn does to set CC0.  */
  this_insn_cc0 = 0;
  this_insn_cc0_mode = VOIDmode;
#endif

  /* Find all regs explicitly clobbered in this insn,
     to ensure they are not replaced with any other regs
     elsewhere in this insn.  */
  invalidate_from_sets_and_clobbers (insn);

  /* Record all the SETs in this instruction.  */
  n_sets = find_sets_in_insn (insn, &sets);

  /* Substitute the canonical register where possible.  */
  canonicalize_insn (insn, &sets, n_sets);

  /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
     if different, or if the DEST is a STRICT_LOW_PART.  The latter condition
     is necessary because SRC_EQV is handled specially for this case, and if
     it isn't set, then there will be no equivalence for the destination.  */
  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
    src_eqv = copy_rtx (XEXP (tem, 0));
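  /* Editorial example (illustration only): for an insn such as

	 (insn (set (reg 100) (plus (reg 101) (const_int 4)))
	       (expr_list:REG_EQUAL (const_int 20) ...))

     SRC_EQV becomes (const_int 20), giving the source a second, often
     cheaper, equivalent form for the replacement search below.  */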
  /* Set sets[i].src_elt to the class each source belongs to.
     Detect assignments from or to volatile things
     and set set[i] to zero so they will be ignored
     in the rest of this function.

     Nothing in this loop changes the hash table or the register chains.  */

  for (i = 0; i < n_sets; i++)
    {
      bool repeat = false;
      rtx src, dest;
      rtx src_folded;
      struct table_elt *elt = 0, *p;
      machine_mode mode;
      rtx src_eqv_here;
      rtx src_const = 0;
      rtx src_related = 0;
      bool src_related_is_const_anchor = false;
      struct table_elt *src_const_elt = 0;
      int src_cost = MAX_COST;
      int src_eqv_cost = MAX_COST;
      int src_folded_cost = MAX_COST;
      int src_related_cost = MAX_COST;
      int src_elt_cost = MAX_COST;
      int src_regcost = MAX_COST;
      int src_eqv_regcost = MAX_COST;
      int src_folded_regcost = MAX_COST;
      int src_related_regcost = MAX_COST;
      int src_elt_regcost = MAX_COST;
      /* Set nonzero if we need to call force_const_mem with the
	 contents of src_folded before using it.  */
      int src_folded_force_flag = 0;

      dest = SET_DEST (sets[i].rtl);
      src = SET_SRC (sets[i].rtl);

      /* If SRC is a constant that has no machine mode,
	 hash it with the destination's machine mode.
	 This way we can keep different modes separate.  */

      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
      sets[i].mode = mode;

      if (src_eqv)
	{
	  machine_mode eqvmode = mode;
	  if (GET_CODE (dest) == STRICT_LOW_PART)
	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
	  do_not_record = 0;
	  hash_arg_in_memory = 0;
	  src_eqv_hash = HASH (src_eqv, eqvmode);

	  /* Find the equivalence class for the equivalent expression.  */

	  if (!do_not_record)
	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);

	  src_eqv_volatile = do_not_record;
	  src_eqv_in_memory = hash_arg_in_memory;
	}

      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
	 value of the INNER register, not the destination.  So it is not
	 a valid substitution for the source.  But save it for later.  */
      if (GET_CODE (dest) == STRICT_LOW_PART)
	src_eqv_here = 0;
      else
	src_eqv_here = src_eqv;
      /* Simplify all foldable subexpressions in SRC.  Then get the fully-
	 simplified result, which may not necessarily be valid.  */
      src_folded = fold_rtx (src, insn);
#if 0
      /* ??? This caused bad code to be generated for the m68k port with -O2.
	 Suppose src is (CONST_INT -1), and that after truncation src_folded
	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
	 At the end we will add src and src_const to the same equivalence
	 class.  We now have 3 and -1 on the same equivalence class.  This
	 causes later instructions to be mis-optimized.  */
      /* If storing a constant in a bitfield, pre-truncate the constant
	 so we will be able to record it later.  */
      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
	{
	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

	  if (CONST_INT_P (src)
	      && CONST_INT_P (width)
	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
	    src_folded
	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
					  << INTVAL (width)) - 1));
	}
#endif
      /* Compute SRC's hash code, and also notice if it
	 should not be recorded at all.  In that case,
	 prevent any further processing of this assignment.  */
      do_not_record = 0;
      hash_arg_in_memory = 0;

      sets[i].src = src;
      sets[i].src_hash = HASH (src, mode);
      sets[i].src_volatile = do_not_record;
      sets[i].src_in_memory = hash_arg_in_memory;

      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
	 a pseudo, do not record SRC.  Using SRC as a replacement for
	 anything else will be incorrect in that situation.  Note that
	 this usually occurs only for stack slots, in which case all the
	 RTL would be referring to SRC, so we don't lose any optimization
	 opportunities by not having SRC in the hash table.  */

      if (MEM_P (src)
	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
	  && REG_P (dest)
	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
	sets[i].src_volatile = 1;

      else if (GET_CODE (src) == ASM_OPERANDS
	       && GET_CODE (x) == PARALLEL)
	{
	  /* Do not record result of a non-volatile inline asm with
	     more than one result.  */
	  if (n_sets > 1)
	    sets[i].src_volatile = 1;

	  int j, lim = XVECLEN (x, 0);
	  for (j = 0; j < lim; j++)
	    {
	      rtx y = XVECEXP (x, 0, j);
	      /* And do not record result of a non-volatile inline asm
		 with "memory" clobber.  */
	      if (GET_CODE (y) == CLOBBER && MEM_P (XEXP (y, 0)))
		{
		  sets[i].src_volatile = 1;
		  break;
		}
	    }
	}

#if 0
      /* It is no longer clear why we used to do this, but it doesn't
	 appear to still be needed.  So let's try without it since this
	 code hurts cse'ing widened ops.  */
      /* If source is a paradoxical subreg (such as QI treated as an SI),
	 treat it as volatile.  It may do the work of an SI in one context
	 where the extra bits are not being used, but cannot replace an SI
	 in general.  */
      if (paradoxical_subreg_p (src))
	sets[i].src_volatile = 1;
#endif
      /* Locate all possible equivalent forms for SRC.  Try to replace
	 SRC in the insn with each cheaper equivalent.

	 We have the following types of equivalents: SRC itself, a folded
	 version, a value given in a REG_EQUAL note, or a value related
	 to a constant.

	 Each of these equivalents may be part of an additional class
	 of equivalents (if more than one is in the table, they must be in
	 the same class; we check for this).

	 If the source is volatile, we don't do any table lookups.

	 We note any constant equivalent for possible later use in a
	 REG_NOTE.  */

      if (!sets[i].src_volatile)
	elt = lookup (src, sets[i].src_hash, mode);

      sets[i].src_elt = elt;

      if (elt && src_eqv_here && src_eqv_elt)
	{
	  if (elt->first_same_value != src_eqv_elt->first_same_value)
	    {
	      /* The REG_EQUAL is indicating that two formerly distinct
		 classes are now equivalent.  So merge them.  */
	      merge_equiv_classes (elt, src_eqv_elt);
	      src_eqv_hash = HASH (src_eqv, elt->mode);
	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
	    }

	  src_eqv_here = 0;
	}

      else if (src_eqv_elt)
	elt = src_eqv_elt;
      /* Try to find a constant somewhere and record it in `src_const'.
	 Record its table element, if any, in `src_const_elt'.  Look in
	 any known equivalences first.  (If the constant is not in the
	 table, also set `sets[i].src_const_hash').  */
      if (elt)
	for (p = elt->first_same_value; p; p = p->next_same_value)
	  if (p->is_const)
	    {
	      src_const = p->exp;
	      src_const_elt = elt;
	      break;
	    }

      if (src_const == 0
	  && (CONSTANT_P (src_folded)
	      /* Consider (minus (label_ref L1) (label_ref L2)) as
		 "constant" here so we will record it.  This allows us
		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
	      || (GET_CODE (src_folded) == MINUS
		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
	src_const = src_folded, src_const_elt = elt;
      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
	src_const = src_eqv_here, src_const_elt = src_eqv_elt;

      /* If we don't know if the constant is in the table, get its
	 hash code and look it up.  */
      if (src_const && src_const_elt == 0)
	{
	  sets[i].src_const_hash = HASH (src_const, mode);
	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
	}

      sets[i].src_const = src_const;
      sets[i].src_const_elt = src_const_elt;

      /* If the constant and our source are both in the table, mark them as
	 equivalent.  Otherwise, if a constant is in the table but the source
	 isn't, set ELT to it.  */
      if (src_const_elt && elt
	  && src_const_elt->first_same_value != elt->first_same_value)
	merge_equiv_classes (elt, src_const_elt);
      else if (src_const_elt && elt == 0)
	elt = src_const_elt;
      /* See if there is a register linearly related to a constant
	 equivalent of SRC.  */
      if (src_const
	  && (GET_CODE (src_const) == CONST
	      || (src_const_elt && src_const_elt->related_value != 0)))
	{
	  src_related = use_related_value (src_const, src_const_elt);
	  if (src_related)
	    {
	      struct table_elt *src_related_elt
		= lookup (src_related, HASH (src_related, mode), mode);
	      if (src_related_elt && elt)
		{
		  if (elt->first_same_value
		      != src_related_elt->first_same_value)
		    /* This can occur when we previously saw a CONST
		       involving a SYMBOL_REF and then see the SYMBOL_REF
		       twice.  Merge the involved classes.  */
		    merge_equiv_classes (elt, src_related_elt);

		  src_related = 0;
		  src_related_elt = 0;
		}
	      else if (src_related_elt && elt == 0)
		elt = src_related_elt;
	    }
	}
      /* See if we have a CONST_INT that is already in a register in a
	 wider mode.  */

      if (src_const && src_related == 0 && CONST_INT_P (src_const)
	  && GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_PRECISION (mode) < BITS_PER_WORD)
	{
	  machine_mode wider_mode;

	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
	       wider_mode != VOIDmode
	       && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
	       && src_related == 0;
	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
	    {
	      struct table_elt *const_elt
		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);

	      if (const_elt == 0)
		continue;

	      for (const_elt = const_elt->first_same_value;
		   const_elt; const_elt = const_elt->next_same_value)
		if (REG_P (const_elt->exp))
		  {
		    src_related = gen_lowpart (mode, const_elt->exp);
		    break;
		  }
	    }
	}
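      /* Editorial example (illustration only, hypothetical register): if
	 (reg:SI 100) is already known to hold (const_int 7) and this set
	 needs the QImode constant 7, the loop above finds the SImode
	 class and gen_lowpart typically yields
	 (subreg:QI (reg:SI 100) 0) as SRC_RELATED, avoiding a fresh
	 load of the constant.  */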
      /* Another possibility is that we have an AND with a constant in
	 a mode narrower than a word.  If so, it might have been generated
	 as part of an "if" which would narrow the AND.  If we already
	 have done the AND in a wider mode, we can use a SUBREG of that
	 value.  */

      if (flag_expensive_optimizations && ! src_related
	  && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
	{
	  machine_mode tmode;
	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));

	  for (tmode = GET_MODE_WIDER_MODE (mode);
	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
	       tmode = GET_MODE_WIDER_MODE (tmode))
	    {
	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
	      struct table_elt *larger_elt;

	      if (inner)
		{
		  PUT_MODE (new_and, tmode);
		  XEXP (new_and, 0) = inner;
		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
		  if (larger_elt == 0)
		    continue;

		  for (larger_elt = larger_elt->first_same_value;
		       larger_elt; larger_elt = larger_elt->next_same_value)
		    if (REG_P (larger_elt->exp))
		      {
			src_related
			  = gen_lowpart (mode, larger_elt->exp);
			break;
		      }

		  if (src_related)
		    break;
		}
	    }
	}
#ifdef LOAD_EXTEND_OP
      /* See if a MEM has already been loaded with a widening operation;
	 if it has, we can use a subreg of that.  Many CISC machines
	 also have such operations, but this is only likely to be
	 beneficial on these machines.  */

      if (flag_expensive_optimizations && src_related == 0
	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
	  && GET_MODE_CLASS (mode) == MODE_INT
	  && MEM_P (src) && ! do_not_record
	  && LOAD_EXTEND_OP (mode) != UNKNOWN)
	{
	  struct rtx_def memory_extend_buf;
	  rtx memory_extend_rtx = &memory_extend_buf;
	  machine_mode tmode;

	  /* Set what we are trying to extend and the operation it might
	     have been extended with.  */
	  memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
	  XEXP (memory_extend_rtx, 0) = src;

	  for (tmode = GET_MODE_WIDER_MODE (mode);
	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
	       tmode = GET_MODE_WIDER_MODE (tmode))
	    {
	      struct table_elt *larger_elt;

	      PUT_MODE (memory_extend_rtx, tmode);
	      larger_elt = lookup (memory_extend_rtx,
				   HASH (memory_extend_rtx, tmode), tmode);
	      if (larger_elt == 0)
		continue;

	      for (larger_elt = larger_elt->first_same_value;
		   larger_elt; larger_elt = larger_elt->next_same_value)
		if (REG_P (larger_elt->exp))
		  {
		    src_related = gen_lowpart (mode, larger_elt->exp);
		    break;
		  }

	      if (src_related)
		break;
	    }
	}
#endif /* LOAD_EXTEND_OP */
      /* Try to express the constant using a register+offset expression
	 derived from a constant anchor.  */

      if (targetm.const_anchor
	  && !src_related
	  && src_const
	  && GET_CODE (src_const) == CONST_INT)
	{
	  src_related = try_const_anchors (src_const, mode);
	  src_related_is_const_anchor = src_related != NULL_RTX;
	}
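      /* Editorial example (illustration only, hypothetical values): with
	 a target anchor of 0x8000, loading (const_int 0x8002) can be
	 rewritten as (plus (reg 100) (const_int 2)) when some register
	 (here reg 100) is already known to hold the anchor value 0x8000;
	 try_const_anchors performs that table lookup.  */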
      if (src == src_folded)
	src_folded = 0;

      /* At this point, ELT, if nonzero, points to a class of expressions
	 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
	 and SRC_RELATED, if nonzero, each contain additional equivalent
	 expressions.  Prune these latter expressions by deleting expressions
	 already in the equivalence class.

	 Check for an equivalent identical to the destination.  If found,
	 this is the preferred equivalent since it will likely lead to
	 elimination of the insn.  Indicate this by placing it in
	 `src_related'.  */

      if (elt)
	elt = elt->first_same_value;
      for (p = elt; p; p = p->next_same_value)
	{
	  enum rtx_code code = GET_CODE (p->exp);

	  /* If the expression is not valid, ignore it.  Then we do not
	     have to check for validity below.  In most cases, we can use
	     `rtx_equal_p', since canonicalization has already been done.  */
	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  /* Also skip paradoxical subregs, unless that's what we're
	     looking for.  */
	  if (paradoxical_subreg_p (p->exp)
	      && ! (src != 0
		    && GET_CODE (src) == SUBREG
		    && GET_MODE (src) == GET_MODE (p->exp)
		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
	    continue;

	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
	    src = 0;
	  else if (src_folded && GET_CODE (src_folded) == code
		   && rtx_equal_p (src_folded, p->exp))
	    src_folded = 0;
	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
		   && rtx_equal_p (src_eqv_here, p->exp))
	    src_eqv_here = 0;
	  else if (src_related && GET_CODE (src_related) == code
		   && rtx_equal_p (src_related, p->exp))
	    src_related = 0;

	  /* If this is the same as the destination of the insn, we want
	     to prefer it.  Copy it to src_related.  The code below will
	     then give it a negative cost.  */
	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
	    src_related = dest;
	}

      /* Find the cheapest valid equivalent, trying all the available
	 possibilities.  Prefer items not in the hash table to ones
	 that are when they are equal cost.  Note that we can never
	 worsen an insn as the current contents will also succeed.
	 If we find an equivalent identical to the destination, use it as best,
	 since this insn will probably be eliminated in that case.  */
      if (src)
	{
	  if (rtx_equal_p (src, dest))
	    src_cost = src_regcost = -1;
	  else
	    {
	      src_cost = COST (src);
	      src_regcost = approx_reg_cost (src);
	    }
	}

      if (src_eqv_here)
	{
	  if (rtx_equal_p (src_eqv_here, dest))
	    src_eqv_cost = src_eqv_regcost = -1;
	  else
	    {
	      src_eqv_cost = COST (src_eqv_here);
	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
	    }
	}

      if (src_folded)
	{
	  if (rtx_equal_p (src_folded, dest))
	    src_folded_cost = src_folded_regcost = -1;
	  else
	    {
	      src_folded_cost = COST (src_folded);
	      src_folded_regcost = approx_reg_cost (src_folded);
	    }
	}

      if (src_related)
	{
	  if (rtx_equal_p (src_related, dest))
	    src_related_cost = src_related_regcost = -1;
	  else
	    {
	      src_related_cost = COST (src_related);
	      src_related_regcost = approx_reg_cost (src_related);

	      /* If a const-anchor is used to synthesize a constant that
		 normally requires multiple instructions then slightly prefer
		 it over the original sequence.  These instructions are likely
		 to become redundant now.  We can't compare against the cost
		 of src_eqv_here because, on MIPS for example, multi-insn
		 constants have zero cost; they are assumed to be hoisted from
		 loops.  */
	      if (src_related_is_const_anchor
		  && src_related_cost == src_cost
		  && src_eqv_here)
		src_related_cost--;
	    }
	}

      /* If this was an indirect jump insn, a known label will really be
	 cheaper even though it looks more expensive.  */
      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;

      /* Terminate loop when replacement made.  This must terminate since
	 the current contents will be tested and will always be valid.  */
      while (1)
	{
	  rtx trial;

	  /* Skip invalid entries.  */
	  while (elt && !REG_P (elt->exp)
		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
	    elt = elt->next_same_value;

	  /* A paradoxical subreg would be bad here: it'll be the right
	     size, but later may be adjusted so that the upper bits aren't
	     what we want.  So reject it.  */
	  if (elt != 0
	      && paradoxical_subreg_p (elt->exp)
	      /* It is okay, though, if the rtx we're trying to match
		 will ignore any of the bits we can't predict.  */
	      && ! (src != 0
		    && GET_CODE (src) == SUBREG
		    && GET_MODE (src) == GET_MODE (elt->exp)
		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
	    {
	      elt = elt->next_same_value;
	      continue;
	    }

	  if (elt)
	    {
	      src_elt_cost = elt->cost;
	      src_elt_regcost = elt->regcost;
	    }

	  /* Find cheapest and skip it for the next time.  For items
	     of equal cost, use this order:
	     src_folded, src, src_eqv, src_related and hash table entry.  */
	  if (src_folded
	      && preferable (src_folded_cost, src_folded_regcost,
			     src_cost, src_regcost) <= 0
	      && preferable (src_folded_cost, src_folded_regcost,
			     src_eqv_cost, src_eqv_regcost) <= 0
	      && preferable (src_folded_cost, src_folded_regcost,
			     src_related_cost, src_related_regcost) <= 0
	      && preferable (src_folded_cost, src_folded_regcost,
			     src_elt_cost, src_elt_regcost) <= 0)
	    {
	      trial = src_folded, src_folded_cost = MAX_COST;
	      if (src_folded_force_flag)
		{
		  rtx forced = force_const_mem (mode, trial);
		  if (forced)
		    trial = forced;
		}
	    }
	  else if (src
		   && preferable (src_cost, src_regcost,
				  src_eqv_cost, src_eqv_regcost) <= 0
		   && preferable (src_cost, src_regcost,
				  src_related_cost, src_related_regcost) <= 0
		   && preferable (src_cost, src_regcost,
				  src_elt_cost, src_elt_regcost) <= 0)
	    trial = src, src_cost = MAX_COST;
	  else if (src_eqv_here
		   && preferable (src_eqv_cost, src_eqv_regcost,
				  src_related_cost, src_related_regcost) <= 0
		   && preferable (src_eqv_cost, src_eqv_regcost,
				  src_elt_cost, src_elt_regcost) <= 0)
	    trial = src_eqv_here, src_eqv_cost = MAX_COST;
	  else if (src_related
		   && preferable (src_related_cost, src_related_regcost,
				  src_elt_cost, src_elt_regcost) <= 0)
	    trial = src_related, src_related_cost = MAX_COST;
	  else
	    {
	      trial = elt->exp;
	      elt = elt->next_same_value;
	      src_elt_cost = MAX_COST;
	    }
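	  /* Editorial note (illustration only): with equal (cost, regcost)
	     pairs, preferable () keeps the tie-break order src_folded,
	     src, src_eqv, src_related, then hash-table entries, so a
	     folded form wins over an equally cheap table entry.  The loop
	     terminates because each chosen candidate's cost is bumped to
	     MAX_COST once tried, and the original contents always remain
	     a valid fallback.  */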
	  /* Avoid creation of overlapping memory moves.  */
	  if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
	    {
	      rtx src, dest;

	      /* BLKmode moves are not handled by cse anyway.  */
	      if (GET_MODE (trial) == BLKmode)
		break;

	      src = canon_rtx (trial);
	      dest = canon_rtx (SET_DEST (sets[i].rtl));

	      if (!MEM_P (src) || !MEM_P (dest)
		  || !nonoverlapping_memrefs_p (src, dest, false))
		break;
	    }

	  /* Try to optimize
	     (set (reg:M N) (const_int A))
	     (set (reg:M2 O) (const_int B))
	     (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
		  (reg:M2 O)).  */
	  if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
	      && CONST_INT_P (trial)
	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
	      && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
	      && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
		  >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
	      && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
		  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
		  <= HOST_BITS_PER_WIDE_INT))
	    {
	      rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
	      rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
	      rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
	      unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
	      struct table_elt *dest_elt
		= lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
	      rtx dest_cst = NULL;

	      if (dest_elt)
		for (p = dest_elt->first_same_value; p; p = p->next_same_value)
		  if (p->is_const && CONST_INT_P (p->exp))
		    {
		      dest_cst = p->exp;
		      break;
		    }

	      if (dest_cst)
		{
		  HOST_WIDE_INT val = INTVAL (dest_cst);
		  HOST_WIDE_INT mask;
		  unsigned int shift;
		  if (BITS_BIG_ENDIAN)
		    shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
			    - INTVAL (pos) - INTVAL (width);
		  else
		    shift = INTVAL (pos);
		  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
		    mask = ~(HOST_WIDE_INT) 0;
		  else
		    mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
		  val &= ~(mask << shift);
		  val |= (INTVAL (trial) & mask) << shift;
		  val = trunc_int_for_mode (val, GET_MODE (dest_reg));
		  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
					   dest_reg, 1);
		  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
					   GEN_INT (val), 1);
		  if (apply_change_group ())
		    {
		      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
		      if (note)
			{
			  remove_note (insn, note);
			  df_notes_rescan (insn);
			}
		      src_eqv = NULL_RTX;
		      src_eqv_elt = NULL;
		      src_eqv_volatile = 0;
		      src_eqv_in_memory = 0;
		      src_eqv_hash = 0;
		      repeat = true;
		      break;
		    }
		}
	    }
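	  /* Editorial worked example (illustration only, little-endian
	     bit numbering, hypothetical values): storing 5 into
	     (zero_extract:SI (reg:SI 100) (const_int 4) (const_int 8))
	     when reg 100 is known to hold 0x12345678:

		 shift = 8, mask = 0xf
		 val  = 0x12345678 & ~(0xf << 8)  -> 0x12345078
		 val |= (5 & 0xf) << 8            -> 0x12345578

	     so the insn can be rewritten as
	     (set (reg:SI 100) (const_int 0x12345578)).  */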
	  /* We don't normally have an insn matching (set (pc) (pc)), so
	     check for this separately here.  We will delete such an
	     insn below.

	     For other cases such as a table jump or conditional jump
	     where we know the ultimate target, go ahead and replace the
	     operand.  While that may not make a valid insn, we will
	     reemit the jump below (and also insert any necessary
	     barriers).  */
	  if (n_sets == 1 && dest == pc_rtx
	      && (trial == pc_rtx
		  || (GET_CODE (trial) == LABEL_REF
		      && ! condjump_p (insn))))
	    {
	      /* Don't substitute non-local labels, this confuses CFG.  */
	      if (GET_CODE (trial) == LABEL_REF
		  && LABEL_REF_NONLOCAL_P (trial))
		SET_SRC (sets[i].rtl) = pc_rtx;
	      else
		SET_SRC (sets[i].rtl) = trial;
	      cse_jumps_altered = true;
	      break;
	    }

	  /* Reject certain invalid forms of CONST that we create.  */
	  else if (CONSTANT_P (trial)
		   && GET_CODE (trial) == CONST
		   /* Reject cases that will cause decode_rtx_const to
		      die.  On the alpha when simplifying a switch, we
		      get (const (truncate (minus (label_ref)
		      (label_ref)))).  */
		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
		       /* Likewise on IA-64, except without the
			  truncate.  */
		       || (GET_CODE (XEXP (trial, 0)) == MINUS
			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
	    /* Do nothing for this case.  */
	    ;

	  /* Look for a substitution that makes a valid insn.  */
	  else if (validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
					    trial, 0))
	    {
	      rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);

	      /* The result of apply_change_group can be ignored; see
		 canon_reg.  */

	      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
	      apply_change_group ();

	      break;
	    }

	  /* If we previously found constant pool entries for
	     constants and this is a constant, try making a
	     pool entry.  Put it in src_folded unless we already have done
	     this since that is where it likely came from.  */

	  else if (constant_pool_entries_cost
		   && CONSTANT_P (trial)
		   && (src_folded == 0
		       || (!MEM_P (src_folded)
			   && ! src_folded_force_flag))
		   && GET_MODE_CLASS (mode) != MODE_CC
		   && mode != VOIDmode)
	    {
	      src_folded_force_flag = 1;
	      src_folded = trial;
	      src_folded_cost = constant_pool_entries_cost;
	      src_folded_regcost = constant_pool_entries_regcost;
	    }
	}
      /* If we changed the insn too much, handle this set from scratch.  */
      if (repeat)
	{
	  i--;
	  continue;
	}

      src = SET_SRC (sets[i].rtl);

      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
	 However, there is an important exception:  If both are registers
	 that are not the head of their equivalence class, replace SET_SRC
	 with the head of the class.  If we do not do this, we will have
	 both registers live over a portion of the basic block.  This way,
	 their lifetimes will likely abut instead of overlapping.  */
      if (REG_P (dest)
	  && REGNO_QTY_VALID_P (REGNO (dest)))
	{
	  int dest_q = REG_QTY (REGNO (dest));
	  struct qty_table_elem *dest_ent = &qty_table[dest_q];

	  if (dest_ent->mode == GET_MODE (dest)
	      && dest_ent->first_reg != REGNO (dest)
	      && REG_P (src) && REGNO (src) == REGNO (dest)
	      /* Don't do this if the original insn had a hard reg as
		 SET_SRC or SET_DEST.  */
	      && (!REG_P (sets[i].src)
		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
	    /* We can't call canon_reg here because it won't do anything if
	       SRC is a hard register.  */
	    {
	      int src_q = REG_QTY (REGNO (src));
	      struct qty_table_elem *src_ent = &qty_table[src_q];
	      int first = src_ent->first_reg;
	      rtx new_src
		= (first >= FIRST_PSEUDO_REGISTER
		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));

	      /* We must use validate-change even for this, because this
		 might be a special no-op instruction, suitable only to
		 tag notes onto.  */
	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
		{
		  src = new_src;
		  /* If we had a constant that is cheaper than what we are now
		     setting SRC to, use that constant.  We ignored it when we
		     thought we could make this into a no-op.  */
		  if (src_const && COST (src_const) < COST (src)
		      && validate_change (insn, &SET_SRC (sets[i].rtl),
					  src_const, 0))
		    src = src_const;
		}
	    }
	}
      /* If we made a change, recompute SRC values.  */
      if (src != sets[i].src)
	{
	  do_not_record = 0;
	  hash_arg_in_memory = 0;
	  sets[i].src = src;
	  sets[i].src_hash = HASH (src, mode);
	  sets[i].src_volatile = do_not_record;
	  sets[i].src_in_memory = hash_arg_in_memory;
	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
	}

      /* If this is a single SET, we are setting a register, and we have an
	 equivalent constant, we want to add a REG_EQUAL note if the constant
	 is different from the source.  We don't want to do it for a constant
	 pseudo since verifying that this pseudo hasn't been eliminated is a
	 pain; moreover such a note won't help anything.

	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
	 which can be created for a reference to a compile time computable
	 entry in a jump table.  */
      if (n_sets == 1 && src_const && REG_P (dest)
	  && !REG_P (src_const)
	  && !(GET_CODE (src_const) == SUBREG
	       && REG_P (SUBREG_REG (src_const)))
	  && !(GET_CODE (src_const) == CONST
	       && GET_CODE (XEXP (src_const, 0)) == MINUS
	       && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
	       && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
	  && !rtx_equal_p (src, src_const))
	{
	  /* Make sure that the rtx is not shared.  */
	  src_const = copy_rtx (src_const);

	  /* Record the actual constant value in a REG_EQUAL note,
	     making a new one if one does not already exist.  */
	  set_unique_reg_note (insn, REG_EQUAL, src_const);
	  df_notes_rescan (insn);
	}
      /* Now deal with the destination.  */
      do_not_record = 0;

      /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
      while (GET_CODE (dest) == SUBREG
	     || GET_CODE (dest) == ZERO_EXTRACT
	     || GET_CODE (dest) == STRICT_LOW_PART)
	dest = XEXP (dest, 0);

      sets[i].inner_dest = dest;

      if (MEM_P (dest))
	{
#ifdef PUSH_ROUNDING
	  /* Stack pushes invalidate the stack pointer.  */
	  rtx addr = XEXP (dest, 0);
	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
	      && XEXP (addr, 0) == stack_pointer_rtx)
	    invalidate (stack_pointer_rtx, VOIDmode);
#endif
	  dest = fold_rtx (dest, insn);
	}

      /* Compute the hash code of the destination now,
	 before the effects of this instruction are recorded,
	 since the register values used in the address computation
	 are those before this instruction.  */
      sets[i].dest_hash = HASH (dest, mode);

      /* Don't enter a bit-field in the hash table
	 because the value in it after the store
	 may not equal what was stored, due to truncation.  */

      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
	{
	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

	  if (src_const != 0 && CONST_INT_P (src_const)
	      && CONST_INT_P (width)
	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
	      && ! (INTVAL (src_const)
		    & (HOST_WIDE_INT_M1U << INTVAL (width))))
	    /* Exception: if the value is constant,
	       and it won't be truncated, record it.  */
	    ;
	  else
	    {
	      /* This is chosen so that the destination will be invalidated
		 but no new value will be recorded.
		 We must invalidate because sometimes constant
		 values can be recorded for bitfields.  */
	      sets[i].src_elt = 0;
	      sets[i].src_volatile = 1;
	      src_eqv = 0;
	      src_eqv_elt = 0;
	    }
	}
      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
	 the insn.  */
      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
	{
	  /* One less use of the label this insn used to jump to.  */
	  delete_insn_and_edges (insn);
	  cse_jumps_altered = true;
	  /* No more processing for this set.  */
	  sets[i].rtl = 0;
	}

      /* If this SET is now setting PC to a label, we know it used to
	 be a conditional or computed branch.  */
      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
	       && !LABEL_REF_NONLOCAL_P (src))
	{
	  /* We reemit the jump in as many cases as possible just in
	     case the form of an unconditional jump is significantly
	     different from a computed jump or conditional jump.

	     If this insn has multiple sets, then reemitting the
	     jump is nontrivial.  So instead we just force rerecognition
	     and hope for the best.  */
	  if (n_sets == 1)
	    {
	      rtx_insn *new_rtx;
	      rtx note;

	      new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
	      JUMP_LABEL (new_rtx) = XEXP (src, 0);
	      LABEL_NUSES (XEXP (src, 0))++;

	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
	      if (note)
		{
		  XEXP (note, 1) = NULL_RTX;
		  REG_NOTES (new_rtx) = note;
		}

	      delete_insn_and_edges (insn);
	      insn = new_rtx;
	    }
	  else
	    INSN_CODE (insn) = -1;

	  /* Do not bother deleting any unreachable code, let jump do it.  */
	  cse_jumps_altered = true;
	  sets[i].rtl = 0;
	}
      /* If destination is volatile, invalidate it and then do no further
	 processing for this assignment.  */

      else if (do_not_record)
	{
	  if (REG_P (dest) || GET_CODE (dest) == SUBREG)
	    invalidate (dest, VOIDmode);
	  else if (MEM_P (dest))
	    invalidate (dest, VOIDmode);
	  else if (GET_CODE (dest) == STRICT_LOW_PART
		   || GET_CODE (dest) == ZERO_EXTRACT)
	    invalidate (XEXP (dest, 0), GET_MODE (dest));
	  sets[i].rtl = 0;
	}

      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);

#ifdef HAVE_cc0
      /* If setting CC0, record what it was set to, or a constant, if it
	 is equivalent to a constant.  If it is being set to a floating-point
	 value, make a COMPARE with the appropriate constant of 0.  If we
	 don't do this, later code can interpret this as a test against
	 const0_rtx, which can cause problems if we try to put it into an
	 insn as a floating-point operand.  */
      if (dest == cc0_rtx)
	{
	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
	  this_insn_cc0_mode = mode;
	  if (FLOAT_MODE_P (mode))
	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
					     CONST0_RTX (mode));
	}
#endif
    }
  /* Now enter all non-volatile source expressions in the hash table
     if they are not already present.
     Record their equivalence classes in src_elt.
     This way we can insert the corresponding destinations into
     the same classes even if the actual sources are no longer in them
     (having been invalidated).  */

  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
    {
      struct table_elt *elt;
      struct table_elt *classp = sets[0].src_elt;
      rtx dest = SET_DEST (sets[0].rtl);
      machine_mode eqvmode = GET_MODE (dest);

      if (GET_CODE (dest) == STRICT_LOW_PART)
	{
	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
	  classp = 0;
	}
      if (insert_regs (src_eqv, classp, 0))
	{
	  rehash_using_reg (src_eqv);
	  src_eqv_hash = HASH (src_eqv, eqvmode);
	}
      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
      elt->in_memory = src_eqv_in_memory;
      src_eqv_elt = elt;

      /* Check to see if src_eqv_elt is the same as a set source which
	 does not yet have an elt, and if so set the elt of the set source
	 to src_eqv_elt.  */
      for (i = 0; i < n_sets; i++)
	if (sets[i].rtl && sets[i].src_elt == 0
	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
	  sets[i].src_elt = src_eqv_elt;
    }
  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl && ! sets[i].src_volatile
	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
      {
	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
	  {
	    /* REG_EQUAL in setting a STRICT_LOW_PART
	       gives an equivalent for the entire destination register,
	       not just for the subreg being stored in now.
	       This is a more interesting equivalence, so we arrange later
	       to treat the entire reg as the destination.  */
	    sets[i].src_elt = src_eqv_elt;
	    sets[i].src_hash = src_eqv_hash;
	  }
	else
	  {
	    /* Insert source and constant equivalent into hash table, if not
	       already present.  */
	    struct table_elt *classp = src_eqv_elt;
	    rtx src = sets[i].src;
	    rtx dest = SET_DEST (sets[i].rtl);
	    machine_mode mode
	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);

	    /* It's possible that we have a source value known to be
	       constant but don't have a REG_EQUAL note on the insn.
	       Lack of a note will mean src_eqv_elt will be NULL.  This
	       can happen where we've generated a SUBREG to access a
	       CONST_INT that is already in a register in a wider mode.
	       Ensure that the source expression is put in the proper
	       constant class.  */
	    if (!classp)
	      classp = sets[i].src_const_elt;

	    if (sets[i].src_elt == 0)
	      {
		struct table_elt *elt;

		/* Note that these insert_regs calls cannot remove
		   any of the src_elt's, because they would have failed to
		   match if not still valid.  */
		if (insert_regs (src, classp, 0))
		  {
		    rehash_using_reg (src);
		    sets[i].src_hash = HASH (src, mode);
		  }
		elt = insert (src, classp, sets[i].src_hash, mode);
		elt->in_memory = sets[i].src_in_memory;
		/* If inline asm has any clobbers, ensure we only reuse
		   existing inline asms and never try to put the ASM_OPERANDS
		   into an insn that isn't inline asm.  */
		if (GET_CODE (src) == ASM_OPERANDS
		    && GET_CODE (x) == PARALLEL)
		  elt->cost = MAX_COST;
		sets[i].src_elt = classp = elt;
	      }
	    if (sets[i].src_const && sets[i].src_const_elt == 0
		&& src != sets[i].src_const
		&& ! rtx_equal_p (sets[i].src_const, src))
	      sets[i].src_elt = insert (sets[i].src_const, classp,
					sets[i].src_const_hash, mode);
	  }
      }
    else if (sets[i].src_elt == 0)
      /* If we did not insert the source into the hash table (e.g., it was
	 volatile), note the equivalence class for the REG_EQUAL value, if any,
	 so that the destination goes into that class.  */
      sets[i].src_elt = src_eqv_elt;
  /* Record destination addresses in the hash table.  This allows us to
     check if they are invalidated by other sets.  */
  for (i = 0; i < n_sets; i++)
    {
      if (sets[i].rtl)
	{
	  rtx x = sets[i].inner_dest;
	  struct table_elt *elt;
	  machine_mode mode;
	  unsigned hash;

	  if (MEM_P (x))
	    {
	      x = XEXP (x, 0);
	      mode = GET_MODE (x);
	      hash = HASH (x, mode);
	      elt = lookup (x, hash, mode);
	      if (!elt)
		{
		  if (insert_regs (x, NULL, 0))
		    {
		      rtx dest = SET_DEST (sets[i].rtl);

		      rehash_using_reg (x);
		      hash = HASH (x, mode);
		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
		    }
		  elt = insert (x, NULL, hash, mode);
		}

	      sets[i].dest_addr_elt = elt;
	    }
	  else
	    sets[i].dest_addr_elt = NULL;
	}
    }
  invalidate_from_clobbers (insn);

  /* Some registers are invalidated by subroutine calls.  Memory is
     invalidated by non-constant calls.  */

  if (CALL_P (insn))
    {
      if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
	invalidate_memory ();
      invalidate_for_call ();
    }

  /* Now invalidate everything set by this instruction.
     If a SUBREG or other funny destination is being set,
     sets[i].rtl is still nonzero, so here we invalidate the reg
     a part of which is being set.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
	/* We can't use the inner dest, because the mode associated with
	   a ZERO_EXTRACT is significant.  */
	rtx dest = SET_DEST (sets[i].rtl);

	/* Needed for registers to remove the register from its
	   previous quantity's chain.
	   Needed for memory if this is a nonvarying address, unless
	   we have just done an invalidate_memory that covers even those.  */
	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
	  invalidate (dest, VOIDmode);
	else if (MEM_P (dest))
	  invalidate (dest, VOIDmode);
	else if (GET_CODE (dest) == STRICT_LOW_PART
		 || GET_CODE (dest) == ZERO_EXTRACT)
	  invalidate (XEXP (dest, 0), GET_MODE (dest));
      }

  /* Don't cse over a call to setjmp; on some machines (eg VAX)
     the regs restored by the longjmp come from a later time
     than the setjmp.  */
  if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
    {
      flush_hash_table ();
      goto done;
    }
  /* Make sure registers mentioned in destinations
     are safe for use in an expression to be inserted.
     This removes from the hash table
     any invalid entry that refers to one of these registers.

     We don't care about the return value from mention_regs because
     we are going to hash the SET_DEST values unconditionally.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
	rtx x = SET_DEST (sets[i].rtl);

	if (!REG_P (x))
	  mention_regs (x);
	else
	  {
	    /* We used to rely on all references to a register becoming
	       inaccessible when a register changes to a new quantity,
	       since that changes the hash code.  However, that is not
	       safe, since after HASH_SIZE new quantities we get a
	       hash 'collision' of a register with its own invalid
	       entries.  And since SUBREGs have been changed not to
	       change their hash code with the hash code of the register,
	       it wouldn't work any longer at all.  So we have to check
	       for any invalid references lying around now.
	       This code is similar to the REG case in mention_regs,
	       but it knows that reg_tick has been incremented, and
	       it leaves reg_in_table as -1 .  */
	    unsigned int regno = REGNO (x);
	    unsigned int endregno = END_REGNO (x);
	    unsigned int i;

	    for (i = regno; i < endregno; i++)
	      {
		if (REG_IN_TABLE (i) >= 0)
		  {
		    remove_invalid_refs (i);
		    REG_IN_TABLE (i) = -1;
		  }
	      }
	  }
      }
  /* We may have just removed some of the src_elt's from the hash table.
     So replace each one with the current head of the same class.
     Also check if destination addresses have been removed.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
	if (sets[i].dest_addr_elt
	    && sets[i].dest_addr_elt->first_same_value == 0)
	  {
	    /* The elt was removed, which means this destination is not
	       valid after this instruction.  */
	    sets[i].rtl = NULL_RTX;
	  }
	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
	  /* If elt was removed, find current head of same class,
	     or 0 if nothing remains of that class.  */
	  {
	    struct table_elt *elt = sets[i].src_elt;

	    while (elt && elt->prev_same_value)
	      elt = elt->prev_same_value;

	    while (elt && elt->first_same_value == 0)
	      elt = elt->next_same_value;
	    sets[i].src_elt = elt ? elt->first_same_value : 0;
	  }
      }
  /* Now insert the destinations into their equivalence classes.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
	rtx dest = SET_DEST (sets[i].rtl);
	struct table_elt *elt;

	/* Don't record value if we are not supposed to risk allocating
	   floating-point values in registers that might be wider than
	   memory.  */
	if ((flag_float_store
	     && MEM_P (dest)
	     && FLOAT_MODE_P (GET_MODE (dest)))
	    /* Don't record BLKmode values, because we don't know the
	       size of it, and can't be sure that other BLKmode values
	       have the same or smaller size.  */
	    || GET_MODE (dest) == BLKmode
	    /* If we didn't put a REG_EQUAL value or a source into the hash
	       table, there is no point in recording DEST.  */
	    || sets[i].src_elt == 0
	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
	       or SIGN_EXTEND, don't record DEST since it can cause
	       some tracking to be wrong.

	       ??? Think about this more later.  */
	    || (paradoxical_subreg_p (dest)
		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
	  continue;

	/* STRICT_LOW_PART isn't part of the value BEING set,
	   and neither is the SUBREG inside it.
	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
	if (GET_CODE (dest) == STRICT_LOW_PART)
	  dest = SUBREG_REG (XEXP (dest, 0));

	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
	  /* Registers must also be inserted into chains for quantities.  */
	  if (insert_regs (dest, sets[i].src_elt, 1))
	    {
	      /* If `insert_regs' changes something, the hash code must be
		 recalculated.  */
	      rehash_using_reg (dest);
	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
	    }

	elt = insert (dest, sets[i].src_elt,
		      sets[i].dest_hash, GET_MODE (dest));

	/* If this is a constant, insert the constant anchors with the
	   equivalent register-offset expressions using register DEST.  */
	if (targetm.const_anchor
	    && REG_P (dest)
	    && SCALAR_INT_MODE_P (GET_MODE (dest))
	    && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
	  insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));

	elt->in_memory = (MEM_P (sets[i].inner_dest)
			  && !MEM_READONLY_P (sets[i].inner_dest));
	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
	   narrower than M2, and both M1 and M2 are the same number of words,
	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
	   make that equivalence as well.

	   However, BAR may have equivalences for which gen_lowpart
	   will produce a simpler value than gen_lowpart applied to
	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
	   BAR's equivalences.  If we don't get a simplified form, make
	   the SUBREG.  It will not be used in an equivalence, but will
	   cause two similar assignments to be detected.

	   Note the loop below will find SUBREG_REG (DEST) since we have
	   already entered SRC and DEST of the SET in the table.  */

	if (GET_CODE (dest) == SUBREG
	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
		 / UNITS_PER_WORD)
		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
	    && (GET_MODE_SIZE (GET_MODE (dest))
		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
	    && sets[i].src_elt != 0)
	  {
	    machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
	    struct table_elt *elt, *classp = 0;

	    for (elt = sets[i].src_elt->first_same_value; elt;
		 elt = elt->next_same_value)
	      {
		rtx new_src = 0;
		unsigned src_hash;
		struct table_elt *src_elt;
		int byte = 0;

		/* Ignore invalid entries.  */
		if (!REG_P (elt->exp)
		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
		  continue;

		/* We may have already been playing subreg games.  If the
		   mode is already correct for the destination, use it.  */
		if (GET_MODE (elt->exp) == new_mode)
		  new_src = elt->exp;
		else
		  {
		    /* Calculate big endian correction for the SUBREG_BYTE.
		       We have already checked that M1 (GET_MODE (dest))
		       is not narrower than M2 (new_mode).  */
		    if (BYTES_BIG_ENDIAN)
		      byte = (GET_MODE_SIZE (GET_MODE (dest))
			      - GET_MODE_SIZE (new_mode));

		    new_src = simplify_gen_subreg (new_mode, elt->exp,
						   GET_MODE (dest), byte);
		  }
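		/* Editorial worked example (illustration only): for dest
		   (subreg:SI (reg:HI 100) 0) we have M1 = SImode (4 bytes)
		   and new_mode = M2 = HImode (2 bytes), both one word.  On
		   a big-endian target the low part sits at the high-address
		   end, so byte = 4 - 2 = 2 and we ask for
		   (subreg:HI <equiv> 2); on little-endian, byte stays 0.  */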
		/* The call to simplify_gen_subreg fails if the value
		   is VOIDmode, yet we can't do any simplification, e.g.
		   for EXPR_LISTs denoting function call results.
		   It is invalid to construct a SUBREG with a VOIDmode
		   SUBREG_REG, hence a zero new_src means we can't do
		   this substitution.  */
		if (! new_src)
		  continue;

		src_hash = HASH (new_src, new_mode);
		src_elt = lookup (new_src, src_hash, new_mode);

		/* Put the new source in the hash table if it isn't
		   already there.  */
		if (src_elt == 0)
		  {
		    if (insert_regs (new_src, classp, 0))
		      {
			rehash_using_reg (new_src);
			src_hash = HASH (new_src, new_mode);
		      }
		    src_elt = insert (new_src, classp, src_hash, new_mode);
		    src_elt->in_memory = elt->in_memory;
		    if (GET_CODE (new_src) == ASM_OPERANDS
			&& elt->cost == MAX_COST)
		      src_elt->cost = MAX_COST;
		  }
		else if (classp && classp != src_elt->first_same_value)
		  /* Show that two things that we've seen before are
		     actually the same.  */
		  merge_equiv_classes (src_elt, classp);

		classp = src_elt->first_same_value;
		/* Ignore invalid entries.  */
		while (classp
		       && !REG_P (classp->exp)
		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
		  classp = classp->next_same_value;
	      }
	  }
      }
  /* Special handling for (set REG0 REG1) where REG0 is the
     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
     be used in the sequel, so (if easily done) change this insn to
     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
     that computed their value.  Then REG1 will become a dead store
     and won't cloud the situation for later optimizations.

     Do not make this change if REG1 is a hard register, because it will
     then be used in the sequel and we may be changing a two-operand insn
     into a three-operand insn.

     Also do not do this if we are operating on a copy of INSN.  */

  if (n_sets == 1 && sets[0].rtl)
    try_back_substitute_reg (sets[0].rtl, insn);

done:;
}
5990 invalidate_memory (void)
5993 struct table_elt
*p
, *next
;
5995 for (i
= 0; i
< HASH_SIZE
; i
++)
5996 for (p
= table
[i
]; p
; p
= next
)
5998 next
= p
->next_same_hash
;
6000 remove_from_table (p
, i
);
/* Perform invalidation on the basis of everything about INSN,
   except for invalidating the actual places that are SET in it.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_clobbers (rtx_insn *insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == CLOBBER)
    {
      rtx ref = XEXP (x, 0);
      if (ref)
	{
	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
	      || MEM_P (ref))
	    invalidate (ref, VOIDmode);
	  else if (GET_CODE (ref) == STRICT_LOW_PART
		   || GET_CODE (ref) == ZERO_EXTRACT)
	    invalidate (XEXP (ref, 0), GET_MODE (ref));
	}
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx ref = XEXP (y, 0);
	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
		  || MEM_P (ref))
		invalidate (ref, VOIDmode);
	      else if (GET_CODE (ref) == STRICT_LOW_PART
		       || GET_CODE (ref) == ZERO_EXTRACT)
		invalidate (XEXP (ref, 0), GET_MODE (ref));
	    }
	}
    }
}
/* Perform invalidation on the basis of everything about INSN.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_sets_and_clobbers (rtx_insn *insn)
{
  rtx tem;
  rtx x = PATTERN (insn);

  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
	  invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
    }

  /* Ensure we invalidate the destination register of a CALL insn.
     This is necessary for machines where this register is a fixed_reg,
     because no other code would invalidate it.  */
  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
    invalidate (SET_DEST (x), VOIDmode);

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx clobbered = XEXP (y, 0);

	      if (REG_P (clobbered)
		  || GET_CODE (clobbered) == SUBREG)
		invalidate (clobbered, VOIDmode);
	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
		       || GET_CODE (clobbered) == ZERO_EXTRACT)
		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
	    }
	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
	    invalidate (SET_DEST (y), VOIDmode);
	}
    }
}
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */

static rtx
cse_process_notes_1 (rtx x, rtx object, bool *changed)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    CASE_CONST_ANY:
    case PC:
    case CC0:
    case LO_SUM:
      return x;

    case MEM:
      validate_change (x, &XEXP (x, 0),
		       cse_process_notes (XEXP (x, 0), x, changed), 0);
      return x;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL)
	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
      /* Fall through.  */

    case INSN_LIST:
    case INT_LIST:
      if (XEXP (x, 1))
	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case SUBREG:
      {
	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
	/* We don't substitute VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new_rtx) != VOIDmode)
	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
	return x;
      }

    case UNSIGNED_FLOAT:
      {
	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
	/* We don't substitute negative VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new_rtx) != VOIDmode
	    || (CONST_INT_P (new_rtx) && INTVAL (new_rtx) >= 0)
	    || (CONST_DOUBLE_P (new_rtx) && CONST_DOUBLE_HIGH (new_rtx) >= 0))
	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
	return x;
      }

    case REG:
      i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x)))
	{
	  struct qty_table_elem *ent = &qty_table[i];

	  if (ent->const_rtx != NULL_RTX
	      && (CONSTANT_P (ent->const_rtx)
		  || REG_P (ent->const_rtx)))
	    {
	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
	      if (new_rtx)
		return copy_rtx (new_rtx);
	    }
	}

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL);

    default:
      break;
    }

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
		       cse_process_notes (XEXP (x, i), object, changed), 0);

  return x;
}

static rtx
cse_process_notes (rtx x, rtx object, bool *changed)
{
  rtx new_rtx = cse_process_notes_1 (x, object, changed);
  if (new_rtx != x)
    *changed = true;
  return new_rtx;
}
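/* Illustrative example (hypothetical register numbers, not from the
   original source): if (reg:SI 117) is currently known to hold
   (const_int 8), a note

	(expr_list:REG_EQUAL (plus:SI (reg:SI 117) (const_int 4)) ...)

   is rewritten by the functions above so that the PLUS reads
   (plus:SI (const_int 8) (const_int 4)), which later folding can
   collapse to (const_int 12).  */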
/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.

   DATA is a pointer to a struct cse_basic_block_data, that is used to
   describe the path.
   It is filled with a queue of basic blocks, starting with FIRST_BB
   and following a trace through the CFG.

   If all paths starting at FIRST_BB have been followed, or no new path
   starting at FIRST_BB can be constructed, this function returns FALSE.
   Otherwise, DATA->path is filled and the function returns TRUE indicating
   that a path to follow was found.

   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
   block in the path will be FIRST_BB.  */

static bool
cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
	       int follow_jumps)
{
  basic_block bb;
  edge e;
  int path_size;

  bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);

  /* See if there is a previous path.  */
  path_size = data->path_size;

  /* There is a previous path.  Make sure it started with FIRST_BB.  */
  if (path_size)
    gcc_assert (data->path[0].bb == first_bb);

  /* There was only one basic block in the last path.  Clear the path and
     return, so that paths starting at another basic block can be tried.  */
  if (path_size == 1)
    {
      path_size = 0;
      data->path[0].bb = NULL;
    }

  /* If the path was empty from the beginning, construct a new path.  */
  if (path_size == 0)
    data->path[path_size++].bb = first_bb;
  else
    {
      /* Otherwise, path_size must be equal to or greater than 2, because
	 a previous path exists that is at least two basic blocks long.

	 Update the previous branch path, if any.  If the last branch was
	 previously along the branch edge, take the fallthrough edge now.  */
      while (path_size >= 2)
	{
	  basic_block last_bb_in_path, previous_bb_in_path;

	  --path_size;
	  last_bb_in_path = data->path[path_size].bb;
	  previous_bb_in_path = data->path[path_size - 1].bb;

	  /* If we previously followed a path along the branch edge, try
	     the fallthru edge now.  */
	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
	      && any_condjump_p (BB_END (previous_bb_in_path))
	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
	      && e == BRANCH_EDGE (previous_bb_in_path))
	    {
	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_pred_p (bb)
		  /* We used to assert here that we would only see blocks
		     that we have not visited yet.  But we may end up
		     visiting basic blocks twice if the CFG has changed
		     in this run of cse_main, because when the CFG changes
		     the topological sort of the CFG also changes.  A basic
		     block that previously had more than two predecessors
		     may now have a single predecessor, and become part of
		     a path that starts at another basic block.

		     We still want to visit each basic block only once, so
		     halt the path here if we have already visited BB.  */
		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
		{
		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
		  data->path[path_size++].bb = bb;
		  break;
		}
	    }

	  data->path[path_size].bb = NULL;
	}

      /* If only one block remains in the path, bail.  */
      if (path_size == 1)
	{
	  path_size = 0;
	  data->path[0].bb = NULL;
	}
    }

  /* Extend the path if possible.  */
  if (follow_jumps)
    {
      bb = data->path[path_size - 1].bb;
      while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
	{
	  if (single_succ_p (bb))
	    e = single_succ_edge (bb);
	  else if (EDGE_COUNT (bb->succs) == 2
		   && any_condjump_p (BB_END (bb)))
	    {
	      /* First try to follow the branch.  If that doesn't lead
		 to a useful path, follow the fallthru edge.  */
	      e = BRANCH_EDGE (bb);
	      if (!single_pred_p (e->dest))
		e = FALLTHRU_EDGE (bb);
	    }
	  else
	    e = NULL;

	  if (e
	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && single_pred_p (e->dest)
	      /* Avoid visiting basic blocks twice.  The large comment
		 above explains why this can happen.  */
	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
	    {
	      basic_block bb2 = e->dest;
	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
	      data->path[path_size++].bb = bb2;
	      bb = bb2;
	    }
	  else
	    bb = NULL;
	}
    }

  data->path_size = path_size;
  return path_size != 0;
}
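/* Illustrative trace (not from the original source): for a diamond
   CFG A -> {B, C}, B -> D, C -> D, a first call starting at A may
   build the path [A, B] by following the branch edge; the next call
   retracts B and builds [A, C] along the fallthru edge; D is never
   appended to either path because it has two predecessors.  */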
/* Dump the path in DATA to file F.  NSETS is the number of sets
   in the path.  */

static void
cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
{
  int path_entry;

  fprintf (f, ";; Following path with %d sets: ", nsets);
  for (path_entry = 0; path_entry < data->path_size; path_entry++)
    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
  fputc ('\n', f);
  fflush (f);
}
/* Return true if BB has exception handling successor edges.  */

static bool
have_eh_succ_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_EH)
      return true;

  return false;
}
/* Scan to the end of the path described by DATA.  Estimate the total
   number of SETs of all insns in the path and store it in DATA->nsets.  */

static void
cse_prescan_path (struct cse_basic_block_data *data)
{
  int nsets = 0;
  int path_entry;
  int path_size = data->path_size;

  /* Scan to end of each basic block in the path.  */
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = data->path[path_entry].bb;

      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  /* A PARALLEL can have lots of SETs in it,
	     especially if it is really an ASM_OPERANDS.  */
	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
	    nsets += XVECLEN (PATTERN (insn), 0);
	  else
	    nsets += 1;
	}
    }

  data->nsets = nsets;
}
/* Return true if the pattern of INSN uses a LABEL_REF for which
   there isn't a REG_LABEL_OPERAND note.  */

static bool
check_for_label_ref (rtx_insn *insn)
{
  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == LABEL_REF
	  && !LABEL_REF_NONLOCAL_P (x)
	  && (!JUMP_P (insn)
	      || !label_is_jump_target_p (LABEL_REF_LABEL (x), insn))
	  && LABEL_P (LABEL_REF_LABEL (x))
	  && INSN_UID (LABEL_REF_LABEL (x)) != 0
	  && !find_reg_note (insn, REG_LABEL_OPERAND, LABEL_REF_LABEL (x)))
	return true;
    }
  return false;
}
/* Process a single extended basic block described by EBB_DATA.  */

static void
cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
{
  int path_size = ebb_data->path_size;
  int path_entry;
  int num_insns = 0;

  /* Allocate the space needed by qty_table.  */
  qty_table = XNEWVEC (struct qty_table_elem, max_qty);

  new_basic_block ();
  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = ebb_data->path[path_entry].bb;

      /* Invalidate recorded information for eh regs if there is an EH
	 edge pointing to that bb.  */
      if (bb_has_eh_pred (bb))
	{
	  df_ref def;

	  FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
	    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	      invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
	}

      optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
      FOR_BB_INSNS (bb, insn)
	{
	  /* If we have processed 1,000 insns, flush the hash table to
	     avoid extreme quadratic behavior.  We must not include NOTEs
	     in the count since there may be more of them when generating
	     debugging information.  If we clear the table at different
	     times, code generated with -g -O might be different than code
	     generated with -O but not -g.

	     FIXME: This is a real kludge and needs to be done some other
		    way.  */
	  if (NONDEBUG_INSN_P (insn)
	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
	    {
	      flush_hash_table ();
	      num_insns = 0;
	    }

	  if (INSN_P (insn))
	    {
	      /* Process notes first so we have all notes in canonical forms
		 when looking for duplicate operations.  */
	      if (REG_NOTES (insn))
		{
		  bool changed = false;
		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
							NULL_RTX, &changed);
		  if (changed)
		    df_notes_rescan (insn);
		}

	      cse_insn (insn);

	      /* If we haven't already found an insn where we added a
		 LABEL_REF, check this one.  */
	      if (INSN_P (insn) && !recorded_label_ref
		  && check_for_label_ref (insn))
		recorded_label_ref = true;

#ifdef HAVE_cc0
	      if (NONDEBUG_INSN_P (insn))
		{
		  /* If the previous insn sets CC0 and this insn no
		     longer references CC0, delete the previous insn.
		     Here we use the fact that nothing expects CC0 to be
		     valid over an insn, which is true until the final
		     pass.  */
		  rtx_insn *prev_insn;
		  rtx tem;

		  prev_insn = prev_nonnote_nondebug_insn (insn);
		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
		      && (tem = single_set (prev_insn)) != NULL_RTX
		      && SET_DEST (tem) == cc0_rtx
		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
		    delete_insn (prev_insn);

		  /* If this insn is not the last insn in the basic
		     block, it will be PREV_INSN(insn) in the next
		     iteration.  If we recorded any CC0-related
		     information for this insn, remember it.  */
		  if (insn != BB_END (bb))
		    {
		      prev_insn_cc0 = this_insn_cc0;
		      prev_insn_cc0_mode = this_insn_cc0_mode;
		    }
		}
#endif
	    }
	}

      /* With non-call exceptions, we are not always able to update
	 the CFG properly inside cse_insn.  So clean up possibly
	 redundant EH edges here.  */
      if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
	cse_cfg_altered |= purge_dead_edges (bb);

      /* If we changed a conditional jump, we may have terminated
	 the path we are following.  Check that by verifying that
	 the edge we would take still exists.  If the edge does
	 not exist anymore, purge the remainder of the path.
	 Note that this will cause us to return to the caller.  */
      if (path_entry < path_size - 1)
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  if (!find_edge (bb, next_bb))
	    {
	      do
		{
		  path_size--;

		  /* If we truncate the path, we must also reset the
		     visited bit on the remaining blocks in the path,
		     or we will never visit them at all.  */
		  bitmap_clear_bit (cse_visited_basic_blocks,
				    ebb_data->path[path_size].bb->index);
		  ebb_data->path[path_size].bb = NULL;
		}
	      while (path_size - 1 != path_entry);
	      ebb_data->path_size = path_size;
	    }
	}

      /* If this is a conditional jump insn, record any known
	 equivalences due to the condition being tested.  */
      insn = BB_END (bb);
      if (path_entry < path_size - 1
	  && JUMP_P (insn)
	  && single_set (insn)
	  && any_condjump_p (insn))
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
	  record_jump_equiv (insn, taken);
	}

#ifdef HAVE_cc0
      /* Clear the CC0-tracking related insns, they can't provide
	 useful information across basic block boundaries.  */
      prev_insn_cc0 = 0;
#endif
    }

  gcc_assert (next_qty <= max_qty);

  free (qty_table);
}
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the instruction.

   Return 2 if jump optimizations should be redone due to simplifications
   in conditional jump instructions.
   Return 1 if the CFG should be cleaned up because it has been modified.
   Return 0 otherwise.  */

static int
cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int i, n_blocks;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();
  df_set_flags (DF_DEFER_INSN_RESCAN);

  reg_scan (get_insns (), max_reg_num ());
  init_cse_reg_info (nregs);

  ebb_data.path = XNEWVEC (struct branch_path,
			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));

  cse_cfg_altered = false;
  cse_jumps_altered = false;
  recorded_label_ref = false;
  constant_pool_entries_cost = 0;
  constant_pool_entries_regcost = 0;
  ebb_data.path_size = 0;
  ebb_data.nsets = 0;
  rtl_hooks = cse_rtl_hooks;

  init_recog ();
  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (cse_visited_basic_blocks);

  /* Loop over basic blocks in reverse completion order (RPO),
     excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
	 processed before.  */
      do
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
	}
      while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
	     && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
	{
	  /* Pre-scan the path.  */
	  cse_prescan_path (&ebb_data);

	  /* If this basic block has no sets, skip it.  */
	  if (ebb_data.nsets == 0)
	    continue;

	  /* Get a reasonable estimate for the maximum number of qty's
	     needed for this path.  For this, we take the number of sets
	     and multiply that by MAX_RECOG_OPERANDS.  */
	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;

	  /* Dump the path we're about to process.  */
	  if (dump_file)
	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

	  cse_extended_basic_block (&ebb_data);
	}
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  if (cse_jumps_altered || recorded_label_ref)
    return 2;
  else if (cse_cfg_altered)
    return 1;
  else
    return 0;
}
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
   We must then count uses of a SET_DEST regardless, because the insn can't be
   deleted here.  */

static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      count_reg_usage (SET_SRC (x), counts,
		       dest ? dest : SET_DEST (x),
		       incr);
      return;

    case DEBUG_INSN:
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect dest to be NULL_RTX here.  If the insn may throw,
	 or if it cannot be deleted due to side-effects, mark this fact
	 by setting DEST to pc_rtx.  */
      if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
	  || side_effects_p (PATTERN (x)))
	dest = pc_rtx;
      if (code == CALL_INSN)
	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	    /* This REG_EQUAL note describes the result of a function call.
	       Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, dest, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
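/* Illustrative example (not from the original source) of the DEST
   protocol above: for (set (reg 5) (plus (reg 5) (const_int 1))),
   the use of (reg 5) in the source is not counted because it matches
   DEST, so an otherwise unused (reg 5) still looks dead; if the insn
   could trap or has side effects, DEST is pc_rtx instead, the use is
   counted, and the insn is kept alive.  */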
/* Return true if X is a dead register.  */

static inline int
is_dead_reg (const_rtx x, int *counts)
{
  return (REG_P (x)
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && counts[REGNO (x)] == 0);
}
/* Return true if set is live.  */
static bool
set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx_insn *tem;
#endif

  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (!is_dead_reg (SET_DEST (set), counts)
	   || side_effects_p (SET_SRC (set)))
    return true;

  return false;
}
/* Return true if insn is live.  */

static bool
insn_live_p (rtx_insn *insn, int *counts)
{
  int i;
  if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, insn, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else if (DEBUG_INSN_P (insn))
    {
      rtx_insn *next;

      for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
	if (NOTE_P (next))
	  continue;
	else if (!DEBUG_INSN_P (next))
	  return true;
	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
	  return false;

      return true;
    }
  else
    return true;
}
/* Count the number of stores into a pseudo.  Callback for note_stores.  */

static void
count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
  int *counts = (int *) data;
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (x)]++;
}
/* Return true if DEBUG_INSN pattern PAT needs to be reset because some dead
   pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
   is dead and REPLACEMENTS[X] is null if it has no replacement.
   Set *SEEN_REPL to true if we see a dead register that does have
   a replacement.  */

static bool
is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
		    bool *seen_repl)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
    {
      const_rtx x = *iter;
      if (is_dead_reg (x, counts))
	{
	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
	    *seen_repl = true;
	  else
	    return true;
	}
    }
  return false;
}
/* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
   Callback for simplify_replace_fn_rtx.  */

static rtx
replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
{
  rtx *replacements = (rtx *) data;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && replacements[REGNO (x)] != NULL_RTX)
    {
      if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
	return replacements[REGNO (x)];
      return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
			     GET_MODE (replacements[REGNO (x)]));
    }
  return NULL_RTX;
}
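/* Illustrative example (hypothetical numbers, not from the original
   source): if DImode (reg 123) is dead but was bound to debug
   expression D#5 by the pass below, a SImode use of it in a
   DEBUG_INSN is rewritten to a lowpart subreg such as
   (subreg:SI (debug_expr:DI D#5) 0), with the byte offset depending
   on the target's endianness.  */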
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

int
delete_trivially_dead_insns (rtx_insn *insns, int nreg)
{
  int *counts;
  rtx_insn *insn, *prev;
  rtx *replacements = NULL;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  if (MAY_HAVE_DEBUG_INSNS)
    {
      counts = XCNEWVEC (int, nreg * 3);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			   NULL_RTX, 1);
	else if (INSN_P (insn))
	  {
	    count_reg_usage (insn, counts, NULL_RTX, 1);
	    note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
	  }
      /* If there can be debug insns, COUNTS are 3 consecutive arrays.
	 First one counts how many times each pseudo is used outside
	 of debug insns, second counts how many times each pseudo is
	 used in debug insns and third counts how many times a pseudo
	 is stored.  */
    }
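      /* Concrete restatement of the comment above (illustrative
	 numbers): with nreg == 100, counts[0..99] hold the non-debug
	 use counts, counts[100..199] the debug-insn use counts, and
	 counts[200..299] the store counts; the loop below therefore
	 reads counts[REGNO + nreg] and counts[REGNO + nreg * 2].  */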
  else
    {
      counts = XCNEWVEC (int, nreg);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  count_reg_usage (insn, counts, NULL_RTX, 1);
      /* If no debug insns can be present, COUNTS is just an array
	 which counts how many times each pseudo is used.  */
    }
  /* Pseudo PIC register should be considered as used due to possible
     new usages generated.  */
  if (!reload_completed
      && pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (pic_offset_table_rtx)]++;
  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.

     If some otherwise unused register is only used in DEBUG_INSNs,
     try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
     the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
     has been created for the unused register, replace it with
     the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
	{
	  if (DEBUG_INSN_P (insn))
	    count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			     NULL_RTX, -1);
	  else
	    {
	      rtx set;
	      if (MAY_HAVE_DEBUG_INSNS
		  && (set = single_set (insn)) != NULL_RTX
		  && is_dead_reg (SET_DEST (set), counts)
		  /* Used at least once in some DEBUG_INSN.  */
		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
		  /* And set exactly once.  */
		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
		  && !side_effects_p (SET_SRC (set))
		  && asm_noperands (PATTERN (insn)) < 0)
		{
		  rtx dval, bind_var_loc;
		  rtx_insn *bind;

		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
		  dval = make_debug_expr_from_rtl (SET_DEST (set));

		  /* Emit a debug bind insn before the insn in which
		     reg dies.  */
		  bind_var_loc =
		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
					  DEBUG_EXPR_TREE_DECL (dval),
					  SET_SRC (set),
					  VAR_INIT_STATUS_INITIALIZED);
		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);

		  bind = emit_debug_insn_before (bind_var_loc, insn);
		  df_insn_rescan (bind);

		  if (replacements == NULL)
		    replacements = XCNEWVEC (rtx, nreg);
		  replacements[REGNO (SET_DEST (set))] = dval;
		}

	      count_reg_usage (insn, counts, NULL_RTX, -1);
	      ndead++;
	    }
	  delete_insn_and_edges (insn);
	}
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  {
	    /* If this debug insn references a dead register that wasn't
	       replaced with a DEBUG_EXPR, reset the DEBUG_INSN.  */
	    bool seen_repl = false;
	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
				    counts, replacements, &seen_repl))
	      {
		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
		df_insn_rescan (insn);
	      }
	    else if (seen_repl)
	      {
		INSN_VAR_LOCATION_LOC (insn)
		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
					     NULL_RTX, replace_dead_reg,
					     replacements);
		df_insn_rescan (insn);
	      }
	  }
      free (replacements);
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
/* If LOC contains references to NEWREG in a different mode, change them
   to use NEWREG instead.  */

static void
cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
		    rtx *loc, rtx insn, rtx newreg)
{
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      if (x
	  && REG_P (x)
	  && REGNO (x) == REGNO (newreg)
	  && GET_MODE (x) != GET_MODE (newreg))
	{
	  validate_change (insn, loc, newreg, 1);
	  iter.skip_subrtxes ();
	}
    }
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
{
  int success;

  if (!INSN_P (insn))
    return;

  subrtx_ptr_iterator::array_type array;
  cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
  cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);

  /* If the following assertion was triggered, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes only can be considered compatible if the insn - with the mode
     replaced by any of the compatible modes - can still be recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
{
  rtx_insn *insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx_insn *insns[2];
  machine_mode modes[2];
  rtx_insn *last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx_insn *insn;
      rtx_insn *end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      machine_mode set_mode;
	      machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))
		{
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn_and_edges (insns[i]);
    }

  return mode;
}
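/* Illustrative example (target-dependent, not from the original
   source): if BB ends with a jump on (compare (reg 1) (reg 2))
   computed in CCZmode, and a successor block recomputes
   (compare (reg 1) (reg 2)) in full CCmode, then, provided
   targetm.cc_modes_compatible returns CCmode for the pair, the
   duplicate comparison can be deleted once references to the
   condition code register are rewritten to use CCmode.  */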
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *last_insn;
      rtx cc_reg;
      rtx_insn *insn;
      rtx_insn *cc_src_insn;
      rtx cc_src;
      machine_mode mode;
      machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      cc_src_insn = NULL;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}
/* Perform common subexpression elimination.  Nonzero value from
   `cse_main' means that jumps were simplified and some code may now
   be unreachable, so do jump optimization again.  */
static unsigned int
rest_of_handle_cse (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1 || optimize > 1)
    cse_cfg_altered |= cleanup_cfg (0);

  return 0;
}
namespace {

const pass_data pass_data_cse =
{
  RTL_PASS, /* type */
  "cse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse : public rtl_opt_pass
{
public:
  pass_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }
  virtual unsigned int execute (function *) { return rest_of_handle_cse (); }

}; // class pass_cse

} // anon namespace

rtl_opt_pass *
make_pass_cse (gcc::context *ctxt)
{
  return new pass_cse (ctxt);
}
/* Run second CSE pass after loop optimizations.  */
static unsigned int
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because it
     makes it harder for that pass to determine whether a jump can be
     bypassed safely.  */
  cse_condition_code_reg ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cse_cfg_altered |= cleanup_cfg (0);

  cse_not_expected = 1;
  return 0;
}

namespace {

const pass_data pass_data_cse2 =
{
  RTL_PASS, /* type */
  "cse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse2 : public rtl_opt_pass
{
public:
  pass_cse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_rerun_cse_after_loop;
    }

  virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }

}; // class pass_cse2

} // anon namespace

rtl_opt_pass *
make_pass_cse2 (gcc::context *ctxt)
{
  return new pass_cse2 (ctxt);
}
/* Run a local CSE pass after global optimizations.  */
static unsigned int
rest_of_handle_cse_after_global_opts (void)
{
  int save_cfj;
  int tem;

  /* We only want to do local CSE, so don't follow jumps.  */
  save_cfj = flag_cse_follow_jumps;
  flag_cse_follow_jumps = 0;

  rebuild_jump_labels (get_insns ());
  tem = cse_main (get_insns (), max_reg_num ());
  purge_all_dead_edges ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  cse_not_expected = !flag_rerun_cse_after_loop;

  /* If cse altered any jumps, rerun jump opts to clean things up.  */
  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cse_cfg_altered |= cleanup_cfg (0);

  flag_cse_follow_jumps = save_cfj;
  return 0;
}

namespace {

const pass_data pass_data_cse_after_global_opts =
{
  RTL_PASS, /* type */
  "cse_local", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse_after_global_opts : public rtl_opt_pass
{
public:
  pass_cse_after_global_opts (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_rerun_cse_after_global_opts;
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_cse_after_global_opts ();
    }

}; // class pass_cse_after_global_opts

} // anon namespace

rtl_opt_pass *
make_pass_cse_after_global_opts (gcc::context *ctxt)
{
  return new pass_cse_after_global_opts (ctxt);
}