/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "insn-config.h"
#include "statistics.h"
#include "double-int.h"
#include "fixed-value.h"
#include "diagnostic-core.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match makes the entries be
   ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
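
/* For example (an illustration of the quantity-number scheme above, not
   additional machinery): scanning

       (set (reg 101) (reg 100))
       (set (reg 102) (plus (reg 100) (const_int 4)))
       (set (reg 103) (reg 102))

   gives reg 101 the quantity number already held by reg 100 (a
   register-register copy), allocates a fresh quantity number for reg 102
   (it is loaded in some other way), and then gives reg 103 that same
   fresh quantity number.  Canonicalization then replaces any of these
   registers by the first register on its quantity's chain.  */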
/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */
struct qty_table_elem
{
  rtx const_rtx;
  rtx_insn *const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};
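
/* Illustrative example: if the scan has passed a test establishing
   "reg 100 == 42" on the current path, the quantity of reg 100 can record
   comparison_code == EQ and comparison_const == (const_int 42); when the
   same comparison is seen again, its result is already known.  */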
/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;

/* Insn being scanned.  */

static rtx_insn *this_insn;
static bool optimize_this_for_speed_p;
/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;
/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning of
   the current run of cse_extended_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;
/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND, in which case we have to rerun jump
   after CSE to put in the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;
/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a CONST_INT).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */
struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)
/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)						\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER		\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)						\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER		\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)
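
/* Note that for pseudo registers it is the quantity number, not the
   register number, that feeds the hash.  If reg 100 and reg 101 currently
   share a quantity (say after (set (reg 101) (reg 100))), HASH gives both
   the same code, and, as the comment at the top of the file notes, register
   references inside larger expressions are hashed the same way, so
   equivalent registers do not split a class across buckets.  */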
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)						\
  (REGNO_PTR_FRAME_P (N)					\
   || (HARD_REGISTER_NUM_P (N)					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET, 1))
#define COST_IN(X, OUTER, OPNO) (REG_P (X) ? 0 : notreg_cost (X, OUTER, OPNO))
/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;
/* Trace a path through the CFG.  */
struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};

/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;
static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code, int);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
                                            machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
                                 machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
                                        machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, machine_mode);
static inline unsigned safe_hash (rtx, machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx_insn *);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
                                           machine_mode *, machine_mode *);
static rtx fold_rtx (rtx, rtx_insn *);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx_insn *, bool);
static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
                              int);
static void cse_insn (rtx_insn *);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx_insn *);
static void invalidate_from_sets_and_clobbers (rtx_insn *);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
extern void dump_class (struct table_elt *);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);

static void flush_hash_table (void);
static bool insn_live_p (rtx_insn *, int *);
static bool set_live_p (rtx, rtx_insn *, int *);
static void cse_change_cc_mode_insn (rtx_insn *, rtx);
static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
                                  bool);

#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
/* Nonzero if X has the form (PLUS frame-pointer integer).  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
        return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
        return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
        return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (const_rtx x)
{
  int cost = 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
        {
          unsigned int regno = REGNO (x);
          if (!CHEAP_REGNO (regno))
            {
              if (regno < FIRST_PSEUDO_REGISTER)
                {
                  if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
                    return MAX_COST;
                  cost += 2;
                }
              else
                cost += 1;
            }
        }
    }
  return cost;
}
/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */

static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
        return 1;
      if (cost_b == MAX_COST)
        return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
        return 1;
      if (regcost_b == MAX_COST)
        return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
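
/* For example, preferable (4, 1, MAX_COST, 0) is negative: a usable
   expression always beats one whose cost is MAX_COST.  With equal rtx
   costs, say preferable (4, 2, 4, 1), the register-pressure estimate
   breaks the tie and the second expression is preferred.  */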
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer, int opno)
{
  return ((GET_CODE (x) == SUBREG
           && REG_P (SUBREG_REG (x))
           && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && (GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
           && subreg_lowpart_p (x)
           && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
                                             GET_MODE (SUBREG_REG (x))))
          ? 0
          : rtx_cost (x, outer, opno, optimize_this_for_speed_p) * 2);
}
/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
        {
          /* Compute a new size that is a power of 2 and no smaller
             than the larger of NREGS and 64.  */
          new_size = (cse_reg_info_table_size
                      ? cse_reg_info_table_size : 64);

          while (new_size < nregs)
            new_size *= 2;
        }
      else
        {
          /* If we need a big table, allocate just enough to hold
             NREGS registers.  */
          new_size = nregs;
        }

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
         will all be considered out of date.  We do not touch those
         entries beyond the first NREGS entries to be nice to the
         virtual memory subsystem.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
        cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}
/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
        {
          struct table_elt *last = first;

          table[i] = NULL;

          while (last->next_same_hash != NULL)
            last = last->next_same_hash;

          /* Now relink this entire hash chain into
             the free element list.  */

          last->next_same_hash = free_element_chain;
          free_element_chain = first;
        }
    }
}
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
         that not only can they not be allocated by the compiler, but
         they cannot be used in substitutions or canonicalizations
         either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
          || (new_reg >= FIRST_PSEUDO_REGISTER
              && (firstr < FIRST_PSEUDO_REGISTER
                  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
                      && !bitmap_bit_p (cse_ebb_live_out, firstr))
                  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
                      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
         Otherwise, insert before any non-fixed hard regs that are at the
         end.  Registers of class NO_REGS cannot be used as an
         equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
             && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
             && new_reg >= FIRST_PSEUDO_REGISTER)
        lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
        reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
        qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
        {
          if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
            remove_invalid_refs (i);

          REG_IN_TABLE (i) = REG_TICK (i);
          SUBREG_TICKED (i) = -1;
        }

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
        {
          /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
             the last store to this register really stored into this
             subreg, then remove the memory of this subreg.
             Otherwise, remove any memory of the entire register and
             all its subregs from the table.  */
          if (REG_TICK (i) - REG_IN_TABLE (i) > 1
              || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
            remove_invalid_refs (i);
          else
            remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
        }

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
        if (insert_regs (XEXP (x, 0), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 0));
            changed = 1;
          }

      if (REG_P (XEXP (x, 1))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
        if (insert_regs (XEXP (x, 1), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 1));
            changed = 1;
          }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
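
/* Example of the REG_TICK / REG_IN_TABLE protocol described at the top of
   the file: if expressions mentioning reg 100 were entered while
   REG_IN_TABLE (100) == REG_TICK (100), and reg 100 is later stored into,
   REG_TICK (100) is bumped.  The stale entries stay in the table but no
   longer match; they are swept out by the code above only when a new
   reference to reg 100 is about to be entered.  */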
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
         wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
        {
          struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

          if (ent->mode != GET_MODE (x))
            return 0;
        }

      if (modified || ! qty_valid)
        {
          if (classp)
            for (classp = classp->first_same_value;
                 classp != 0;
                 classp = classp->next_same_value)
              if (REG_P (classp->exp)
                  && GET_MODE (classp->exp) == GET_MODE (x))
                {
                  unsigned c_regno = REGNO (classp->exp);

                  gcc_assert (REGNO_QTY_VALID_P (c_regno));

                  /* Suppose that 5 is hard reg and 100 and 101 are
                     pseudos.  Consider

                     (set (reg:si 100) (reg:si 5))
                     (set (reg:si 5) (reg:si 100))
                     (set (reg:di 101) (reg:di 5))

                     We would now set REG_QTY (101) = REG_QTY (5), but the
                     entry for 5 is in SImode.  When we use this later in
                     copy propagation, we get the register in wrong mode.  */
                  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
                    continue;

                  make_regs_eqv (regno, c_regno);
                  return 1;
                }

          /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
             than REG_IN_TABLE to find out if there was only a single preceding
             invalidation - for the SUBREG - or another one, which would be
             for the full register.  However, if we find here that REG_TICK
             indicates that the register is invalid, it means that it has
             been invalidated in a separate operation.  The SUBREG might be used
             now (then this is a recursive call), or we might use the full REG
             now and a SUBREG of it later.  So bump up REG_TICK so that
             mention_regs will do the right thing.  */
          if (! modified
              && REG_IN_TABLE (regno) >= 0
              && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
            REG_TICK (regno)++;
          make_new_qty (regno, GET_MODE (x));
          return 1;
        }

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
           && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
                       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
                       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}
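
/* A worked example (with a hypothetical targetm.const_anchor of 0x8000):
   for CST = 70000 (0x11170) the lower anchor is 0x10000 with offset 4464
   and the upper anchor is 0x18000 with offset -28304; in both cases
   anchor + offset gives back 70000.  */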
/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
                     machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (mode, reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression therefore it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
}
/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
                              &upper_base, &upper_offs))
    return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}
/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
                           unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
        return match;

      if (REG_P (elt->exp)
          || (GET_CODE (elt->exp) == PLUS
              && REG_P (XEXP (elt->exp, 0))
              && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
        {
          rtx x;

          /* Ignore expressions that are no longer valid.  */
          if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
            continue;

          x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
          if (REG_P (x)
              || (GET_CODE (x) == PLUS
                  && IN_RANGE (INTVAL (XEXP (x, 1)),
                               -targetm.const_anchor,
                               targetm.const_anchor - 1)))
            {
              match = x;
              match_elt = elt;
              *old = idx;
            }
        }
    }

  return match;
}
/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful or NULL_RTX,
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  /* CONST_INT is used for CC modes, but we should leave those alone.  */
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return NULL_RTX;

  gcc_assert (SCALAR_INT_MODE_P (mode));
  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
                              &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}
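
/* For instance, if some register is already known to be equal to the anchor
   constant 0x10000, then a nearby constant such as 0x10004 can be expressed
   here as that register plus 4 instead of being materialized from scratch
   (the exact anchors and offsets depend on targetm.const_anchor, as computed
   by compute_const_anchors above).  */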
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
        struct table_elt *newfirst = next;
        while (next)
          {
            next->first_same_value = newfirst;
            next = next->next_same_value;
          }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
        /* This entry is not in the proper hash bucket.  This can happen
           when two classes were merged by `merge_equiv_classes'.  Search
           for the hash bucket that it heads.  This happens only very
           rarely, so the cost is acceptable.  */
        for (hash = 0; hash < HASH_SIZE; hash++)
          if (table[hash] == elt)
            table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
        p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
        p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
                            || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
         invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
        if (REG_P (p->exp)
            && REGNO (p->exp) == regno)
          return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
        if (mode == p->mode
            && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
          return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
        /* Make sure this is a valid entry in the table.  */
        && exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}
/* Insert X in the hash table, assuming HASH is its hash code and
   CLASSP is an element of the class it should go in (or 0 if a new
   class should be made).  COST is the cost of X and reg_cost is the
   cost of registers in X.  It is inserted at the proper position to
   keep the class in the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

static struct table_elt *
insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
                   machine_mode mode, int cost, int reg_cost)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = XNEW (struct table_elt);

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = cost;
  elt->regcost = reg_cost;
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
        /* Insert at the head of the class.  */
        {
          struct table_elt *p;
          elt->next_same_value = classp;
          classp->prev_same_value = elt;
          elt->first_same_value = elt;

          for (p = classp; p; p = p->next_same_value)
            p->first_same_value = elt;
        }
      else
        {
          /* Insert not at head of the class.  */
          /* Put it after the last element cheaper than X.  */
          struct table_elt *p, *next;

          for (p = classp;
               (next = p->next_same_value) && CHEAPER (next, elt);
               p = next)
            ;

          /* Put it after P and before NEXT.  */
          elt->next_same_value = next;
          if (next)
            next->prev_same_value = elt;

          elt->prev_same_value = p;
          p->next_same_value = elt;
          elt->first_same_value = classp;
        }
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
           && classp
           && ! qty_table[REG_QTY (REGNO (x))].const_rtx
           && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
        {
          if (p->is_const && !REG_P (p->exp))
            {
              int x_q = REG_QTY (REGNO (x));
              struct qty_table_elem *x_ent = &qty_table[x_q];

              x_ent->const_rtx
                = gen_lowpart (GET_MODE (x), p->exp);
              x_ent->const_insn = this_insn;
              break;
            }
        }
    }

  else if (REG_P (x)
           && qty_table[REG_QTY (REGNO (x))].const_rtx
           && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
        {
          /* Get the integer-free subexpression in the hash table.  */
          subhash = SAFE_HASH (subexp, mode);
          subelt = lookup (subexp, subhash, mode);
          if (subelt == 0)
            subelt = insert (subexp, NULL, subhash, mode);
          /* Initialize SUBELT's circular chain if it has none.  */
          if (subelt->related_value == 0)
            subelt->related_value = subelt;
          /* Find the element in the circular chain that precedes SUBELT.  */
          subelt_prev = subelt;
          while (subelt_prev->related_value != subelt)
            subelt_prev = subelt_prev->related_value;
          /* Put new ELT into SUBELT's circular chain just before SUBELT.
             This way the element that follows SUBELT is the oldest one.  */
          elt->related_value = subelt_prev->related_value;
          subelt_prev->related_value = elt;
        }
    }

  return elt;
}
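
/* Related-value example: inserting (const (plus (symbol_ref "sym") (const_int 12)))
   also enters the integer-free subexpression (symbol_ref "sym") and links the two
   on the circular `related_value' chain, so a register known to hold the plain
   symbol can later be reached from the offsetted constant (this is what
   use_related_value looks for).  */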
/* Wrap insert_with_costs by passing the default costs.  */

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash,
        machine_mode mode)
{
  return
    insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
}
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new_elt;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
         Don't do this for invalid entries as we cannot find their
         hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
        {
          bool need_rehash = false;

          hash_arg_in_memory = 0;
          hash = HASH (exp, mode);

          if (REG_P (exp))
            {
              need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
              delete_reg_equiv (REGNO (exp));
            }

          if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
            remove_pseudo_from_table (exp, hash);
          else
            remove_from_table (elt, hash);

          if (insert_regs (exp, class1, 0) || need_rehash)
            {
              rehash_using_reg (exp);
              hash = HASH (exp, mode);
            }
          new_elt = insert (exp, class1, hash, mode);
          new_elt->in_memory = hash_arg_in_memory;
          if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
            new_elt->cost = MAX_COST;
        }
    }
}
/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
        /* Note that invalidate can remove elements
           after P in the current hash chain.  */
        if (REG_P (p->exp))
          invalidate (p->exp, VOIDmode);
        else
          remove_from_table (p, i);
      }
}
/* Check whether an anti dependence exists between X and EXP.  MODE and
   ADDR are as for canon_anti_dependence.  */

static bool
check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
        return true;
    }
  return false;
}
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
	/* If X is a register, dependencies on its contents are recorded
	   through the qty number mechanism.  Just change the qty number of
	   the register, mark it as invalid for expressions that refer to it,
	   and remove it itself.  */
	unsigned int regno = REGNO (x);
	unsigned int hash = HASH (x, GET_MODE (x));

	/* Remove REGNO from any quantity list it might be on and indicate
	   that its value might have changed.  If it is a pseudo, remove its
	   entry from the hash table.

	   For a hard register, we do the first two actions above for any
	   additional hard registers corresponding to X.  Then, if any of these
	   registers are in the table, we must remove any REG entries that
	   overlap these registers.  */

	delete_reg_equiv (regno);
	REG_TICK (regno)++;
	SUBREG_TICKED (regno) = -1;

	if (regno >= FIRST_PSEUDO_REGISTER)
	  remove_pseudo_from_table (x, hash);
	else
	  {
	    HOST_WIDE_INT in_table
	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	    unsigned int endregno = END_HARD_REGNO (x);
	    unsigned int tregno, tendregno, rn;
	    struct table_elt *p, *next;

	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	    for (rn = regno + 1; rn < endregno; rn++)
	      {
		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
		delete_reg_equiv (rn);
		REG_TICK (rn)++;
		SUBREG_TICKED (rn) = -1;
	      }

	    if (in_table)
	      for (hash = 0; hash < HASH_SIZE; hash++)
		for (p = table[hash]; p; p = next)
		  {
		    next = p->next_same_hash;

		    if (!REG_P (p->exp)
			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		      continue;

		    tregno = REGNO (p->exp);
		    tendregno = END_HARD_REGNO (p->exp);
		    if (tendregno > regno && tregno < endregno)
		      remove_from_table (p, hash);
		  }
	  }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
	 question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
	 true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
	 memory.  */
      if (full_mode == VOIDmode)
	full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
	{
	  struct table_elt *next;

	  for (p = table[i]; p; p = next)
	    {
	      next = p->next_same_hash;
	      if (p->in_memory)
		{
		  /* Just canonicalize the expression once;
		     otherwise each time we call invalidate
		     true_dependence will canonicalize the
		     expression again.  */
		  if (!p->canon_exp)
		    p->canon_exp = canon_rtx (p->exp);
		  if (check_dependence (p->canon_exp, x, full_mode, addr))
		    remove_from_table (p, i);
		}
	    }
	}
      return;

    default:
      gcc_unreachable ();
    }
}
/* Invalidate DEST.  Used when DEST is not going to be added
   into the hash table for some reason, e.g. do_not_record
   flagged on it.  */

static void
invalidate_dest (rtx dest)
{
  if (REG_P (dest)
      || GET_CODE (dest) == SUBREG
      || MEM_P (dest))
    invalidate (dest, VOIDmode);
  else if (GET_CODE (dest) == STRICT_LOW_PART
	   || GET_CODE (dest) == ZERO_EXTRACT)
    invalidate (XEXP (dest, 0), GET_MODE (dest));
}
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
	  remove_from_table (p, i);
      }
}
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */

static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
			    machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	rtx exp = p->exp;
	next = p->next_same_hash;

	if (!REG_P (exp)
	    && (GET_CODE (exp) != SUBREG
		|| !REG_P (SUBREG_REG (exp))
		|| REGNO (SUBREG_REG (exp)) != regno
		|| (((SUBREG_BYTE (exp)
		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
		    && SUBREG_BYTE (exp) <= end))
	    && refers_to_regno_p (regno, p->exp))
	  remove_from_table (p, i);
      }
}
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, false)
	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
	  {
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call (void)
{
  unsigned int regno, endregno;
  unsigned int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;
  hard_reg_set_iterator hrsi;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */
  EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
    {
      delete_reg_equiv (regno);
      if (REG_TICK (regno) >= 0)
	{
	  REG_TICK (regno)++;
	  SUBREG_TICKED (regno) = -1;
	}
      in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
    }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
	{
	  next = p->next_same_hash;

	  if (!REG_P (p->exp)
	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
	    continue;

	  regno = REGNO (p->exp);
	  endregno = END_HARD_REGNO (p->exp);

	  for (i = regno; i < endregno; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	      {
		remove_from_table (p, hash);
		break;
	      }
	}
}
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       SAFE_HASH (subexp, GET_MODE (subexp)),
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, that we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (REG_P (q->exp))
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->mode, q->exp, offset);
}
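/* For illustration (hypothetical example): if X is
   (const (plus (symbol_ref "tbl") (const_int 12))) and the table already
   records (reg 100) as equivalent to
   (const (plus (symbol_ref "tbl") (const_int 4))), the related-value chain
   lets us return (plus (reg 100) (const_int 8)) instead of rematerializing
   the symbol reference.  */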
/* Hash a string.  Just add its bytes up.  */
static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
   When the callback returns true, we continue with the new rtx.  */

unsigned
hash_rtx_cb (const_rtx x, machine_mode mode,
	     int *do_not_record_p, int *hash_arg_in_memory_p,
	     bool have_reg_qty, hash_rtx_callback_function cb)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;
  machine_mode newmode;
  rtx newx;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  /* Invoke the callback first.  */
  if (cb != NULL
      && ((*cb) (x, mode, &newx, &newmode)))
    {
      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
			   hash_arg_in_memory_p, have_reg_qty, cb);
      return hash;
    }

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	unsigned int regno = REGNO (x);

	if (do_not_record_p && !reload_completed)
	  {
	    /* On some machines, we can't record any non-fixed hard register,
	       because extending its life will cause reload problems.  We
	       consider ap, fp, sp, gp to be fixed for this purpose.

	       We also consider CCmode registers to be fixed for this purpose;
	       failure to do so leads to failure to simplify 0<100 type of
	       conditionals.

	       On all machines, we can't record any global registers.
	       Nor should we record any register that is in a small
	       class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
	    bool record;

	    if (regno >= FIRST_PSEUDO_REGISTER)
	      record = true;
	    else if (x == frame_pointer_rtx
		     || x == hard_frame_pointer_rtx
		     || x == arg_pointer_rtx
		     || x == stack_pointer_rtx
		     || x == pic_offset_table_rtx)
	      record = true;
	    else if (global_regs[regno])
	      record = false;
	    else if (fixed_regs[regno])
	      record = true;
	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
	      record = true;
	    else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
	      record = false;
	    else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
	      record = false;
	    else
	      record = true;

	    if (!record)
	      {
		*do_not_record_p = 1;
		return 0;
	      }
	  }

	hash += ((unsigned int) REG << 7);
	hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
	return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
	if (REG_P (SUBREG_REG (x)))
	  {
	    hash += (((unsigned int) SUBREG << 7)
		     + REGNO (SUBREG_REG (x))
		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
	    return hash;
	  }
	break;
      }

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      else
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += hash_rtx_cb (elt, GET_MODE (elt),
				 do_not_record_p, hash_arg_in_memory_p,
				 have_reg_qty, cb);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (LABEL_REF_LABEL (x)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
	 know the size of the move.  */
      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	*hash_arg_in_memory_p = 1;

      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
	 handling since the MEM may be BLKmode which normally
	 prevents an entry from being made.  Pure calls are
	 marked by a USE which mentions BLKmode memory.
	 See calls.c:emit_call_1.  */
      if (MEM_P (XEXP (x, 0))
	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  hash += (unsigned) USE;
	  x = XEXP (x, 0);

	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	    *hash_arg_in_memory_p = 1;

	  /* Now that we have already found this special case,
	     might as well speed it up as much as possible.  */
	  hash += (unsigned) MEM;
	  x = XEXP (x, 0);
	  goto repeat;
	}
      break;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      if (do_not_record_p) {
	*do_not_record_p = 1;
	return 0;
      }
      else
	return hash;
      break;

    case ASM_OPERANDS:
      if (do_not_record_p && MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
					GET_MODE (ASM_OPERANDS_INPUT (x, i)),
					do_not_record_p, hash_arg_in_memory_p,
					have_reg_qty, cb)
			   + hash_rtx_string
			   (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
		}

	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
			       hash_arg_in_memory_p,
			       have_reg_qty, cb);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
				 hash_arg_in_memory_p,
				 have_reg_qty, cb);
	  break;

	case 's':
	  hash += hash_rtx_string (XSTR (x, i));
	  break;

	case 'i':
	  hash += (unsigned int) XINT (x, i);
	  break;

	case '0': case 't':
	  /* Unused.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash;
}
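/* Note, for illustration: with HAVE_REG_QTY true the REG case above hashes
   a register as ((unsigned) REG << 7) + REG_QTY (regno), so two pseudos
   currently holding the same quantity hash (and therefore chain) together,
   which is what allows equivalent registers to be found with one lookup.  */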
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which does not have the MEM_READONLY_P flag set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
	  int *hash_arg_in_memory_p, bool have_reg_qty)
{
  return hash_rtx_cb (x, mode, do_not_record_p,
		      hash_arg_in_memory_p, have_reg_qty, NULL);
}
/* Hash an rtx X for cse via hash_rtx.
   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   does not have the MEM_READONLY_P flag set.  */

static inline unsigned
canon_hash (rtx x, machine_mode mode)
{
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}

/* Like canon_hash but with no side effects, i.e. do_not_record
   and hash_arg_in_memory are not changed.  */

static inline unsigned
safe_hash (rtx x, machine_mode mode)
{
  int dummy_do_not_record;
  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
}
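/* Typical use, for illustration: lookups that must not disturb the
   do_not_record / hash_arg_in_memory globals go through SAFE_HASH, e.g.
   lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x)), as done in
   equiv_constant and use_related_value below.  */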
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */

int
exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    CASE_CONST_UNIQUE:
      return x == y;

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
	return REGNO (x) == REGNO (y);
      else
	{
	  unsigned int regno = REGNO (y);
	  unsigned int i;
	  unsigned int endregno = END_REGNO (y);

	  /* If the quantities are not the same, the expressions are not
	     equivalent.  If they are the same and we are not to validate,
	     they are equivalent.  Otherwise, ensure all regs are up-to-date.  */

	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
	    return 0;

	  if (! validate)
	    return 1;

	  for (i = regno; i < endregno; i++)
	    if (REG_IN_TABLE (i) != REG_TICK (i))
	      return 0;

	  return 1;
	}

    case MEM:
      if (for_gcse)
	{
	  /* A volatile mem should not be considered equivalent to any
	     other.  */
	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	    return 0;

	  /* Can't merge two expressions in different alias sets, since we
	     can decide that the expression is transparent in a block when
	     it isn't, due to it being set with the different alias set.

	     Also, can't merge two expressions with different MEM_ATTRS.
	     They could e.g. be two different entities allocated into the
	     same space on the stack (see e.g. PR25130).  In that case, the
	     MEM addresses can be the same, even though the two MEMs are
	     absolutely not equivalent.

	     But because really all MEM attributes should be the same for
	     equivalent MEMs, we just use the invariant that MEMs that have
	     the same attributes share the same mem_attrs data structure.  */
	  if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
	    return 0;

	  /* If we are handling exceptions, we cannot consider two expressions
	     with different trapping status as equivalent, because simple_mem
	     might accept one and reject the other.  */
	  if (cfun->can_throw_non_call_exceptions
	      && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
	    return 0;
	}
      break;

    /*  For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
			    validate, for_gcse)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
			       validate, for_gcse))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
			       validate, for_gcse)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
			       ASM_OPERANDS_INPUT (y, i),
			       validate, for_gcse)
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
			     validate, for_gcse))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
			       validate, for_gcse))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	case 't':
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return 1;
}
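/* For illustration: because PLUS is listed among the commutative codes
   above, (plus:SI (reg 100) (reg 101)) and (plus:SI (reg 101) (reg 100))
   are considered equivalent, while (mult:SI x y) and (mult:HI x y) are
   rejected earlier by the mode check.  */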
/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
   the result if necessary.  INSN is as for canon_reg.  */

static void
validate_canon_reg (rtx *xloc, rtx_insn *insn)
{
  if (*xloc)
    {
      rtx new_rtx = canon_reg (*xloc, insn);

      /* If replacing pseudo with hard reg or vice versa, ensure the
	 insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
      gcc_assert (insn && new_rtx);
      validate_change (insn, xloc, new_rtx, 1);
    }
}
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx_insn *insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
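/* For illustration (hypothetical register numbers): if (reg 105) and
   (reg 101) share a quantity whose oldest member is (reg 101), canon_reg
   rewrites (plus:SI (reg 105) (const_int 4)) as
   (plus:SI (reg 101) (const_int 4)), subject to validate_change confirming
   that the containing insn still matches its pattern.  */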
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   find what values are actually being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      machine_mode *pmode1, machine_mode *pmode2)
{
  rtx arg1, arg2;
2921 hash_set
<rtx
> *visited
= NULL
;
2922 /* Set nonzero when we find something of interest. */
2925 arg1
= *parg1
, arg2
= *parg2
;
2927 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2929 while (arg2
== CONST0_RTX (GET_MODE (arg1
)))
2931 int reverse_code
= 0;
2932 struct table_elt
*p
= 0;
2934 /* Remember state from previous iteration. */
2938 visited
= new hash_set
<rtx
>;
2943 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2944 On machines with CC0, this is the only case that can occur, since
2945 fold_rtx will return the COMPARE or item being compared with zero
2948 if (GET_CODE (arg1
) == COMPARE
&& arg2
== const0_rtx
)
2951 /* If ARG1 is a comparison operator and CODE is testing for
2952 STORE_FLAG_VALUE, get the inner arguments. */
2954 else if (COMPARISON_P (arg1
))
2956 #ifdef FLOAT_STORE_FLAG_VALUE
2957 REAL_VALUE_TYPE fsfv
;
2961 || (GET_MODE_CLASS (GET_MODE (arg1
)) == MODE_INT
2962 && code
== LT
&& STORE_FLAG_VALUE
== -1)
2963 #ifdef FLOAT_STORE_FLAG_VALUE
2964 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1
))
2965 && (fsfv
= FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1
)),
2966 REAL_VALUE_NEGATIVE (fsfv
)))
2971 || (GET_MODE_CLASS (GET_MODE (arg1
)) == MODE_INT
2972 && code
== GE
&& STORE_FLAG_VALUE
== -1)
2973 #ifdef FLOAT_STORE_FLAG_VALUE
2974 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1
))
2975 && (fsfv
= FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1
)),
2976 REAL_VALUE_NEGATIVE (fsfv
)))
2979 x
= arg1
, reverse_code
= 1;
2982 /* ??? We could also check for
2984 (ne (and (eq (...) (const_int 1))) (const_int 0))
2986 and related forms, but let's wait until we see them occurring. */
2989 /* Look up ARG1 in the hash table and see if it has an equivalence
2990 that lets us see what is being compared. */
2991 p
= lookup (arg1
, SAFE_HASH (arg1
, GET_MODE (arg1
)), GET_MODE (arg1
));
2994 p
= p
->first_same_value
;
2996 /* If what we compare is already known to be constant, that is as
2998 We need to break the loop in this case, because otherwise we
2999 can have an infinite loop when looking at a reg that is known
3000 to be a constant which is the same as a comparison of a reg
3001 against zero which appears later in the insn stream, which in
3002 turn is constant and the same as the comparison of the first reg
3008 for (; p
; p
= p
->next_same_value
)
3010 machine_mode inner_mode
= GET_MODE (p
->exp
);
3011 #ifdef FLOAT_STORE_FLAG_VALUE
3012 REAL_VALUE_TYPE fsfv
;
3015 /* If the entry isn't valid, skip it. */
3016 if (! exp_equiv_p (p
->exp
, p
->exp
, 1, false))
3019 /* If it's a comparison we've used before, skip it. */
3020 if (visited
&& visited
->contains (p
->exp
))
3023 if (GET_CODE (p
->exp
) == COMPARE
3024 /* Another possibility is that this machine has a compare insn
3025 that includes the comparison code. In that case, ARG1 would
3026 be equivalent to a comparison operation that would set ARG1 to
3027 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3028 ORIG_CODE is the actual comparison being done; if it is an EQ,
3029 we must reverse ORIG_CODE. On machine with a negative value
3030 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3033 && val_signbit_known_set_p (inner_mode
,
3035 #ifdef FLOAT_STORE_FLAG_VALUE
3037 && SCALAR_FLOAT_MODE_P (inner_mode
)
3038 && (fsfv
= FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1
)),
3039 REAL_VALUE_NEGATIVE (fsfv
)))
3042 && COMPARISON_P (p
->exp
)))
3047 else if ((code
== EQ
3049 && val_signbit_known_set_p (inner_mode
,
3051 #ifdef FLOAT_STORE_FLAG_VALUE
3053 && SCALAR_FLOAT_MODE_P (inner_mode
)
3054 && (fsfv
= FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1
)),
3055 REAL_VALUE_NEGATIVE (fsfv
)))
3058 && COMPARISON_P (p
->exp
))
3065 /* If this non-trapping address, e.g. fp + constant, the
3066 equivalent is a better operand since it may let us predict
3067 the value of the comparison. */
3068 else if (!rtx_addr_can_trap_p (p
->exp
))
3075 /* If we didn't find a useful equivalence for ARG1, we are done.
3076 Otherwise, set up for the next iteration. */
3080 /* If we need to reverse the comparison, make sure that that is
3081 possible -- we can't necessarily infer the value of GE from LT
3082 with floating-point operands. */
3085 enum rtx_code reversed
= reversed_comparison_code (x
, NULL_RTX
);
3086 if (reversed
== UNKNOWN
)
3091 else if (COMPARISON_P (x
))
3092 code
= GET_CODE (x
);
3093 arg1
= XEXP (x
, 0), arg2
= XEXP (x
, 1);
3096 /* Return our results. Return the modes from before fold_rtx
3097 because fold_rtx might produce const_int, and then it's too late. */
3098 *pmode1
= GET_MODE (arg1
), *pmode2
= GET_MODE (arg2
);
3099 *parg1
= fold_rtx (arg1
, 0), *parg2
= fold_rtx (arg2
, 0);
/* If X is a nontrivial arithmetic operation on an argument for which
   a constant value can be determined, return the result of operating
   on that value, as a constant.  Otherwise, return X, possibly with
   one or more operands changed to a forward-propagated constant.

   If X is a register whose contents are known, we do NOT return
   those contents here; equiv_constant is called to perform that task.
   For SUBREGs and MEMs, we do that both here and in equiv_constant.

   INSN is the insn that we may be modifying.  If it is 0, make a copy
   of X before modifying it.  */

static rtx
fold_rtx (rtx x, rtx_insn *insn)
{
3128 /* Operands of X. */
3129 /* Workaround -Wmaybe-uninitialized false positive during
3130 profiledbootstrap by initializing them. */
3131 rtx folded_arg0
= NULL_RTX
;
3132 rtx folded_arg1
= NULL_RTX
;
3134 /* Constant equivalents of first three operands of X;
3135 0 when no such equivalent is known. */
3140 /* The mode of the first operand of X. We need this for sign and zero
3142 machine_mode mode_arg0
;
3147 /* Try to perform some initial simplifications on X. */
3148 code
= GET_CODE (x
);
3153 if ((new_rtx
= equiv_constant (x
)) != NULL_RTX
)
3163 /* No use simplifying an EXPR_LIST
3164 since they are used only for lists of args
3165 in a function call's REG_EQUAL note. */
3171 return prev_insn_cc0
;
3177 for (i
= ASM_OPERANDS_INPUT_LENGTH (x
) - 1; i
>= 0; i
--)
3178 validate_change (insn
, &ASM_OPERANDS_INPUT (x
, i
),
3179 fold_rtx (ASM_OPERANDS_INPUT (x
, i
), insn
), 0);
3183 #ifdef NO_FUNCTION_CSE
3185 if (CONSTANT_P (XEXP (XEXP (x
, 0), 0)))
3190 /* Anything else goes through the loop below. */
3195 mode
= GET_MODE (x
);
3199 mode_arg0
= VOIDmode
;
3201 /* Try folding our operands.
3202 Then see which ones have constant values known. */
3204 fmt
= GET_RTX_FORMAT (code
);
3205 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3208 rtx folded_arg
= XEXP (x
, i
), const_arg
;
3209 machine_mode mode_arg
= GET_MODE (folded_arg
);
3211 switch (GET_CODE (folded_arg
))
3216 const_arg
= equiv_constant (folded_arg
);
3223 const_arg
= folded_arg
;
3228 /* The cc0-user and cc0-setter may be in different blocks if
3229 the cc0-setter potentially traps. In that case PREV_INSN_CC0
3230 will have been cleared as we exited the block with the
3233 While we could potentially track cc0 in this case, it just
3234 doesn't seem to be worth it given that cc0 targets are not
3235 terribly common or important these days and trapping math
3236 is rarely used. The combination of those two conditions
3237 necessary to trip this situation is exceedingly rare in the
3241 const_arg
= NULL_RTX
;
3245 folded_arg
= prev_insn_cc0
;
3246 mode_arg
= prev_insn_cc0_mode
;
3247 const_arg
= equiv_constant (folded_arg
);
3253 folded_arg
= fold_rtx (folded_arg
, insn
);
3254 const_arg
= equiv_constant (folded_arg
);
3258 /* For the first three operands, see if the operand
3259 is constant or equivalent to a constant. */
3263 folded_arg0
= folded_arg
;
3264 const_arg0
= const_arg
;
3265 mode_arg0
= mode_arg
;
3268 folded_arg1
= folded_arg
;
3269 const_arg1
= const_arg
;
3272 const_arg2
= const_arg
;
3276 /* Pick the least expensive of the argument and an equivalent constant
3279 && const_arg
!= folded_arg
3280 && COST_IN (const_arg
, code
, i
) <= COST_IN (folded_arg
, code
, i
)
3282 /* It's not safe to substitute the operand of a conversion
3283 operator with a constant, as the conversion's identity
3284 depends upon the mode of its operand. This optimization
3285 is handled by the call to simplify_unary_operation. */
3286 && (GET_RTX_CLASS (code
) != RTX_UNARY
3287 || GET_MODE (const_arg
) == mode_arg0
3288 || (code
!= ZERO_EXTEND
3289 && code
!= SIGN_EXTEND
3291 && code
!= FLOAT_TRUNCATE
3292 && code
!= FLOAT_EXTEND
3295 && code
!= UNSIGNED_FLOAT
3296 && code
!= UNSIGNED_FIX
)))
3297 folded_arg
= const_arg
;
3299 if (folded_arg
== XEXP (x
, i
))
3302 if (insn
== NULL_RTX
&& !changed
)
3305 validate_unshare_change (insn
, &XEXP (x
, i
), folded_arg
, 1);
3310 /* Canonicalize X if necessary, and keep const_argN and folded_argN
3311 consistent with the order in X. */
3312 if (canonicalize_change_group (insn
, x
))
3315 tem
= const_arg0
, const_arg0
= const_arg1
, const_arg1
= tem
;
3316 tem
= folded_arg0
, folded_arg0
= folded_arg1
, folded_arg1
= tem
;
3319 apply_change_group ();
3322 /* If X is an arithmetic operation, see if we can simplify it. */
3324 switch (GET_RTX_CLASS (code
))
3328 /* We can't simplify extension ops unless we know the
3330 if ((code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
3331 && mode_arg0
== VOIDmode
)
3334 new_rtx
= simplify_unary_operation (code
, mode
,
3335 const_arg0
? const_arg0
: folded_arg0
,
3341 case RTX_COMM_COMPARE
:
3342 /* See what items are actually being compared and set FOLDED_ARG[01]
3343 to those values and CODE to the actual comparison code. If any are
3344 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3345 do anything if both operands are already known to be constant. */
3347 /* ??? Vector mode comparisons are not supported yet. */
3348 if (VECTOR_MODE_P (mode
))
3351 if (const_arg0
== 0 || const_arg1
== 0)
3353 struct table_elt
*p0
, *p1
;
3354 rtx true_rtx
, false_rtx
;
3355 machine_mode mode_arg1
;
3357 if (SCALAR_FLOAT_MODE_P (mode
))
3359 #ifdef FLOAT_STORE_FLAG_VALUE
3360 true_rtx
= (CONST_DOUBLE_FROM_REAL_VALUE
3361 (FLOAT_STORE_FLAG_VALUE (mode
), mode
));
3363 true_rtx
= NULL_RTX
;
3365 false_rtx
= CONST0_RTX (mode
);
3369 true_rtx
= const_true_rtx
;
3370 false_rtx
= const0_rtx
;
3373 code
= find_comparison_args (code
, &folded_arg0
, &folded_arg1
,
3374 &mode_arg0
, &mode_arg1
);
3376 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3377 what kinds of things are being compared, so we can't do
3378 anything with this comparison. */
3380 if (mode_arg0
== VOIDmode
|| GET_MODE_CLASS (mode_arg0
) == MODE_CC
)
3383 const_arg0
= equiv_constant (folded_arg0
);
3384 const_arg1
= equiv_constant (folded_arg1
);
3386 /* If we do not now have two constants being compared, see
3387 if we can nevertheless deduce some things about the
3389 if (const_arg0
== 0 || const_arg1
== 0)
3391 if (const_arg1
!= NULL
)
3393 rtx cheapest_simplification
;
3396 struct table_elt
*p
;
3398 /* See if we can find an equivalent of folded_arg0
3399 that gets us a cheaper expression, possibly a
3400 constant through simplifications. */
3401 p
= lookup (folded_arg0
, SAFE_HASH (folded_arg0
, mode_arg0
),
3406 cheapest_simplification
= x
;
3407 cheapest_cost
= COST (x
);
3409 for (p
= p
->first_same_value
; p
!= NULL
; p
= p
->next_same_value
)
3413 /* If the entry isn't valid, skip it. */
3414 if (! exp_equiv_p (p
->exp
, p
->exp
, 1, false))
3417 /* Try to simplify using this equivalence. */
3419 = simplify_relational_operation (code
, mode
,
3424 if (simp_result
== NULL
)
3427 cost
= COST (simp_result
);
3428 if (cost
< cheapest_cost
)
3430 cheapest_cost
= cost
;
3431 cheapest_simplification
= simp_result
;
3435 /* If we have a cheaper expression now, use that
3436 and try folding it further, from the top. */
3437 if (cheapest_simplification
!= x
)
3438 return fold_rtx (copy_rtx (cheapest_simplification
),
3443 /* See if the two operands are the same. */
3445 if ((REG_P (folded_arg0
)
3446 && REG_P (folded_arg1
)
3447 && (REG_QTY (REGNO (folded_arg0
))
3448 == REG_QTY (REGNO (folded_arg1
))))
3449 || ((p0
= lookup (folded_arg0
,
3450 SAFE_HASH (folded_arg0
, mode_arg0
),
3452 && (p1
= lookup (folded_arg1
,
3453 SAFE_HASH (folded_arg1
, mode_arg0
),
3455 && p0
->first_same_value
== p1
->first_same_value
))
3456 folded_arg1
= folded_arg0
;
3458 /* If FOLDED_ARG0 is a register, see if the comparison we are
3459 doing now is either the same as we did before or the reverse
3460 (we only check the reverse if not floating-point). */
3461 else if (REG_P (folded_arg0
))
3463 int qty
= REG_QTY (REGNO (folded_arg0
));
3465 if (REGNO_QTY_VALID_P (REGNO (folded_arg0
)))
3467 struct qty_table_elem
*ent
= &qty_table
[qty
];
3469 if ((comparison_dominates_p (ent
->comparison_code
, code
)
3470 || (! FLOAT_MODE_P (mode_arg0
)
3471 && comparison_dominates_p (ent
->comparison_code
,
3472 reverse_condition (code
))))
3473 && (rtx_equal_p (ent
->comparison_const
, folded_arg1
)
3475 && rtx_equal_p (ent
->comparison_const
,
3477 || (REG_P (folded_arg1
)
3478 && (REG_QTY (REGNO (folded_arg1
)) == ent
->comparison_qty
))))
3480 if (comparison_dominates_p (ent
->comparison_code
, code
))
3495 /* If we are comparing against zero, see if the first operand is
3496 equivalent to an IOR with a constant. If so, we may be able to
3497 determine the result of this comparison. */
3498 if (const_arg1
== const0_rtx
&& !const_arg0
)
3500 rtx y
= lookup_as_function (folded_arg0
, IOR
);
3504 && (inner_const
= equiv_constant (XEXP (y
, 1))) != 0
3505 && CONST_INT_P (inner_const
)
3506 && INTVAL (inner_const
) != 0)
3507 folded_arg0
= gen_rtx_IOR (mode_arg0
, XEXP (y
, 0), inner_const
);
3511 rtx op0
= const_arg0
? const_arg0
: copy_rtx (folded_arg0
);
3512 rtx op1
= const_arg1
? const_arg1
: copy_rtx (folded_arg1
);
3513 new_rtx
= simplify_relational_operation (code
, mode
, mode_arg0
,
3519 case RTX_COMM_ARITH
:
3523 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3524 with that LABEL_REF as its second operand. If so, the result is
3525 the first operand of that MINUS. This handles switches with an
3526 ADDR_DIFF_VEC table. */
3527 if (const_arg1
&& GET_CODE (const_arg1
) == LABEL_REF
)
3530 = GET_CODE (folded_arg0
) == MINUS
? folded_arg0
3531 : lookup_as_function (folded_arg0
, MINUS
);
3533 if (y
!= 0 && GET_CODE (XEXP (y
, 1)) == LABEL_REF
3534 && LABEL_REF_LABEL (XEXP (y
, 1)) == LABEL_REF_LABEL (const_arg1
))
3537 /* Now try for a CONST of a MINUS like the above. */
3538 if ((y
= (GET_CODE (folded_arg0
) == CONST
? folded_arg0
3539 : lookup_as_function (folded_arg0
, CONST
))) != 0
3540 && GET_CODE (XEXP (y
, 0)) == MINUS
3541 && GET_CODE (XEXP (XEXP (y
, 0), 1)) == LABEL_REF
3542 && LABEL_REF_LABEL (XEXP (XEXP (y
, 0), 1)) == LABEL_REF_LABEL (const_arg1
))
3543 return XEXP (XEXP (y
, 0), 0);
3546 /* Likewise if the operands are in the other order. */
3547 if (const_arg0
&& GET_CODE (const_arg0
) == LABEL_REF
)
3550 = GET_CODE (folded_arg1
) == MINUS
? folded_arg1
3551 : lookup_as_function (folded_arg1
, MINUS
);
3553 if (y
!= 0 && GET_CODE (XEXP (y
, 1)) == LABEL_REF
3554 && LABEL_REF_LABEL (XEXP (y
, 1)) == LABEL_REF_LABEL (const_arg0
))
3557 /* Now try for a CONST of a MINUS like the above. */
3558 if ((y
= (GET_CODE (folded_arg1
) == CONST
? folded_arg1
3559 : lookup_as_function (folded_arg1
, CONST
))) != 0
3560 && GET_CODE (XEXP (y
, 0)) == MINUS
3561 && GET_CODE (XEXP (XEXP (y
, 0), 1)) == LABEL_REF
3562 && LABEL_REF_LABEL (XEXP (XEXP (y
, 0), 1)) == LABEL_REF_LABEL (const_arg0
))
3563 return XEXP (XEXP (y
, 0), 0);
3566 /* If second operand is a register equivalent to a negative
3567 CONST_INT, see if we can find a register equivalent to the
3568 positive constant. Make a MINUS if so. Don't do this for
3569 a non-negative constant since we might then alternate between
3570 choosing positive and negative constants. Having the positive
3571 constant previously-used is the more common case. Be sure
3572 the resulting constant is non-negative; if const_arg1 were
3573 the smallest negative number this would overflow: depending
3574 on the mode, this would either just be the same value (and
3575 hence not save anything) or be incorrect. */
3576 if (const_arg1
!= 0 && CONST_INT_P (const_arg1
)
3577 && INTVAL (const_arg1
) < 0
3578 /* This used to test
3580 -INTVAL (const_arg1) >= 0
3582 But The Sun V5.0 compilers mis-compiled that test. So
3583 instead we test for the problematic value in a more direct
3584 manner and hope the Sun compilers get it correct. */
3585 && INTVAL (const_arg1
) !=
3586 ((HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1))
3587 && REG_P (folded_arg1
))
3589 rtx new_const
= GEN_INT (-INTVAL (const_arg1
));
3591 = lookup (new_const
, SAFE_HASH (new_const
, mode
), mode
);
3594 for (p
= p
->first_same_value
; p
; p
= p
->next_same_value
)
3596 return simplify_gen_binary (MINUS
, mode
, folded_arg0
,
3597 canon_reg (p
->exp
, NULL
));
3602 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3603 If so, produce (PLUS Z C2-C). */
3604 if (const_arg1
!= 0 && CONST_INT_P (const_arg1
))
3606 rtx y
= lookup_as_function (XEXP (x
, 0), PLUS
);
3607 if (y
&& CONST_INT_P (XEXP (y
, 1)))
3608 return fold_rtx (plus_constant (mode
, copy_rtx (y
),
3609 -INTVAL (const_arg1
)),
3616 case SMIN
: case SMAX
: case UMIN
: case UMAX
:
3617 case IOR
: case AND
: case XOR
:
3619 case ASHIFT
: case LSHIFTRT
: case ASHIFTRT
:
3620 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3621 is known to be of similar form, we may be able to replace the
3622 operation with a combined operation. This may eliminate the
3623 intermediate operation if every use is simplified in this way.
3624 Note that the similar optimization done by combine.c only works
3625 if the intermediate operation's result has only one reference. */
3627 if (REG_P (folded_arg0
)
3628 && const_arg1
&& CONST_INT_P (const_arg1
))
3631 = (code
== ASHIFT
|| code
== ASHIFTRT
|| code
== LSHIFTRT
);
3632 rtx y
, inner_const
, new_const
;
3633 rtx canon_const_arg1
= const_arg1
;
3634 enum rtx_code associate_code
;
3637 && (INTVAL (const_arg1
) >= GET_MODE_PRECISION (mode
)
3638 || INTVAL (const_arg1
) < 0))
3640 if (SHIFT_COUNT_TRUNCATED
)
3641 canon_const_arg1
= GEN_INT (INTVAL (const_arg1
)
3642 & (GET_MODE_BITSIZE (mode
)
3648 y
= lookup_as_function (folded_arg0
, code
);
3652 /* If we have compiled a statement like
3653 "if (x == (x & mask1))", and now are looking at
3654 "x & mask2", we will have a case where the first operand
3655 of Y is the same as our first operand. Unless we detect
3656 this case, an infinite loop will result. */
3657 if (XEXP (y
, 0) == folded_arg0
)
3660 inner_const
= equiv_constant (fold_rtx (XEXP (y
, 1), 0));
3661 if (!inner_const
|| !CONST_INT_P (inner_const
))
3664 /* Don't associate these operations if they are a PLUS with the
3665 same constant and it is a power of two. These might be doable
3666 with a pre- or post-increment. Similarly for two subtracts of
3667 identical powers of two with post decrement. */
3669 if (code
== PLUS
&& const_arg1
== inner_const
3670 && ((HAVE_PRE_INCREMENT
3671 && exact_log2 (INTVAL (const_arg1
)) >= 0)
3672 || (HAVE_POST_INCREMENT
3673 && exact_log2 (INTVAL (const_arg1
)) >= 0)
3674 || (HAVE_PRE_DECREMENT
3675 && exact_log2 (- INTVAL (const_arg1
)) >= 0)
3676 || (HAVE_POST_DECREMENT
3677 && exact_log2 (- INTVAL (const_arg1
)) >= 0)))
3680 /* ??? Vector mode shifts by scalar
3681 shift operand are not supported yet. */
3682 if (is_shift
&& VECTOR_MODE_P (mode
))
3686 && (INTVAL (inner_const
) >= GET_MODE_PRECISION (mode
)
3687 || INTVAL (inner_const
) < 0))
3689 if (SHIFT_COUNT_TRUNCATED
)
3690 inner_const
= GEN_INT (INTVAL (inner_const
)
3691 & (GET_MODE_BITSIZE (mode
) - 1));
3696 /* Compute the code used to compose the constants. For example,
3697 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3699 associate_code
= (is_shift
|| code
== MINUS
? PLUS
: code
);
3701 new_const
= simplify_binary_operation (associate_code
, mode
,
3708 /* If we are associating shift operations, don't let this
3709 produce a shift of the size of the object or larger.
3710 This could occur when we follow a sign-extend by a right
3711 shift on a machine that does a sign-extend as a pair
3715 && CONST_INT_P (new_const
)
3716 && INTVAL (new_const
) >= GET_MODE_PRECISION (mode
))
3718 /* As an exception, we can turn an ASHIFTRT of this
3719 form into a shift of the number of bits - 1. */
3720 if (code
== ASHIFTRT
)
3721 new_const
= GEN_INT (GET_MODE_BITSIZE (mode
) - 1);
3722 else if (!side_effects_p (XEXP (y
, 0)))
3723 return CONST0_RTX (mode
);
3728 y
= copy_rtx (XEXP (y
, 0));
3730 /* If Y contains our first operand (the most common way this
3731 can happen is if Y is a MEM), we would do into an infinite
3732 loop if we tried to fold it. So don't in that case. */
3734 if (! reg_mentioned_p (folded_arg0
, y
))
3735 y
= fold_rtx (y
, insn
);
3737 return simplify_gen_binary (code
, mode
, y
, new_const
);
3741 case DIV
: case UDIV
:
3742 /* ??? The associative optimization performed immediately above is
3743 also possible for DIV and UDIV using associate_code of MULT.
3744 However, we would need extra code to verify that the
3745 multiplication does not overflow, that is, there is no overflow
3746 in the calculation of new_const. */
3753 new_rtx
= simplify_binary_operation (code
, mode
,
3754 const_arg0
? const_arg0
: folded_arg0
,
3755 const_arg1
? const_arg1
: folded_arg1
);
3759 /* (lo_sum (high X) X) is simply X. */
3760 if (code
== LO_SUM
&& const_arg0
!= 0
3761 && GET_CODE (const_arg0
) == HIGH
3762 && rtx_equal_p (XEXP (const_arg0
, 0), const_arg1
))
3767 case RTX_BITFIELD_OPS
:
3768 new_rtx
= simplify_ternary_operation (code
, mode
, mode_arg0
,
3769 const_arg0
? const_arg0
: folded_arg0
,
3770 const_arg1
? const_arg1
: folded_arg1
,
3771 const_arg2
? const_arg2
: XEXP (x
, 2));
3778 return new_rtx
? new_rtx
: x
;
/* Return a constant value currently equivalent to X.
   Return 0 if we don't know one.  */

static rtx
equiv_constant (rtx x)
{
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (x_ent->const_rtx)
	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
    }

  if (x == 0 || CONSTANT_P (x))
    return x;

  if (GET_CODE (x) == SUBREG)
    {
      machine_mode mode = GET_MODE (x);
      machine_mode imode = GET_MODE (SUBREG_REG (x));
      rtx new_rtx;

      /* See if we previously assigned a constant value to this SUBREG.  */
      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
	  || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
	  || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
	  || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
	return new_rtx;

      /* If we didn't and if doing so makes sense, see if we previously
	 assigned a constant value to the enclosing word mode SUBREG.  */
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
	  && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
	{
	  int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
	  if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
	    {
	      rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
	      new_rtx = lookup_as_function (y, CONST_INT);
	      if (new_rtx)
		return gen_lowpart (mode, new_rtx);
	    }
	}

      /* Otherwise see if we already have a constant for the inner REG,
	 and if that is enough to calculate an equivalent constant for
	 the subreg.  Note that the upper bits of paradoxical subregs
	 are undefined, so they cannot be said to equal anything.  */
      if (REG_P (SUBREG_REG (x))
	  && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (imode)
	  && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
	return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));

      return 0;
    }

  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
     the hash table in case its value was seen before.  */

  if (MEM_P (x))
    {
      struct table_elt *elt;

      x = avoid_constant_pool_reference (x);
      if (CONSTANT_P (x))
	return x;

      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
      if (elt == 0)
	return 0;

      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
	if (elt->is_const && CONSTANT_P (elt->exp))
	  return elt->exp;
    }

  return 0;
}
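/* For illustration (hypothetical): if (reg:SI 100) belongs to a quantity
   whose const_rtx is (const_int 42), equiv_constant ((reg:SI 100)) returns
   (const_int 42); for a SUBREG or MEM the answer may instead come from the
   hash table lookups above.  */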
/* Given INSN, a jump insn, TAKEN indicates if we are following the
   "taken" branch.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of
	if (i == 2)
   we can add the fact that `i' and '2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */

static void
record_jump_equiv (rtx_insn *insn, bool taken)
{
  int cond_known_true;
  rtx op0, op1;
  rtx set;
  machine_mode mode, mode0, mode1;
  int reversed_nonequality = 0;
  enum rtx_code code;

  /* Ensure this is the right kind of insn.  */
  gcc_assert (any_condjump_p (insn));

  set = pc_set (insn);

  /* See if this jump condition is known true or false.  */
  if (taken)
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
  else
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);

  /* Get the type of comparison being done and the operands being compared.
     If we had to reverse a non-equality condition, record that fact so we
     know that it isn't valid for floating-point.  */
  code = GET_CODE (XEXP (SET_SRC (set), 0));
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);

  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
  if (! cond_known_true)
    {
      code = reversed_comparison_code_parts (code, op0, op1, insn);

      /* Don't remember if we can't find the inverse.  */
      if (code == UNKNOWN)
	return;
    }

  /* The mode is the mode of the non-constant.  */
  mode = mode0;
  if (mode1 != VOIDmode)
    mode = mode1;

  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}
/* Yet another form of subreg creation.  In this case, we want something in
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */

static rtx
record_jump_cond_subreg (machine_mode mode, rtx op)
{
  machine_mode op_mode = GET_MODE (op);
  if (op_mode == mode || op_mode == VOIDmode)
    return op;
  return lowpart_subreg (mode, op, op_mode);
}
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */

static void
record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
		  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && paradoxical_subreg_p (op0))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
			  reversed_nonequality);
    }

  if (code == EQ && paradoxical_subreg_p (op1))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
			  reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
			  reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
			  reversed_nonequality);
    }

  /* Hash both operands.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.   Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register or constant, we can't
	 do anything.  */

      if (!REG_P (op1))
	op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
	  || !REG_P (op0) || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, NULL, 0))
	    {
	      rehash_using_reg (op0);
	      op0_hash = HASH (op0, mode);

	      /* If OP0 is contained in OP1, this changes its hash code
		 as well.  Faster to rehash than to check, except
		 for the simple case of a constant.  */
	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1,mode);
	    }

	  op0_elt = insert (op0, NULL, op0_hash, mode);
	  op0_elt->in_memory = op0_in_memory;
	}

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
	{
	  /* Look it up again--in case op0 and op1 are the same.  */
	  op1_elt = lookup (op1, op1_hash, mode);

	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, NULL, 0))
		{
		  rehash_using_reg (op1);
		  op1_hash = HASH (op1, mode);
		}

	      op1_elt = insert (op1, NULL, op1_hash, mode);
	      op1_elt->in_memory = op1_in_memory;
	    }

	  ent->comparison_const = NULL_RTX;
	  ent->comparison_qty = REG_QTY (REGNO (op1));
	}
      else
	{
	  ent->comparison_const = op1;
	  ent->comparison_qty = -1;
	}

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
	{
	  rehash_using_reg (op0);
	  op0_hash = HASH (op0, mode);
	}

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
	{
	  rehash_using_reg (op1);
	  op1_hash = HASH (op1, mode);
	}

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}
/* CSE processing for one instruction.

   Most "true" common subexpressions are mostly optimized away in GIMPLE,
   but the few that "leak through" are cleaned up by cse_insn, and complex
   addressing modes are often formed here.

   The main function is cse_insn, and between here and that function
   a couple of helper functions are defined to keep the size of cse_insn
   within reasonable proportions.

   Data is shared between the main and helper functions via struct set,
   which contains all data related to every set in the instruction that
   is being processed.

   Note that cse_main processes all sets in the instruction.  Most
   passes in GCC only process simple SET insns or single_set insns, but
   CSE processes insns with multiple sets as well.  */

/* Data on one SET contained in the instruction.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.  */
  struct table_elt *dest_addr_elt;
};
4188 /* Special handling for (set REG0 REG1) where REG0 is the
4189 "cheapest", cheaper than REG1. After cse, REG1 will probably not
4190 be used in the sequel, so (if easily done) change this insn to
4191 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4192 that computed their value. Then REG1 will become a dead store
4193 and won't cloud the situation for later optimizations.
4195 Do not make this change if REG1 is a hard register, because it will
4196 then be used in the sequel and we may be changing a two-operand insn
4197 into a three-operand insn.
4199 This is the last transformation that cse_insn will try to do. */
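/* A schematic example (hypothetical pseudo registers, purely for
   illustration; the exact preconditions are checked in the code below).
   With REG0 = (reg 118) and REG1 = (reg 117), the pair

     (set (reg 117) (plus (reg 115) (reg 116)))
     (set (reg 118) (reg 117))

   would be rewritten as

     (set (reg 118) (plus (reg 115) (reg 116)))
     (set (reg 117) (reg 118))

   so the copy into (reg 117) becomes a dead store that later passes
   can remove. */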
try_back_substitute_reg (rtx set, rtx_insn *insn)
  rtx dest = SET_DEST (set);
  rtx src = SET_SRC (set);

      && REG_P (src) && ! HARD_REGISTER_P (src)
      && REGNO_QTY_VALID_P (REGNO (src)))
      int src_q = REG_QTY (REGNO (src));
      struct qty_table_elem *src_ent = &qty_table[src_q];

      if (src_ent->first_reg == REGNO (dest))
          /* Scan for the previous nonnote insn, but stop at a basic
             block boundary. */
          rtx_insn *prev = insn;
          rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
              prev = PREV_INSN (prev);
          while (prev != bb_head
                 && (NOTE_P (prev) || DEBUG_INSN_P (prev)));

          /* Do not swap the registers around if the previous instruction
             attaches a REG_EQUIV note to REG1.

             ??? It's not entirely clear whether we can transfer a REG_EQUIV
             from the pseudo that originally shadowed an incoming argument
             to another register. Some uses of REG_EQUIV might rely on it
             being attached to REG1 rather than REG2.

             This section previously turned the REG_EQUIV into a REG_EQUAL
             note. We cannot do that because REG_EQUIV may provide an
             uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
          if (NONJUMP_INSN_P (prev)
              && GET_CODE (PATTERN (prev)) == SET
              && SET_DEST (PATTERN (prev)) == src
              && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
              validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
              validate_change (insn, &SET_DEST (set), src, 1);
              validate_change (insn, &SET_SRC (set), dest, 1);
              apply_change_group ();

              /* If INSN has a REG_EQUAL note, and this note mentions
                 REG0, then we must delete it, because the value in
                 REG0 has changed. If the note's value is REG1, we must
                 also delete it because that is now this insn's dest. */
              note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
                  && (reg_mentioned_p (dest, XEXP (note, 0))
                      || rtx_equal_p (src, XEXP (note, 0))))
                remove_note (insn, note);
/* Record all the SETs in this instruction into SETS_PTR,
   and return the number of recorded sets. */
find_sets_in_insn (rtx_insn *insn, struct set **psets)
  struct set *sets = *psets;
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == SET)
      /* Ignore SETs that are unconditional jumps.
         They never need cse processing, so this does not hurt.
         The reason is not efficiency but rather
         so that we can test at the end for instructions
         that have been simplified to unconditional jumps
         and not be misled by unchanged instructions
         that were unconditional jumps to begin with. */
      if (SET_DEST (x) == pc_rtx
          && GET_CODE (SET_SRC (x)) == LABEL_REF)
      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
         The hard function value register is used only once, to copy to
         someplace else, so it isn't worth cse'ing. */
      else if (GET_CODE (SET_SRC (x)) == CALL)
        sets[n_sets++].rtl = x;
  else if (GET_CODE (x) == PARALLEL)
      int i, lim = XVECLEN (x, 0);

      /* Go over the expressions of the PARALLEL in forward order, to
         put them in the same order in the SETS array. */
      for (i = 0; i < lim; i++)
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == SET)
              /* As above, we ignore unconditional jumps and call-insns and
                 ignore the result of apply_change_group. */
              if (SET_DEST (y) == pc_rtx
                  && GET_CODE (SET_SRC (y)) == LABEL_REF)
              else if (GET_CODE (SET_SRC (y)) == CALL)
                sets[n_sets++].rtl = y;
/* Where possible, substitute every register reference in each of the
   N_SETS sets in INSN with the canonical register.

   Register canonicalization propagates the earliest register (i.e.
   one that is set before INSN) with the same value. This is a very
   useful, simple form of CSE, to clean up warts from expanding GIMPLE
   to RTL. For instance, a CONST for an address is usually expanded
   multiple times to loads into different registers, thus creating many
   subexpressions of the form:

     (set (reg1) (some_const))
     (set (mem (... reg1 ...) (thing)))
     (set (reg2) (some_const))
     (set (mem (... reg2 ...) (thing)))

   After canonicalizing, the code takes the following form:

     (set (reg1) (some_const))
     (set (mem (... reg1 ...) (thing)))
     (set (reg2) (some_const))
     (set (mem (... reg1 ...) (thing)))

   The set to reg2 is now trivially dead, and the memory reference (or
   address, or whatever) may be a candidate for further CSEing.

   In this function, the result of apply_change_group can be ignored;
canonicalize_insn (rtx_insn *insn, struct set **psets, int n_sets)
  struct set *sets = *psets;
  rtx x = PATTERN (insn);

    for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
      if (GET_CODE (XEXP (tem, 0)) != SET)
        XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);

  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
      canon_reg (SET_SRC (x), insn);
      apply_change_group ();
      fold_rtx (SET_SRC (x), insn);
  else if (GET_CODE (x) == CLOBBER)
      /* If we clobber memory, canon the address.
         This does nothing when a register is clobbered
         because we have already invalidated the reg. */
      if (MEM_P (XEXP (x, 0)))
        canon_reg (XEXP (x, 0), insn);
  else if (GET_CODE (x) == USE
           && ! (REG_P (XEXP (x, 0))
                 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
    /* Canonicalize a USE of a pseudo register or memory location. */
    canon_reg (x, insn);
  else if (GET_CODE (x) == ASM_OPERANDS)
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
          rtx input = ASM_OPERANDS_INPUT (x, i);
          if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER))
              input = canon_reg (input, insn);
              validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
  else if (GET_CODE (x) == CALL)
      canon_reg (x, insn);
      apply_change_group ();
  else if (DEBUG_INSN_P (insn))
    canon_reg (PATTERN (insn), insn);
  else if (GET_CODE (x) == PARALLEL)
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
              canon_reg (SET_SRC (y), insn);
              apply_change_group ();
              fold_rtx (SET_SRC (y), insn);
          else if (GET_CODE (y) == CLOBBER)
              if (MEM_P (XEXP (y, 0)))
                canon_reg (XEXP (y, 0), insn);
          else if (GET_CODE (y) == USE
                   && ! (REG_P (XEXP (y, 0))
                         && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
            canon_reg (y, insn);
          else if (GET_CODE (y) == CALL)
              canon_reg (y, insn);
              apply_change_group ();

  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
      /* We potentially will process this insn many times. Therefore,
         drop the REG_EQUAL note if it is equal to the SET_SRC of the
         main set.

         Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
         because cse_insn handles those specially. */
      if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
          && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
        remove_note (insn, tem);
          canon_reg (XEXP (tem, 0), insn);
          apply_change_group ();
          XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
          df_notes_rescan (insn);
4451 /* Canonicalize sources and addresses of destinations.
4452 We do this in a separate pass to avoid problems when a MATCH_DUP is
4453 present in the insn pattern. In that case, we want to ensure that
4454 we don't break the duplicate nature of the pattern. So we will replace
4455 both operands at the same time. Otherwise, we would fail to find an
4456 equivalent substitution in the loop calling validate_change below.
4458 We used to suppress canonicalization of DEST if it appears in SRC,
4459 but we don't do this any more. */
  for (i = 0; i < n_sets; i++)
      rtx dest = SET_DEST (sets[i].rtl);
      rtx src = SET_SRC (sets[i].rtl);
      rtx new_rtx = canon_reg (src, insn);

        validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);

      if (GET_CODE (dest) == ZERO_EXTRACT)
          validate_change (insn, &XEXP (dest, 1),
                           canon_reg (XEXP (dest, 1), insn), 1);
          validate_change (insn, &XEXP (dest, 2),
                           canon_reg (XEXP (dest, 2), insn), 1);

      while (GET_CODE (dest) == SUBREG
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

        canon_reg (dest, insn);

  /* Now that we have done all the replacements, we can apply the change
     group and see if they all work. Note that this will cause some
     canonicalizations that would have worked individually not to be applied
     because some other canonicalization didn't work, but this should not
     matter.

     The result of apply_change_group can be ignored; see canon_reg. */
  apply_change_group ();
/* Main function of CSE.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values. */
4504 cse_insn (rtx_insn
*insn
)
4506 rtx x
= PATTERN (insn
);
4512 struct table_elt
*src_eqv_elt
= 0;
4513 int src_eqv_volatile
= 0;
4514 int src_eqv_in_memory
= 0;
4515 unsigned src_eqv_hash
= 0;
4517 struct set
*sets
= (struct set
*) 0;
4519 if (GET_CODE (x
) == SET
)
4520 sets
= XALLOCA (struct set
);
4521 else if (GET_CODE (x
) == PARALLEL
)
4522 sets
= XALLOCAVEC (struct set
, XVECLEN (x
, 0));
4526 /* Records what this insn does to set CC0. */
4528 this_insn_cc0_mode
= VOIDmode
;
4531 /* Find all regs explicitly clobbered in this insn,
4532 to ensure they are not replaced with any other regs
4533 elsewhere in this insn. */
4534 invalidate_from_sets_and_clobbers (insn
);
4536 /* Record all the SETs in this instruction. */
4537 n_sets
= find_sets_in_insn (insn
, &sets
);
4539 /* Substitute the canonical register where possible. */
4540 canonicalize_insn (insn
, &sets
, n_sets
);
4542 /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4543 if different, or if the DEST is a STRICT_LOW_PART. The latter condition
4544 is necessary because SRC_EQV is handled specially for this case, and if
4545 it isn't set, then there will be no equivalence for the destination. */
4546 if (n_sets
== 1 && REG_NOTES (insn
) != 0
4547 && (tem
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)) != 0
4548 && (! rtx_equal_p (XEXP (tem
, 0), SET_SRC (sets
[0].rtl
))
4549 || GET_CODE (SET_DEST (sets
[0].rtl
)) == STRICT_LOW_PART
))
4550 src_eqv
= copy_rtx (XEXP (tem
, 0));
  /* Set sets[i].src_elt to the class each source belongs to.
     Detect assignments from or to volatile things
     and set sets[i] to zero so they will be ignored
     in the rest of this function.

     Nothing in this loop changes the hash table or the register chains. */
4559 for (i
= 0; i
< n_sets
; i
++)
4561 bool repeat
= false;
4564 struct table_elt
*elt
= 0, *p
;
4568 rtx src_related
= 0;
4569 bool src_related_is_const_anchor
= false;
4570 struct table_elt
*src_const_elt
= 0;
4571 int src_cost
= MAX_COST
;
4572 int src_eqv_cost
= MAX_COST
;
4573 int src_folded_cost
= MAX_COST
;
4574 int src_related_cost
= MAX_COST
;
4575 int src_elt_cost
= MAX_COST
;
4576 int src_regcost
= MAX_COST
;
4577 int src_eqv_regcost
= MAX_COST
;
4578 int src_folded_regcost
= MAX_COST
;
4579 int src_related_regcost
= MAX_COST
;
4580 int src_elt_regcost
= MAX_COST
;
      /* Set nonzero if we need to call force_const_mem on the
         contents of src_folded before using it. */
      int src_folded_force_flag = 0;
4585 dest
= SET_DEST (sets
[i
].rtl
);
4586 src
= SET_SRC (sets
[i
].rtl
);
4588 /* If SRC is a constant that has no machine mode,
4589 hash it with the destination's machine mode.
4590 This way we can keep different modes separate. */
4592 mode
= GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
4593 sets
[i
].mode
= mode
;
4597 machine_mode eqvmode
= mode
;
4598 if (GET_CODE (dest
) == STRICT_LOW_PART
)
4599 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
4601 hash_arg_in_memory
= 0;
4602 src_eqv_hash
= HASH (src_eqv
, eqvmode
);
4604 /* Find the equivalence class for the equivalent expression. */
4607 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash
, eqvmode
);
4609 src_eqv_volatile
= do_not_record
;
4610 src_eqv_in_memory
= hash_arg_in_memory
;
4613 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4614 value of the INNER register, not the destination. So it is not
4615 a valid substitution for the source. But save it for later. */
4616 if (GET_CODE (dest
) == STRICT_LOW_PART
)
4619 src_eqv_here
= src_eqv
;
      /* Simplify any foldable subexpressions in SRC. Then get the fully-
         simplified result, which may not necessarily be valid. */
      src_folded = fold_rtx (src, insn);
      /* ??? This caused bad code to be generated for the m68k port with -O2.
         Suppose src is (CONST_INT -1), and that after truncation src_folded
         is (CONST_INT 3). Suppose src_folded is then used for src_const.
         At the end we will add src and src_const to the same equivalence
         class. We now have 3 and -1 on the same equivalence class. This
         causes later instructions to be mis-optimized. */
      /* If storing a constant in a bitfield, pre-truncate the constant
         so we will be able to record it later. */
      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
          rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

          if (CONST_INT_P (src)
              && CONST_INT_P (width)
              && INTVAL (width) < HOST_BITS_PER_WIDE_INT
              && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
              = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
                                          << INTVAL (width)) - 1));
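          /* Worked example with illustrative numbers (not from any
             particular target): storing (const_int 0x1f3) into a 4-bit
             ZERO_EXTRACT gives INTVAL (width) == 4, so the recorded value
             is 0x1f3 & ((1 << 4) - 1) == 0x3, which matches what the
             bit-field will actually hold after the store. */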
4648 /* Compute SRC's hash code, and also notice if it
4649 should not be recorded at all. In that case,
4650 prevent any further processing of this assignment. */
4652 hash_arg_in_memory
= 0;
4655 sets
[i
].src_hash
= HASH (src
, mode
);
4656 sets
[i
].src_volatile
= do_not_record
;
4657 sets
[i
].src_in_memory
= hash_arg_in_memory
;
4659 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4660 a pseudo, do not record SRC. Using SRC as a replacement for
4661 anything else will be incorrect in that situation. Note that
4662 this usually occurs only for stack slots, in which case all the
4663 RTL would be referring to SRC, so we don't lose any optimization
4664 opportunities by not having SRC in the hash table. */
4667 && find_reg_note (insn
, REG_EQUIV
, NULL_RTX
) != 0
4669 && REGNO (dest
) >= FIRST_PSEUDO_REGISTER
)
4670 sets
[i
].src_volatile
= 1;
4672 else if (GET_CODE (src
) == ASM_OPERANDS
4673 && GET_CODE (x
) == PARALLEL
)
4675 /* Do not record result of a non-volatile inline asm with
4676 more than one result. */
4678 sets
[i
].src_volatile
= 1;
4680 int j
, lim
= XVECLEN (x
, 0);
4681 for (j
= 0; j
< lim
; j
++)
4683 rtx y
= XVECEXP (x
, 0, j
);
4684 /* And do not record result of a non-volatile inline asm
4685 with "memory" clobber. */
4686 if (GET_CODE (y
) == CLOBBER
&& MEM_P (XEXP (y
, 0)))
4688 sets
[i
].src_volatile
= 1;
4695 /* It is no longer clear why we used to do this, but it doesn't
4696 appear to still be needed. So let's try without it since this
4697 code hurts cse'ing widened ops. */
4698 /* If source is a paradoxical subreg (such as QI treated as an SI),
4699 treat it as volatile. It may do the work of an SI in one context
4700 where the extra bits are not being used, but cannot replace an SI
4702 if (paradoxical_subreg_p (src
))
4703 sets
[i
].src_volatile
= 1;
4706 /* Locate all possible equivalent forms for SRC. Try to replace
4707 SRC in the insn with each cheaper equivalent.
4709 We have the following types of equivalents: SRC itself, a folded
4710 version, a value given in a REG_EQUAL note, or a value related
4713 Each of these equivalents may be part of an additional class
4714 of equivalents (if more than one is in the table, they must be in
4715 the same class; we check for this).
4717 If the source is volatile, we don't do any table lookups.
4719 We note any constant equivalent for possible later use in a
4722 if (!sets
[i
].src_volatile
)
4723 elt
= lookup (src
, sets
[i
].src_hash
, mode
);
4725 sets
[i
].src_elt
= elt
;
4727 if (elt
&& src_eqv_here
&& src_eqv_elt
)
4729 if (elt
->first_same_value
!= src_eqv_elt
->first_same_value
)
4731 /* The REG_EQUAL is indicating that two formerly distinct
4732 classes are now equivalent. So merge them. */
4733 merge_equiv_classes (elt
, src_eqv_elt
);
4734 src_eqv_hash
= HASH (src_eqv
, elt
->mode
);
4735 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash
, elt
->mode
);
4741 else if (src_eqv_elt
)
4744 /* Try to find a constant somewhere and record it in `src_const'.
4745 Record its table element, if any, in `src_const_elt'. Look in
4746 any known equivalences first. (If the constant is not in the
4747 table, also set `sets[i].src_const_hash'). */
4749 for (p
= elt
->first_same_value
; p
; p
= p
->next_same_value
)
4753 src_const_elt
= elt
;
4758 && (CONSTANT_P (src_folded
)
4759 /* Consider (minus (label_ref L1) (label_ref L2)) as
4760 "constant" here so we will record it. This allows us
4761 to fold switch statements when an ADDR_DIFF_VEC is used. */
4762 || (GET_CODE (src_folded
) == MINUS
4763 && GET_CODE (XEXP (src_folded
, 0)) == LABEL_REF
4764 && GET_CODE (XEXP (src_folded
, 1)) == LABEL_REF
)))
4765 src_const
= src_folded
, src_const_elt
= elt
;
4766 else if (src_const
== 0 && src_eqv_here
&& CONSTANT_P (src_eqv_here
))
4767 src_const
= src_eqv_here
, src_const_elt
= src_eqv_elt
;
4769 /* If we don't know if the constant is in the table, get its
4770 hash code and look it up. */
4771 if (src_const
&& src_const_elt
== 0)
4773 sets
[i
].src_const_hash
= HASH (src_const
, mode
);
4774 src_const_elt
= lookup (src_const
, sets
[i
].src_const_hash
, mode
);
4777 sets
[i
].src_const
= src_const
;
4778 sets
[i
].src_const_elt
= src_const_elt
;
4780 /* If the constant and our source are both in the table, mark them as
4781 equivalent. Otherwise, if a constant is in the table but the source
4782 isn't, set ELT to it. */
4783 if (src_const_elt
&& elt
4784 && src_const_elt
->first_same_value
!= elt
->first_same_value
)
4785 merge_equiv_classes (elt
, src_const_elt
);
4786 else if (src_const_elt
&& elt
== 0)
4787 elt
= src_const_elt
;
4789 /* See if there is a register linearly related to a constant
4790 equivalent of SRC. */
4792 && (GET_CODE (src_const
) == CONST
4793 || (src_const_elt
&& src_const_elt
->related_value
!= 0)))
4795 src_related
= use_related_value (src_const
, src_const_elt
);
4798 struct table_elt
*src_related_elt
4799 = lookup (src_related
, HASH (src_related
, mode
), mode
);
4800 if (src_related_elt
&& elt
)
4802 if (elt
->first_same_value
4803 != src_related_elt
->first_same_value
)
4804 /* This can occur when we previously saw a CONST
4805 involving a SYMBOL_REF and then see the SYMBOL_REF
4806 twice. Merge the involved classes. */
4807 merge_equiv_classes (elt
, src_related_elt
);
4810 src_related_elt
= 0;
4812 else if (src_related_elt
&& elt
== 0)
4813 elt
= src_related_elt
;
4817 /* See if we have a CONST_INT that is already in a register in a
4820 if (src_const
&& src_related
== 0 && CONST_INT_P (src_const
)
4821 && GET_MODE_CLASS (mode
) == MODE_INT
4822 && GET_MODE_PRECISION (mode
) < BITS_PER_WORD
)
4824 machine_mode wider_mode
;
4826 for (wider_mode
= GET_MODE_WIDER_MODE (mode
);
4827 wider_mode
!= VOIDmode
4828 && GET_MODE_PRECISION (wider_mode
) <= BITS_PER_WORD
4829 && src_related
== 0;
4830 wider_mode
= GET_MODE_WIDER_MODE (wider_mode
))
4832 struct table_elt
*const_elt
4833 = lookup (src_const
, HASH (src_const
, wider_mode
), wider_mode
);
4838 for (const_elt
= const_elt
->first_same_value
;
4839 const_elt
; const_elt
= const_elt
->next_same_value
)
4840 if (REG_P (const_elt
->exp
))
4842 src_related
= gen_lowpart (mode
, const_elt
->exp
);
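          /* Illustrative case (hypothetical register numbers): if
             (const_int 5) is already known to live in (reg:SI 120) and the
             current SET needs the same constant in QImode, src_related
             becomes gen_lowpart (QImode, (reg:SI 120)), i.e. the QImode
             low part of that register, which is usually cheaper than
             materializing the constant again. */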
4848 /* Another possibility is that we have an AND with a constant in
4849 a mode narrower than a word. If so, it might have been generated
4850 as part of an "if" which would narrow the AND. If we already
4851 have done the AND in a wider mode, we can use a SUBREG of that
4854 if (flag_expensive_optimizations
&& ! src_related
4855 && GET_CODE (src
) == AND
&& CONST_INT_P (XEXP (src
, 1))
4856 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
4859 rtx new_and
= gen_rtx_AND (VOIDmode
, NULL_RTX
, XEXP (src
, 1));
4861 for (tmode
= GET_MODE_WIDER_MODE (mode
);
4862 GET_MODE_SIZE (tmode
) <= UNITS_PER_WORD
;
4863 tmode
= GET_MODE_WIDER_MODE (tmode
))
4865 rtx inner
= gen_lowpart (tmode
, XEXP (src
, 0));
4866 struct table_elt
*larger_elt
;
4870 PUT_MODE (new_and
, tmode
);
4871 XEXP (new_and
, 0) = inner
;
4872 larger_elt
= lookup (new_and
, HASH (new_and
, tmode
), tmode
);
4873 if (larger_elt
== 0)
4876 for (larger_elt
= larger_elt
->first_same_value
;
4877 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
4878 if (REG_P (larger_elt
->exp
))
4881 = gen_lowpart (mode
, larger_elt
->exp
);
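          /* Illustrative case (hypothetical RTL): for
               (set (reg:HI 130) (and:HI (reg:HI 129) (const_int 255)))
             where the same AND was already computed in SImode and recorded
             as (reg:SI 125), src_related becomes the HImode low part of
             (reg:SI 125) instead of redoing the AND in the narrow mode. */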
4891 #ifdef LOAD_EXTEND_OP
4892 /* See if a MEM has already been loaded with a widening operation;
4893 if it has, we can use a subreg of that. Many CISC machines
4894 also have such operations, but this is only likely to be
4895 beneficial on these machines. */
4897 if (flag_expensive_optimizations
&& src_related
== 0
4898 && (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
4899 && GET_MODE_CLASS (mode
) == MODE_INT
4900 && MEM_P (src
) && ! do_not_record
4901 && LOAD_EXTEND_OP (mode
) != UNKNOWN
)
4903 struct rtx_def memory_extend_buf
;
4904 rtx memory_extend_rtx
= &memory_extend_buf
;
4907 /* Set what we are trying to extend and the operation it might
4908 have been extended with. */
4909 memset (memory_extend_rtx
, 0, sizeof (*memory_extend_rtx
));
4910 PUT_CODE (memory_extend_rtx
, LOAD_EXTEND_OP (mode
));
4911 XEXP (memory_extend_rtx
, 0) = src
;
4913 for (tmode
= GET_MODE_WIDER_MODE (mode
);
4914 GET_MODE_SIZE (tmode
) <= UNITS_PER_WORD
;
4915 tmode
= GET_MODE_WIDER_MODE (tmode
))
4917 struct table_elt
*larger_elt
;
4919 PUT_MODE (memory_extend_rtx
, tmode
);
4920 larger_elt
= lookup (memory_extend_rtx
,
4921 HASH (memory_extend_rtx
, tmode
), tmode
);
4922 if (larger_elt
== 0)
4925 for (larger_elt
= larger_elt
->first_same_value
;
4926 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
4927 if (REG_P (larger_elt
->exp
))
4929 src_related
= gen_lowpart (mode
, larger_elt
->exp
);
4937 #endif /* LOAD_EXTEND_OP */
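      /* Illustrative case (hypothetical, assuming a target whose
         LOAD_EXTEND_OP is ZERO_EXTEND): if (zero_extend:SI (mem:QI addr))
         is already available in (reg:SI 140), a later QImode read of the
         same memory can use the QImode low part of (reg:SI 140) instead
         of loading the MEM again. */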
      /* Try to express the constant using a register+offset expression
         derived from a constant anchor. */
      if (targetm.const_anchor
          && GET_CODE (src_const) == CONST_INT)
          src_related = try_const_anchors (src_const, mode);
          src_related_is_const_anchor = src_related != NULL_RTX;
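      /* Illustrative case (hypothetical anchor and register numbers):
         if the target's const_anchor rounds constants down to multiples
         of 0x8000 and (reg 150) is known to hold 0x8000, then a use of
         (const_int 0x8004) may come back from try_const_anchors as
         (plus (reg 150) (const_int 4)), trading a multi-insn constant
         load for a cheap register-plus-offset form. */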
4952 if (src
== src_folded
)
4955 /* At this point, ELT, if nonzero, points to a class of expressions
4956 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
4957 and SRC_RELATED, if nonzero, each contain additional equivalent
4958 expressions. Prune these latter expressions by deleting expressions
4959 already in the equivalence class.
4961 Check for an equivalent identical to the destination. If found,
4962 this is the preferred equivalent since it will likely lead to
4963 elimination of the insn. Indicate this by placing it in
4967 elt
= elt
->first_same_value
;
4968 for (p
= elt
; p
; p
= p
->next_same_value
)
4970 enum rtx_code code
= GET_CODE (p
->exp
);
4972 /* If the expression is not valid, ignore it. Then we do not
4973 have to check for validity below. In most cases, we can use
4974 `rtx_equal_p', since canonicalization has already been done. */
4975 if (code
!= REG
&& ! exp_equiv_p (p
->exp
, p
->exp
, 1, false))
4978 /* Also skip paradoxical subregs, unless that's what we're
4980 if (paradoxical_subreg_p (p
->exp
)
4982 && GET_CODE (src
) == SUBREG
4983 && GET_MODE (src
) == GET_MODE (p
->exp
)
4984 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src
)))
4985 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p
->exp
))))))
4988 if (src
&& GET_CODE (src
) == code
&& rtx_equal_p (src
, p
->exp
))
4990 else if (src_folded
&& GET_CODE (src_folded
) == code
4991 && rtx_equal_p (src_folded
, p
->exp
))
4993 else if (src_eqv_here
&& GET_CODE (src_eqv_here
) == code
4994 && rtx_equal_p (src_eqv_here
, p
->exp
))
4996 else if (src_related
&& GET_CODE (src_related
) == code
4997 && rtx_equal_p (src_related
, p
->exp
))
5000 /* This is the same as the destination of the insns, we want
5001 to prefer it. Copy it to src_related. The code below will
5002 then give it a negative cost. */
5003 if (GET_CODE (dest
) == code
&& rtx_equal_p (p
->exp
, dest
))
5007 /* Find the cheapest valid equivalent, trying all the available
5008 possibilities. Prefer items not in the hash table to ones
5009 that are when they are equal cost. Note that we can never
5010 worsen an insn as the current contents will also succeed.
5011 If we find an equivalent identical to the destination, use it as best,
5012 since this insn will probably be eliminated in that case. */
5015 if (rtx_equal_p (src
, dest
))
5016 src_cost
= src_regcost
= -1;
5019 src_cost
= COST (src
);
5020 src_regcost
= approx_reg_cost (src
);
5026 if (rtx_equal_p (src_eqv_here
, dest
))
5027 src_eqv_cost
= src_eqv_regcost
= -1;
5030 src_eqv_cost
= COST (src_eqv_here
);
5031 src_eqv_regcost
= approx_reg_cost (src_eqv_here
);
5037 if (rtx_equal_p (src_folded
, dest
))
5038 src_folded_cost
= src_folded_regcost
= -1;
5041 src_folded_cost
= COST (src_folded
);
5042 src_folded_regcost
= approx_reg_cost (src_folded
);
5048 if (rtx_equal_p (src_related
, dest
))
5049 src_related_cost
= src_related_regcost
= -1;
5052 src_related_cost
= COST (src_related
);
5053 src_related_regcost
= approx_reg_cost (src_related
);
5055 /* If a const-anchor is used to synthesize a constant that
5056 normally requires multiple instructions then slightly prefer
5057 it over the original sequence. These instructions are likely
5058 to become redundant now. We can't compare against the cost
5059 of src_eqv_here because, on MIPS for example, multi-insn
5060 constants have zero cost; they are assumed to be hoisted from
5062 if (src_related_is_const_anchor
5063 && src_related_cost
== src_cost
5069 /* If this was an indirect jump insn, a known label will really be
5070 cheaper even though it looks more expensive. */
5071 if (dest
== pc_rtx
&& src_const
&& GET_CODE (src_const
) == LABEL_REF
)
5072 src_folded
= src_const
, src_folded_cost
= src_folded_regcost
= -1;
5074 /* Terminate loop when replacement made. This must terminate since
5075 the current contents will be tested and will always be valid. */
5080 /* Skip invalid entries. */
5081 while (elt
&& !REG_P (elt
->exp
)
5082 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
5083 elt
= elt
->next_same_value
;
5085 /* A paradoxical subreg would be bad here: it'll be the right
5086 size, but later may be adjusted so that the upper bits aren't
5087 what we want. So reject it. */
5089 && paradoxical_subreg_p (elt
->exp
)
5090 /* It is okay, though, if the rtx we're trying to match
5091 will ignore any of the bits we can't predict. */
5093 && GET_CODE (src
) == SUBREG
5094 && GET_MODE (src
) == GET_MODE (elt
->exp
)
5095 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src
)))
5096 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt
->exp
))))))
5098 elt
= elt
->next_same_value
;
5104 src_elt_cost
= elt
->cost
;
5105 src_elt_regcost
= elt
->regcost
;
5108 /* Find cheapest and skip it for the next time. For items
5109 of equal cost, use this order:
5110 src_folded, src, src_eqv, src_related and hash table entry. */
5112 && preferable (src_folded_cost
, src_folded_regcost
,
5113 src_cost
, src_regcost
) <= 0
5114 && preferable (src_folded_cost
, src_folded_regcost
,
5115 src_eqv_cost
, src_eqv_regcost
) <= 0
5116 && preferable (src_folded_cost
, src_folded_regcost
,
5117 src_related_cost
, src_related_regcost
) <= 0
5118 && preferable (src_folded_cost
, src_folded_regcost
,
5119 src_elt_cost
, src_elt_regcost
) <= 0)
5121 trial
= src_folded
, src_folded_cost
= MAX_COST
;
5122 if (src_folded_force_flag
)
5124 rtx forced
= force_const_mem (mode
, trial
);
5130 && preferable (src_cost
, src_regcost
,
5131 src_eqv_cost
, src_eqv_regcost
) <= 0
5132 && preferable (src_cost
, src_regcost
,
5133 src_related_cost
, src_related_regcost
) <= 0
5134 && preferable (src_cost
, src_regcost
,
5135 src_elt_cost
, src_elt_regcost
) <= 0)
5136 trial
= src
, src_cost
= MAX_COST
;
5137 else if (src_eqv_here
5138 && preferable (src_eqv_cost
, src_eqv_regcost
,
5139 src_related_cost
, src_related_regcost
) <= 0
5140 && preferable (src_eqv_cost
, src_eqv_regcost
,
5141 src_elt_cost
, src_elt_regcost
) <= 0)
5142 trial
= src_eqv_here
, src_eqv_cost
= MAX_COST
;
5143 else if (src_related
5144 && preferable (src_related_cost
, src_related_regcost
,
5145 src_elt_cost
, src_elt_regcost
) <= 0)
5146 trial
= src_related
, src_related_cost
= MAX_COST
;
5150 elt
= elt
->next_same_value
;
5151 src_elt_cost
= MAX_COST
;
5154 /* Avoid creation of overlapping memory moves. */
5155 if (MEM_P (trial
) && MEM_P (SET_DEST (sets
[i
].rtl
)))
5159 /* BLKmode moves are not handled by cse anyway. */
5160 if (GET_MODE (trial
) == BLKmode
)
5163 src
= canon_rtx (trial
);
5164 dest
= canon_rtx (SET_DEST (sets
[i
].rtl
));
5166 if (!MEM_P (src
) || !MEM_P (dest
)
5167 || !nonoverlapping_memrefs_p (src
, dest
, false))
             (set (reg:M N) (const_int A))
             (set (reg:M2 O) (const_int B))
             (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))

          if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
              && CONST_INT_P (trial)
              && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
              && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
              && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
              && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
                  >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
              && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
                  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
                  <= HOST_BITS_PER_WIDE_INT))
              rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
              rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
              rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
              unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
              struct table_elt *dest_elt
                = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
              rtx dest_cst = NULL;

                for (p = dest_elt->first_same_value; p; p = p->next_same_value)
                  if (p->is_const && CONST_INT_P (p->exp))

                  HOST_WIDE_INT val = INTVAL (dest_cst);

                  if (BITS_BIG_ENDIAN)
                    shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
                            - INTVAL (pos) - INTVAL (width);
                    shift = INTVAL (pos);
                  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
                    mask = ~(HOST_WIDE_INT) 0;
                    mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
                  val &= ~(mask << shift);
                  val |= (INTVAL (trial) & mask) << shift;
                  val = trunc_int_for_mode (val, GET_MODE (dest_reg));
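                  /* Worked example with illustrative numbers: if dest_cst
                     is 0xff00ff, INTVAL (pos) == 8, INTVAL (width) == 8,
                     trial == (const_int 0x12) and !BITS_BIG_ENDIAN, then
                     shift == 8, mask == 0xff and
                     val == (0xff00ff & ~0xff00) | (0x12 << 8) == 0xff12ff. */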
                  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
                  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
                  if (apply_change_group ())
                      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
                        remove_note (insn, note);
                      df_notes_rescan (insn);

          src_eqv_volatile = 0;
          src_eqv_in_memory = 0;
5242 /* We don't normally have an insn matching (set (pc) (pc)), so
5243 check for this separately here. We will delete such an
5246 For other cases such as a table jump or conditional jump
5247 where we know the ultimate target, go ahead and replace the
5248 operand. While that may not make a valid insn, we will
5249 reemit the jump below (and also insert any necessary
5251 if (n_sets
== 1 && dest
== pc_rtx
5253 || (GET_CODE (trial
) == LABEL_REF
5254 && ! condjump_p (insn
))))
5256 /* Don't substitute non-local labels, this confuses CFG. */
5257 if (GET_CODE (trial
) == LABEL_REF
5258 && LABEL_REF_NONLOCAL_P (trial
))
5261 SET_SRC (sets
[i
].rtl
) = trial
;
5262 cse_jumps_altered
= true;
5266 /* Reject certain invalid forms of CONST that we create. */
5267 else if (CONSTANT_P (trial
)
5268 && GET_CODE (trial
) == CONST
5269 /* Reject cases that will cause decode_rtx_const to
5270 die. On the alpha when simplifying a switch, we
5271 get (const (truncate (minus (label_ref)
5273 && (GET_CODE (XEXP (trial
, 0)) == TRUNCATE
5274 /* Likewise on IA-64, except without the
5276 || (GET_CODE (XEXP (trial
, 0)) == MINUS
5277 && GET_CODE (XEXP (XEXP (trial
, 0), 0)) == LABEL_REF
5278 && GET_CODE (XEXP (XEXP (trial
, 0), 1)) == LABEL_REF
)))
5279 /* Do nothing for this case. */
5282 /* Look for a substitution that makes a valid insn. */
5283 else if (validate_unshare_change (insn
, &SET_SRC (sets
[i
].rtl
),
5286 rtx new_rtx
= canon_reg (SET_SRC (sets
[i
].rtl
), insn
);
5288 /* The result of apply_change_group can be ignored; see
5291 validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_rtx
, 1);
5292 apply_change_group ();
5297 /* If we previously found constant pool entries for
5298 constants and this is a constant, try making a
5299 pool entry. Put it in src_folded unless we already have done
5300 this since that is where it likely came from. */
5302 else if (constant_pool_entries_cost
5303 && CONSTANT_P (trial
)
5305 || (!MEM_P (src_folded
)
5306 && ! src_folded_force_flag
))
5307 && GET_MODE_CLASS (mode
) != MODE_CC
5308 && mode
!= VOIDmode
)
5310 src_folded_force_flag
= 1;
5312 src_folded_cost
= constant_pool_entries_cost
;
5313 src_folded_regcost
= constant_pool_entries_regcost
;
5317 /* If we changed the insn too much, handle this set from scratch. */
5324 src
= SET_SRC (sets
[i
].rtl
);
5326 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5327 However, there is an important exception: If both are registers
5328 that are not the head of their equivalence class, replace SET_SRC
5329 with the head of the class. If we do not do this, we will have
5330 both registers live over a portion of the basic block. This way,
5331 their lifetimes will likely abut instead of overlapping. */
5333 && REGNO_QTY_VALID_P (REGNO (dest
)))
5335 int dest_q
= REG_QTY (REGNO (dest
));
5336 struct qty_table_elem
*dest_ent
= &qty_table
[dest_q
];
5338 if (dest_ent
->mode
== GET_MODE (dest
)
5339 && dest_ent
->first_reg
!= REGNO (dest
)
5340 && REG_P (src
) && REGNO (src
) == REGNO (dest
)
5341 /* Don't do this if the original insn had a hard reg as
5342 SET_SRC or SET_DEST. */
5343 && (!REG_P (sets
[i
].src
)
5344 || REGNO (sets
[i
].src
) >= FIRST_PSEUDO_REGISTER
)
5345 && (!REG_P (dest
) || REGNO (dest
) >= FIRST_PSEUDO_REGISTER
))
5346 /* We can't call canon_reg here because it won't do anything if
5347 SRC is a hard register. */
5349 int src_q
= REG_QTY (REGNO (src
));
5350 struct qty_table_elem
*src_ent
= &qty_table
[src_q
];
5351 int first
= src_ent
->first_reg
;
5353 = (first
>= FIRST_PSEUDO_REGISTER
5354 ? regno_reg_rtx
[first
] : gen_rtx_REG (GET_MODE (src
), first
));
5356 /* We must use validate-change even for this, because this
5357 might be a special no-op instruction, suitable only to
5359 if (validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_src
, 0))
5362 /* If we had a constant that is cheaper than what we are now
5363 setting SRC to, use that constant. We ignored it when we
5364 thought we could make this into a no-op. */
5365 if (src_const
&& COST (src_const
) < COST (src
)
5366 && validate_change (insn
, &SET_SRC (sets
[i
].rtl
),
5373 /* If we made a change, recompute SRC values. */
5374 if (src
!= sets
[i
].src
)
5377 hash_arg_in_memory
= 0;
5379 sets
[i
].src_hash
= HASH (src
, mode
);
5380 sets
[i
].src_volatile
= do_not_record
;
5381 sets
[i
].src_in_memory
= hash_arg_in_memory
;
5382 sets
[i
].src_elt
= lookup (src
, sets
[i
].src_hash
, mode
);
5385 /* If this is a single SET, we are setting a register, and we have an
5386 equivalent constant, we want to add a REG_EQUAL note if the constant
5387 is different from the source. We don't want to do it for a constant
5388 pseudo since verifying that this pseudo hasn't been eliminated is a
5389 pain; moreover such a note won't help anything.
5391 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5392 which can be created for a reference to a compile time computable
5393 entry in a jump table. */
5397 && !REG_P (src_const
)
5398 && !(GET_CODE (src_const
) == SUBREG
5399 && REG_P (SUBREG_REG (src_const
)))
5400 && !(GET_CODE (src_const
) == CONST
5401 && GET_CODE (XEXP (src_const
, 0)) == MINUS
5402 && GET_CODE (XEXP (XEXP (src_const
, 0), 0)) == LABEL_REF
5403 && GET_CODE (XEXP (XEXP (src_const
, 0), 1)) == LABEL_REF
)
5404 && !rtx_equal_p (src
, src_const
))
5406 /* Make sure that the rtx is not shared. */
5407 src_const
= copy_rtx (src_const
);
5409 /* Record the actual constant value in a REG_EQUAL note,
5410 making a new one if one does not already exist. */
5411 set_unique_reg_note (insn
, REG_EQUAL
, src_const
);
5412 df_notes_rescan (insn
);
5415 /* Now deal with the destination. */
5418 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5419 while (GET_CODE (dest
) == SUBREG
5420 || GET_CODE (dest
) == ZERO_EXTRACT
5421 || GET_CODE (dest
) == STRICT_LOW_PART
)
5422 dest
= XEXP (dest
, 0);
5424 sets
[i
].inner_dest
= dest
;
5428 #ifdef PUSH_ROUNDING
5429 /* Stack pushes invalidate the stack pointer. */
5430 rtx addr
= XEXP (dest
, 0);
5431 if (GET_RTX_CLASS (GET_CODE (addr
)) == RTX_AUTOINC
5432 && XEXP (addr
, 0) == stack_pointer_rtx
)
5433 invalidate (stack_pointer_rtx
, VOIDmode
);
5435 dest
= fold_rtx (dest
, insn
);
5438 /* Compute the hash code of the destination now,
5439 before the effects of this instruction are recorded,
5440 since the register values used in the address computation
5441 are those before this instruction. */
5442 sets
[i
].dest_hash
= HASH (dest
, mode
);
5444 /* Don't enter a bit-field in the hash table
5445 because the value in it after the store
5446 may not equal what was stored, due to truncation. */
5448 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
)
5450 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
5452 if (src_const
!= 0 && CONST_INT_P (src_const
)
5453 && CONST_INT_P (width
)
5454 && INTVAL (width
) < HOST_BITS_PER_WIDE_INT
5455 && ! (INTVAL (src_const
)
5456 & (HOST_WIDE_INT_M1U
<< INTVAL (width
))))
5457 /* Exception: if the value is constant,
5458 and it won't be truncated, record it. */
5462 /* This is chosen so that the destination will be invalidated
5463 but no new value will be recorded.
5464 We must invalidate because sometimes constant
5465 values can be recorded for bitfields. */
5466 sets
[i
].src_elt
= 0;
5467 sets
[i
].src_volatile
= 1;
5473 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5475 else if (n_sets
== 1 && dest
== pc_rtx
&& src
== pc_rtx
)
5477 /* One less use of the label this insn used to jump to. */
5478 delete_insn_and_edges (insn
);
5479 cse_jumps_altered
= true;
5480 /* No more processing for this set. */
5484 /* If this SET is now setting PC to a label, we know it used to
5485 be a conditional or computed branch. */
5486 else if (dest
== pc_rtx
&& GET_CODE (src
) == LABEL_REF
5487 && !LABEL_REF_NONLOCAL_P (src
))
5489 /* We reemit the jump in as many cases as possible just in
5490 case the form of an unconditional jump is significantly
5491 different than a computed jump or conditional jump.
5493 If this insn has multiple sets, then reemitting the
5494 jump is nontrivial. So instead we just force rerecognition
5495 and hope for the best. */
5501 new_rtx
= emit_jump_insn_before (gen_jump (XEXP (src
, 0)), insn
);
5502 JUMP_LABEL (new_rtx
) = XEXP (src
, 0);
5503 LABEL_NUSES (XEXP (src
, 0))++;
5505 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5506 note
= find_reg_note (insn
, REG_NON_LOCAL_GOTO
, 0);
5509 XEXP (note
, 1) = NULL_RTX
;
5510 REG_NOTES (new_rtx
) = note
;
5513 delete_insn_and_edges (insn
);
5517 INSN_CODE (insn
) = -1;
5519 /* Do not bother deleting any unreachable code, let jump do it. */
5520 cse_jumps_altered
= true;
5524 /* If destination is volatile, invalidate it and then do no further
5525 processing for this assignment. */
5527 else if (do_not_record
)
5529 invalidate_dest (dest
);
5533 if (sets
[i
].rtl
!= 0 && dest
!= SET_DEST (sets
[i
].rtl
))
5536 sets
[i
].dest_hash
= HASH (SET_DEST (sets
[i
].rtl
), mode
);
5539 invalidate_dest (SET_DEST (sets
[i
].rtl
));
5545 /* If setting CC0, record what it was set to, or a constant, if it
5546 is equivalent to a constant. If it is being set to a floating-point
5547 value, make a COMPARE with the appropriate constant of 0. If we
5548 don't do this, later code can interpret this as a test against
5549 const0_rtx, which can cause problems if we try to put it into an
5550 insn as a floating-point operand. */
5551 if (dest
== cc0_rtx
)
5553 this_insn_cc0
= src_const
&& mode
!= VOIDmode
? src_const
: src
;
5554 this_insn_cc0_mode
= mode
;
5555 if (FLOAT_MODE_P (mode
))
5556 this_insn_cc0
= gen_rtx_COMPARE (VOIDmode
, this_insn_cc0
,
5562 /* Now enter all non-volatile source expressions in the hash table
5563 if they are not already present.
5564 Record their equivalence classes in src_elt.
5565 This way we can insert the corresponding destinations into
5566 the same classes even if the actual sources are no longer in them
5567 (having been invalidated). */
5569 if (src_eqv
&& src_eqv_elt
== 0 && sets
[0].rtl
!= 0 && ! src_eqv_volatile
5570 && ! rtx_equal_p (src_eqv
, SET_DEST (sets
[0].rtl
)))
5572 struct table_elt
*elt
;
5573 struct table_elt
*classp
= sets
[0].src_elt
;
5574 rtx dest
= SET_DEST (sets
[0].rtl
);
5575 machine_mode eqvmode
= GET_MODE (dest
);
5577 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5579 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
5582 if (insert_regs (src_eqv
, classp
, 0))
5584 rehash_using_reg (src_eqv
);
5585 src_eqv_hash
= HASH (src_eqv
, eqvmode
);
5587 elt
= insert (src_eqv
, classp
, src_eqv_hash
, eqvmode
);
5588 elt
->in_memory
= src_eqv_in_memory
;
5591 /* Check to see if src_eqv_elt is the same as a set source which
5592 does not yet have an elt, and if so set the elt of the set source
5594 for (i
= 0; i
< n_sets
; i
++)
5595 if (sets
[i
].rtl
&& sets
[i
].src_elt
== 0
5596 && rtx_equal_p (SET_SRC (sets
[i
].rtl
), src_eqv
))
5597 sets
[i
].src_elt
= src_eqv_elt
;
5600 for (i
= 0; i
< n_sets
; i
++)
5601 if (sets
[i
].rtl
&& ! sets
[i
].src_volatile
5602 && ! rtx_equal_p (SET_SRC (sets
[i
].rtl
), SET_DEST (sets
[i
].rtl
)))
5604 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == STRICT_LOW_PART
)
5606 /* REG_EQUAL in setting a STRICT_LOW_PART
5607 gives an equivalent for the entire destination register,
5608 not just for the subreg being stored in now.
5609 This is a more interesting equivalence, so we arrange later
5610 to treat the entire reg as the destination. */
5611 sets
[i
].src_elt
= src_eqv_elt
;
5612 sets
[i
].src_hash
= src_eqv_hash
;
5616 /* Insert source and constant equivalent into hash table, if not
5618 struct table_elt
*classp
= src_eqv_elt
;
5619 rtx src
= sets
[i
].src
;
5620 rtx dest
= SET_DEST (sets
[i
].rtl
);
5622 = GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
5624 /* It's possible that we have a source value known to be
5625 constant but don't have a REG_EQUAL note on the insn.
5626 Lack of a note will mean src_eqv_elt will be NULL. This
5627 can happen where we've generated a SUBREG to access a
5628 CONST_INT that is already in a register in a wider mode.
5629 Ensure that the source expression is put in the proper
5632 classp
= sets
[i
].src_const_elt
;
5634 if (sets
[i
].src_elt
== 0)
5636 struct table_elt
*elt
;
5638 /* Note that these insert_regs calls cannot remove
5639 any of the src_elt's, because they would have failed to
5640 match if not still valid. */
5641 if (insert_regs (src
, classp
, 0))
5643 rehash_using_reg (src
);
5644 sets
[i
].src_hash
= HASH (src
, mode
);
5646 elt
= insert (src
, classp
, sets
[i
].src_hash
, mode
);
5647 elt
->in_memory
= sets
[i
].src_in_memory
;
5648 /* If inline asm has any clobbers, ensure we only reuse
5649 existing inline asms and never try to put the ASM_OPERANDS
5650 into an insn that isn't inline asm. */
5651 if (GET_CODE (src
) == ASM_OPERANDS
5652 && GET_CODE (x
) == PARALLEL
)
5653 elt
->cost
= MAX_COST
;
5654 sets
[i
].src_elt
= classp
= elt
;
5656 if (sets
[i
].src_const
&& sets
[i
].src_const_elt
== 0
5657 && src
!= sets
[i
].src_const
5658 && ! rtx_equal_p (sets
[i
].src_const
, src
))
5659 sets
[i
].src_elt
= insert (sets
[i
].src_const
, classp
,
5660 sets
[i
].src_const_hash
, mode
);
5663 else if (sets
[i
].src_elt
== 0)
5664 /* If we did not insert the source into the hash table (e.g., it was
5665 volatile), note the equivalence class for the REG_EQUAL value, if any,
5666 so that the destination goes into that class. */
5667 sets
[i
].src_elt
= src_eqv_elt
;
5669 /* Record destination addresses in the hash table. This allows us to
5670 check if they are invalidated by other sets. */
5671 for (i
= 0; i
< n_sets
; i
++)
5675 rtx x
= sets
[i
].inner_dest
;
5676 struct table_elt
*elt
;
5683 mode
= GET_MODE (x
);
5684 hash
= HASH (x
, mode
);
5685 elt
= lookup (x
, hash
, mode
);
5688 if (insert_regs (x
, NULL
, 0))
5690 rtx dest
= SET_DEST (sets
[i
].rtl
);
5692 rehash_using_reg (x
);
5693 hash
= HASH (x
, mode
);
5694 sets
[i
].dest_hash
= HASH (dest
, GET_MODE (dest
));
5696 elt
= insert (x
, NULL
, hash
, mode
);
5699 sets
[i
].dest_addr_elt
= elt
;
5702 sets
[i
].dest_addr_elt
= NULL
;
5706 invalidate_from_clobbers (insn
);
5708 /* Some registers are invalidated by subroutine calls. Memory is
5709 invalidated by non-constant calls. */
5713 if (!(RTL_CONST_OR_PURE_CALL_P (insn
)))
5714 invalidate_memory ();
5715 invalidate_for_call ();
5718 /* Now invalidate everything set by this instruction.
5719 If a SUBREG or other funny destination is being set,
5720 sets[i].rtl is still nonzero, so here we invalidate the reg
5721 a part of which is being set. */
5723 for (i
= 0; i
< n_sets
; i
++)
5726 /* We can't use the inner dest, because the mode associated with
5727 a ZERO_EXTRACT is significant. */
5728 rtx dest
= SET_DEST (sets
[i
].rtl
);
5730 /* Needed for registers to remove the register from its
5731 previous quantity's chain.
5732 Needed for memory if this is a nonvarying address, unless
5733 we have just done an invalidate_memory that covers even those. */
5734 if (REG_P (dest
) || GET_CODE (dest
) == SUBREG
)
5735 invalidate (dest
, VOIDmode
);
5736 else if (MEM_P (dest
))
5737 invalidate (dest
, VOIDmode
);
5738 else if (GET_CODE (dest
) == STRICT_LOW_PART
5739 || GET_CODE (dest
) == ZERO_EXTRACT
)
5740 invalidate (XEXP (dest
, 0), GET_MODE (dest
));
5743 /* Don't cse over a call to setjmp; on some machines (eg VAX)
5744 the regs restored by the longjmp come from a later time
5746 if (CALL_P (insn
) && find_reg_note (insn
, REG_SETJMP
, NULL
))
5748 flush_hash_table ();
5752 /* Make sure registers mentioned in destinations
5753 are safe for use in an expression to be inserted.
5754 This removes from the hash table
5755 any invalid entry that refers to one of these registers.
5757 We don't care about the return value from mention_regs because
5758 we are going to hash the SET_DEST values unconditionally. */
5760 for (i
= 0; i
< n_sets
; i
++)
5764 rtx x
= SET_DEST (sets
[i
].rtl
);
5770 /* We used to rely on all references to a register becoming
5771 inaccessible when a register changes to a new quantity,
5772 since that changes the hash code. However, that is not
5773 safe, since after HASH_SIZE new quantities we get a
5774 hash 'collision' of a register with its own invalid
5775 entries. And since SUBREGs have been changed not to
5776 change their hash code with the hash code of the register,
5777 it wouldn't work any longer at all. So we have to check
5778 for any invalid references lying around now.
5779 This code is similar to the REG case in mention_regs,
5780 but it knows that reg_tick has been incremented, and
5781 it leaves reg_in_table as -1 . */
5782 unsigned int regno
= REGNO (x
);
5783 unsigned int endregno
= END_REGNO (x
);
5786 for (i
= regno
; i
< endregno
; i
++)
5788 if (REG_IN_TABLE (i
) >= 0)
5790 remove_invalid_refs (i
);
5791 REG_IN_TABLE (i
) = -1;
5798 /* We may have just removed some of the src_elt's from the hash table.
5799 So replace each one with the current head of the same class.
5800 Also check if destination addresses have been removed. */
5802 for (i
= 0; i
< n_sets
; i
++)
5805 if (sets
[i
].dest_addr_elt
5806 && sets
[i
].dest_addr_elt
->first_same_value
== 0)
5808 /* The elt was removed, which means this destination is not
5809 valid after this instruction. */
5810 sets
[i
].rtl
= NULL_RTX
;
5812 else if (sets
[i
].src_elt
&& sets
[i
].src_elt
->first_same_value
== 0)
5813 /* If elt was removed, find current head of same class,
5814 or 0 if nothing remains of that class. */
5816 struct table_elt
*elt
= sets
[i
].src_elt
;
5818 while (elt
&& elt
->prev_same_value
)
5819 elt
= elt
->prev_same_value
;
5821 while (elt
&& elt
->first_same_value
== 0)
5822 elt
= elt
->next_same_value
;
5823 sets
[i
].src_elt
= elt
? elt
->first_same_value
: 0;
5827 /* Now insert the destinations into their equivalence classes. */
5829 for (i
= 0; i
< n_sets
; i
++)
5832 rtx dest
= SET_DEST (sets
[i
].rtl
);
5833 struct table_elt
*elt
;
5835 /* Don't record value if we are not supposed to risk allocating
5836 floating-point values in registers that might be wider than
5838 if ((flag_float_store
5840 && FLOAT_MODE_P (GET_MODE (dest
)))
5841 /* Don't record BLKmode values, because we don't know the
5842 size of it, and can't be sure that other BLKmode values
5843 have the same or smaller size. */
5844 || GET_MODE (dest
) == BLKmode
          /* If we didn't put a REG_EQUAL value or a source into the hash
             table, there is no point in recording DEST. */
5847 || sets
[i
].src_elt
== 0
5848 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5849 or SIGN_EXTEND, don't record DEST since it can cause
5850 some tracking to be wrong.
5852 ??? Think about this more later. */
5853 || (paradoxical_subreg_p (dest
)
5854 && (GET_CODE (sets
[i
].src
) == SIGN_EXTEND
5855 || GET_CODE (sets
[i
].src
) == ZERO_EXTEND
)))
5858 /* STRICT_LOW_PART isn't part of the value BEING set,
5859 and neither is the SUBREG inside it.
5860 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5861 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5862 dest
= SUBREG_REG (XEXP (dest
, 0));
5864 if (REG_P (dest
) || GET_CODE (dest
) == SUBREG
)
5865 /* Registers must also be inserted into chains for quantities. */
5866 if (insert_regs (dest
, sets
[i
].src_elt
, 1))
5868 /* If `insert_regs' changes something, the hash code must be
5870 rehash_using_reg (dest
);
5871 sets
[i
].dest_hash
= HASH (dest
, GET_MODE (dest
));
5874 elt
= insert (dest
, sets
[i
].src_elt
,
5875 sets
[i
].dest_hash
, GET_MODE (dest
));
5877 /* If this is a constant, insert the constant anchors with the
5878 equivalent register-offset expressions using register DEST. */
5879 if (targetm
.const_anchor
5881 && SCALAR_INT_MODE_P (GET_MODE (dest
))
5882 && GET_CODE (sets
[i
].src_elt
->exp
) == CONST_INT
)
5883 insert_const_anchors (dest
, sets
[i
].src_elt
->exp
, GET_MODE (dest
));
5885 elt
->in_memory
= (MEM_P (sets
[i
].inner_dest
)
5886 && !MEM_READONLY_P (sets
[i
].inner_dest
));
5888 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5889 narrower than M2, and both M1 and M2 are the same number of words,
5890 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5891 make that equivalence as well.
5893 However, BAR may have equivalences for which gen_lowpart
5894 will produce a simpler value than gen_lowpart applied to
5895 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5896 BAR's equivalences. If we don't get a simplified form, make
5897 the SUBREG. It will not be used in an equivalence, but will
5898 cause two similar assignments to be detected.
5900 Note the loop below will find SUBREG_REG (DEST) since we have
5901 already entered SRC and DEST of the SET in the table. */
5903 if (GET_CODE (dest
) == SUBREG
5904 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))) - 1)
5906 == (GET_MODE_SIZE (GET_MODE (dest
)) - 1) / UNITS_PER_WORD
)
5907 && (GET_MODE_SIZE (GET_MODE (dest
))
5908 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))))
5909 && sets
[i
].src_elt
!= 0)
5911 machine_mode new_mode
= GET_MODE (SUBREG_REG (dest
));
5912 struct table_elt
*elt
, *classp
= 0;
5914 for (elt
= sets
[i
].src_elt
->first_same_value
; elt
;
5915 elt
= elt
->next_same_value
)
5919 struct table_elt
*src_elt
;
5922 /* Ignore invalid entries. */
5923 if (!REG_P (elt
->exp
)
5924 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
5927 /* We may have already been playing subreg games. If the
5928 mode is already correct for the destination, use it. */
5929 if (GET_MODE (elt
->exp
) == new_mode
)
5933 /* Calculate big endian correction for the SUBREG_BYTE.
5934 We have already checked that M1 (GET_MODE (dest))
5935 is not narrower than M2 (new_mode). */
5936 if (BYTES_BIG_ENDIAN
)
5937 byte
= (GET_MODE_SIZE (GET_MODE (dest
))
5938 - GET_MODE_SIZE (new_mode
));
5940 new_src
= simplify_gen_subreg (new_mode
, elt
->exp
,
5941 GET_MODE (dest
), byte
);
5944 /* The call to simplify_gen_subreg fails if the value
5945 is VOIDmode, yet we can't do any simplification, e.g.
5946 for EXPR_LISTs denoting function call results.
5947 It is invalid to construct a SUBREG with a VOIDmode
5948 SUBREG_REG, hence a zero new_src means we can't do
5949 this substitution. */
5953 src_hash
= HASH (new_src
, new_mode
);
5954 src_elt
= lookup (new_src
, src_hash
, new_mode
);
                  /* Put the new source in the hash table if it isn't
5960 if (insert_regs (new_src
, classp
, 0))
5962 rehash_using_reg (new_src
);
5963 src_hash
= HASH (new_src
, new_mode
);
5965 src_elt
= insert (new_src
, classp
, src_hash
, new_mode
);
5966 src_elt
->in_memory
= elt
->in_memory
;
5967 if (GET_CODE (new_src
) == ASM_OPERANDS
5968 && elt
->cost
== MAX_COST
)
5969 src_elt
->cost
= MAX_COST
;
5971 else if (classp
&& classp
!= src_elt
->first_same_value
)
5972 /* Show that two things that we've seen before are
5973 actually the same. */
5974 merge_equiv_classes (src_elt
, classp
);
5976 classp
= src_elt
->first_same_value
;
5977 /* Ignore invalid entries. */
5979 && !REG_P (classp
->exp
)
5980 && ! exp_equiv_p (classp
->exp
, classp
->exp
, 1, false))
5981 classp
= classp
->next_same_value
;
5986 /* Special handling for (set REG0 REG1) where REG0 is the
5987 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5988 be used in the sequel, so (if easily done) change this insn to
5989 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5990 that computed their value. Then REG1 will become a dead store
5991 and won't cloud the situation for later optimizations.
5993 Do not make this change if REG1 is a hard register, because it will
5994 then be used in the sequel and we may be changing a two-operand insn
5995 into a three-operand insn.
5997 Also do not do this if we are operating on a copy of INSN. */
5999 if (n_sets
== 1 && sets
[0].rtl
)
6000 try_back_substitute_reg (sets
[0].rtl
, insn
);
/* Remove from the hash table all expressions that reference memory. */
invalidate_memory (void)
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
        next = p->next_same_hash;
          remove_from_table (p, i);
/* Perform invalidation on the basis of everything about INSN,
   except for invalidating the actual places that are SET in it.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed. */
invalidate_from_clobbers (rtx_insn *insn)
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == CLOBBER)
      rtx ref = XEXP (x, 0);
          if (REG_P (ref) || GET_CODE (ref) == SUBREG
            invalidate (ref, VOIDmode);
          else if (GET_CODE (ref) == STRICT_LOW_PART
                   || GET_CODE (ref) == ZERO_EXTRACT)
            invalidate (XEXP (ref, 0), GET_MODE (ref));
  else if (GET_CODE (x) == PARALLEL)
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == CLOBBER)
              rtx ref = XEXP (y, 0);
              if (REG_P (ref) || GET_CODE (ref) == SUBREG
                invalidate (ref, VOIDmode);
              else if (GET_CODE (ref) == STRICT_LOW_PART
                       || GET_CODE (ref) == ZERO_EXTRACT)
                invalidate (XEXP (ref, 0), GET_MODE (ref));
/* Perform invalidation on the basis of everything about INSN.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_sets_and_clobbers (rtx_insn *insn)
{
  rtx tem;
  rtx x = PATTERN (insn);

  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
	  invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
    }

  /* Ensure we invalidate the destination register of a CALL insn.
     This is necessary for machines where this register is a fixed_reg,
     because no other code would invalidate it.  */
  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
    invalidate (SET_DEST (x), VOIDmode);

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx clobbered = XEXP (y, 0);

	      if (REG_P (clobbered)
		  || GET_CODE (clobbered) == SUBREG)
		invalidate (clobbered, VOIDmode);
	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
		       || GET_CODE (clobbered) == ZERO_EXTRACT)
		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
	    }
	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
	    invalidate (SET_DEST (y), VOIDmode);
	}
    }
}
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */

static rtx
cse_process_notes_1 (rtx x, rtx object, bool *changed)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    case MEM:
      validate_change (x, &XEXP (x, 0),
		       cse_process_notes (XEXP (x, 0), x, changed), 0);
      return x;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL)
	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
      /* Fall through.  */

    case INSN_LIST:
      if (XEXP (x, 1))
	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case SUBREG:
      {
	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
	/* We don't substitute VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new_rtx) != VOIDmode)
	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
	return x;
      }

    case UNSIGNED_FLOAT:
      {
	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
	/* We don't substitute negative VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new_rtx) != VOIDmode
	    || (CONST_INT_P (new_rtx) && INTVAL (new_rtx) >= 0)
	    || (CONST_DOUBLE_P (new_rtx) && CONST_DOUBLE_HIGH (new_rtx) >= 0))
	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
	return x;
      }

    case REG:
      i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x)))
	{
	  struct qty_table_elem *ent = &qty_table[i];

	  if (ent->const_rtx != NULL_RTX
	      && (CONSTANT_P (ent->const_rtx)
		  || REG_P (ent->const_rtx)))
	    {
	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
	      if (new_rtx)
		return copy_rtx (new_rtx);
	    }
	}

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL);

    default:
      break;
    }

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
		       cse_process_notes (XEXP (x, i), object, changed), 0);

  return x;
}

static rtx
cse_process_notes (rtx x, rtx object, bool *changed)
{
  rtx new_rtx = cse_process_notes_1 (x, object, changed);
  if (new_rtx != x)
    *changed = true;
  return new_rtx;
}
/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.

   DATA is a pointer to a struct cse_basic_block_data that is used to
   describe the path.  It is filled with a queue of basic blocks,
   starting with FIRST_BB and following a trace through the CFG.

   If all paths starting at FIRST_BB have been followed, or no new path
   starting at FIRST_BB can be constructed, this function returns FALSE.
   Otherwise, DATA->path is filled and the function returns TRUE indicating
   that a path to follow was found.

   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
   block in the path will be FIRST_BB.  */

static bool
cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
	       int follow_jumps)
{
  basic_block bb;
  edge e;
  int path_size;

  bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);

  /* See if there is a previous path.  */
  path_size = data->path_size;

  /* There is a previous path.  Make sure it started with FIRST_BB.  */
  if (path_size)
    gcc_assert (data->path[0].bb == first_bb);

  /* There was only one basic block in the last path.  Clear the path and
     return, so that paths starting at another basic block can be tried.  */
  if (path_size == 1)
    {
      path_size = 0;
      goto done;
    }

  /* If the path was empty from the beginning, construct a new path.  */
  if (path_size == 0)
    data->path[path_size++].bb = first_bb;
  else
    {
      /* Otherwise, path_size must be equal to or greater than 2, because
	 a previous path exists that is at least two basic blocks long.

	 Update the previous branch path, if any.  If the last branch was
	 previously along the branch edge, take the fallthrough edge now.  */
      while (path_size >= 2)
	{
	  basic_block last_bb_in_path, previous_bb_in_path;

	  --path_size;
	  last_bb_in_path = data->path[path_size].bb;
	  previous_bb_in_path = data->path[path_size - 1].bb;

	  /* If we previously followed a path along the branch edge, try
	     the fallthru edge now.  */
	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
	      && any_condjump_p (BB_END (previous_bb_in_path))
	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
	      && e == BRANCH_EDGE (previous_bb_in_path))
	    {
	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_pred_p (bb)
		  /* We used to assert here that we would only see blocks
		     that we have not visited yet.  But we may end up
		     visiting basic blocks twice if the CFG has changed
		     in this run of cse_main, because when the CFG changes
		     the topological sort of the CFG also changes.  A basic
		     block that previously had more than two predecessors
		     may now have a single predecessor, and become part of
		     a path that starts at another basic block.

		     We still want to visit each basic block only once, so
		     halt the path here if we have already visited BB.  */
		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
		{
		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
		  data->path[path_size++].bb = bb;
		  break;
		}
	    }

	  data->path[path_size].bb = NULL;
	}

      /* If only one block remains in the path, bail.  */
      if (path_size == 1)
	{
	  path_size = 0;
	  goto done;
	}
    }

  /* Extend the path if possible.  */
  if (follow_jumps)
    {
      bb = data->path[path_size - 1].bb;
      while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
	{
	  if (single_succ_p (bb))
	    e = single_succ_edge (bb);
	  else if (EDGE_COUNT (bb->succs) == 2
		   && any_condjump_p (BB_END (bb)))
	    {
	      /* First try to follow the branch.  If that doesn't lead
		 to a useful path, follow the fallthru edge.  */
	      e = BRANCH_EDGE (bb);
	      if (!single_pred_p (e->dest))
		e = FALLTHRU_EDGE (bb);
	    }
	  else
	    e = NULL;

	  if (e
	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && single_pred_p (e->dest)
	      /* Avoid visiting basic blocks twice.  The large comment
		 above explains why this can happen.  */
	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
	    {
	      basic_block bb2 = e->dest;
	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
	      data->path[path_size++].bb = bb2;
	      bb = bb2;
	    }
	  else
	    bb = NULL;
	}
    }

 done:
  data->path_size = path_size;
  return path_size != 0;
}
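/* Illustrative example of path construction (not part of the original
   source): if BB2 ends in a conditional jump whose branch edge goes to BB3
   and whose fallthru edge goes to BB4, and BB3 and BB4 each have a single
   predecessor, a first call starting at BB2 may build the path {BB2, BB3};
   the next call flips that last branch decision and returns {BB2, BB4};
   once both have been followed the function returns false for BB2.  */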
/* Dump the path in DATA to file F.  NSETS is the number of sets
   in the path.  */

static void
cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
{
  int path_entry;

  fprintf (f, ";; Following path with %d sets: ", nsets);
  for (path_entry = 0; path_entry < data->path_size; path_entry++)
    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
  fputc ('\n', dump_file);
  fflush (f);
}
/* Return true if BB has exception handling successor edges.  */

static bool
have_eh_succ_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_EH)
      return true;

  return false;
}
/* Scan to the end of the path described by DATA.  Record in DATA->nsets
   an estimate of the total number of SETs of all insns in the path.  */

static void
cse_prescan_path (struct cse_basic_block_data *data)
{
  int nsets = 0;
  int path_size = data->path_size;
  int path_entry;

  /* Scan to end of each basic block in the path.  */
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = data->path[path_entry].bb;

      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  /* A PARALLEL can have lots of SETs in it,
	     especially if it is really an ASM_OPERANDS.  */
	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
	    nsets += XVECLEN (PATTERN (insn), 0);
	  else
	    nsets += 1;
	}
    }

  data->nsets = nsets;
}
/* Return true if the pattern of INSN uses a LABEL_REF for which
   there isn't a REG_LABEL_OPERAND note.  */

static bool
check_for_label_ref (rtx_insn *insn)
{
  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == LABEL_REF
	  && !LABEL_REF_NONLOCAL_P (x)
	  && (!JUMP_P (insn)
	      || !label_is_jump_target_p (LABEL_REF_LABEL (x), insn))
	  && LABEL_P (LABEL_REF_LABEL (x))
	  && INSN_UID (LABEL_REF_LABEL (x)) != 0
	  && !find_reg_note (insn, REG_LABEL_OPERAND, LABEL_REF_LABEL (x)))
	return true;
    }
  return false;
}
/* Process a single extended basic block described by EBB_DATA.  */

static void
cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
{
  int path_size = ebb_data->path_size;
  int path_entry;
  int num_insns = 0;

  /* Allocate the space needed by qty_table.  */
  qty_table = XNEWVEC (struct qty_table_elem, max_qty);

  new_basic_block ();
  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = ebb_data->path[path_entry].bb;

      /* Invalidate recorded information for eh regs if there is an EH
	 edge pointing to that bb.  */
      if (bb_has_eh_pred (bb))
	{
	  df_ref def;

	  FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
	    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	      invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
	}

      optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
      FOR_BB_INSNS (bb, insn)
	{
	  /* If we have processed 1,000 insns, flush the hash table to
	     avoid extreme quadratic behavior.  We must not include NOTEs
	     in the count since there may be more of them when generating
	     debugging information.  If we clear the table at different
	     times, code generated with -g -O might be different than code
	     generated with -O but not -g.

	     FIXME: This is a real kludge and needs to be done some other
	     way.  */
	  if (NONDEBUG_INSN_P (insn)
	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
	    {
	      flush_hash_table ();
	      num_insns = 0;
	    }

	  if (INSN_P (insn))
	    {
	      /* Process notes first so we have all notes in canonical forms
		 when looking for duplicate operations.  */
	      if (REG_NOTES (insn))
		{
		  bool changed = false;
		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
							NULL_RTX, &changed);
		  if (changed)
		    df_notes_rescan (insn);
		}

	      cse_insn (insn);

	      /* If we haven't already found an insn where we added a LABEL_REF,
		 check this one.  */
	      if (INSN_P (insn) && !recorded_label_ref
		  && check_for_label_ref (insn))
		recorded_label_ref = true;

	      if (NONDEBUG_INSN_P (insn))
		{
		  /* If the previous insn sets CC0 and this insn no
		     longer references CC0, delete the previous insn.
		     Here we use the fact that nothing expects CC0 to be
		     valid over an insn, which is true until the final
		     pass.  */
		  rtx_insn *prev_insn;
		  rtx tem;

		  prev_insn = prev_nonnote_nondebug_insn (insn);
		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
		      && (tem = single_set (prev_insn)) != NULL_RTX
		      && SET_DEST (tem) == cc0_rtx
		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
		    delete_insn (prev_insn);

		  /* If this insn is not the last insn in the basic
		     block, it will be PREV_INSN(insn) in the next
		     iteration.  If we recorded any CC0-related
		     information for this insn, remember it.  */
		  if (insn != BB_END (bb))
		    {
		      prev_insn_cc0 = this_insn_cc0;
		      prev_insn_cc0_mode = this_insn_cc0_mode;
		    }
		}
	    }
	}

      /* With non-call exceptions, we are not always able to update
	 the CFG properly inside cse_insn.  So clean up possibly
	 redundant EH edges here.  */
      if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
	cse_cfg_altered |= purge_dead_edges (bb);

      /* If we changed a conditional jump, we may have terminated
	 the path we are following.  Check that by verifying that
	 the edge we would take still exists.  If the edge does
	 not exist anymore, purge the remainder of the path.
	 Note that this will cause us to return to the caller.  */
      if (path_entry < path_size - 1)
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  if (!find_edge (bb, next_bb))
	    {
	      do
		{
		  path_size--;

		  /* If we truncate the path, we must also reset the
		     visited bit on the remaining blocks in the path,
		     or we will never visit them at all.  */
		  bitmap_clear_bit (cse_visited_basic_blocks,
				    ebb_data->path[path_size].bb->index);
		  ebb_data->path[path_size].bb = NULL;
		}
	      while (path_size - 1 != path_entry);
	      ebb_data->path_size = path_size;
	    }
	}

      /* If this is a conditional jump insn, record any known
	 equivalences due to the condition being tested.  */
      insn = BB_END (bb);
      if (path_entry < path_size - 1
	  && JUMP_P (insn)
	  && single_set (insn)
	  && any_condjump_p (insn))
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
	  record_jump_equiv (insn, taken);
	}

      /* Clear the CC0-tracking related insns, they can't provide
	 useful information across basic block boundaries.  */
      prev_insn_cc0 = 0;
    }

  gcc_assert (next_qty <= max_qty);

  free (qty_table);
}
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the instruction.

   Return 2 if jump optimizations should be redone due to simplifications
   in conditional jump instructions.
   Return 1 if the CFG should be cleaned up because it has been modified.
   Return 0 otherwise.  */

static int
cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int i, n_blocks;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();
  df_set_flags (DF_DEFER_INSN_RESCAN);

  reg_scan (get_insns (), max_reg_num ());
  init_cse_reg_info (nregs);

  ebb_data.path = XNEWVEC (struct branch_path,
			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));

  cse_cfg_altered = false;
  cse_jumps_altered = false;
  recorded_label_ref = false;
  constant_pool_entries_cost = 0;
  constant_pool_entries_regcost = 0;
  ebb_data.path_size = 0;
  rtl_hooks = cse_rtl_hooks;

  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (cse_visited_basic_blocks);

  /* Loop over basic blocks in reverse completion order (RPO),
     excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
	 processed before.  */
      do
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
	}
      while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
	     && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
	{
	  /* Pre-scan the path.  */
	  cse_prescan_path (&ebb_data);

	  /* If this basic block has no sets, skip it.  */
	  if (ebb_data.nsets == 0)
	    continue;

	  /* Get a reasonable estimate for the maximum number of qty's
	     needed for this path.  For this, we take the number of sets
	     and multiply that by MAX_RECOG_OPERANDS.  */
	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;

	  /* Dump the path we're about to process.  */
	  if (dump_file)
	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

	  cse_extended_basic_block (&ebb_data);
	}
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  if (cse_jumps_altered || recorded_label_ref)
    return 2;
  else if (cse_cfg_altered)
    return 1;
  else
    return 0;
}
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
   We must then count uses of a SET_DEST regardless, because the insn can't be
   deleted here.  */

static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      count_reg_usage (SET_SRC (x), counts,
		       dest ? dest : SET_DEST (x),
		       incr);
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect dest to be NULL_RTX here.  If the insn may throw,
	 or if it cannot be deleted due to side-effects, mark this fact
	 by setting DEST to pc_rtx.  */
      if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
	  || side_effects_p (PATTERN (x)))
	dest = pc_rtx;

      if (code == CALL_INSN)
	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead since the loop pass
	 may try to use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	    /* This REG_EQUAL note describes the result of a function call.
	       Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, dest, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
/* Return true if X is a dead register.  */

static bool
is_dead_reg (const_rtx x, int *counts)
{
  return (REG_P (x)
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && counts[REGNO (x)] == 0);
}
/* Return true if a set is live.  */

static bool
set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
  rtx_insn *tem;

  if (set_noop_p (set))
    ;
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
  else if (!is_dead_reg (SET_DEST (set), counts)
	   || side_effects_p (SET_SRC (set)))
    return true;

  return false;
}
/* Return true if insn is live.  */

static bool
insn_live_p (rtx_insn *insn, int *counts)
{
  int i;

  if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, insn, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else if (DEBUG_INSN_P (insn))
    {
      rtx_insn *next;

      for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
	if (NOTE_P (next))
	  continue;
	else if (!DEBUG_INSN_P (next))
	  return true;
	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
	  return false;

      return true;
    }
  else
    return true;
}
/* Count the number of stores into a pseudo.  Callback for note_stores.  */

static void
count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
  int *counts = (int *) data;
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (x)]++;
}
/* Return true if the DEBUG_INSN pattern PAT needs to be reset because some
   dead pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
   is dead and REPLACEMENTS[X] is null if it has no replacement.
   Set *SEEN_REPL to true if we see a dead register that does have
   a replacement.  */

static bool
is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
		    bool *seen_repl)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
    {
      const_rtx x = *iter;
      if (is_dead_reg (x, counts))
	{
	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
	    *seen_repl = true;
	  else
	    return true;
	}
    }
  return false;
}
/* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
   Callback for simplify_replace_fn_rtx.  */

static rtx
replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
{
  rtx *replacements = (rtx *) data;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && replacements[REGNO (x)] != NULL_RTX)
    {
      if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
	return replacements[REGNO (x)];
      return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
			     GET_MODE (replacements[REGNO (x)]));
    }

  return NULL_RTX;
}
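/* Illustrative example (an assumption about typical use, not from the
   original source): if a DEBUG_INSN refers to (reg:SI 123) and the recorded
   replacement is the DImode DEBUG_EXPR D#1, the lowpart_subreg call above
   yields (subreg:SI (debug_expr:DI D#1) 0) on a little-endian target, so
   the debug location keeps the narrower mode.  */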
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in the loop optimizer since it
   won't try to move dead invariants out of loops or make givs for dead
   quantities.  The remaining passes of the compilation are also sped up.  */

int
delete_trivially_dead_insns (rtx_insn *insns, int nreg)
{
  int *counts;
  rtx_insn *insn, *prev;
  rtx *replacements = NULL;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  if (MAY_HAVE_DEBUG_INSNS)
    {
      counts = XCNEWVEC (int, nreg * 3);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			   NULL_RTX, 1);
	else if (INSN_P (insn))
	  {
	    count_reg_usage (insn, counts, NULL_RTX, 1);
	    note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
	  }
      /* If there can be debug insns, COUNTS are 3 consecutive arrays.
	 First one counts how many times each pseudo is used outside
	 of debug insns, second counts how many times each pseudo is
	 used in debug insns and third counts how many times a pseudo
	 is stored.  */
    }
  else
    {
      counts = XCNEWVEC (int, nreg);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  count_reg_usage (insn, counts, NULL_RTX, 1);
      /* If no debug insns can be present, COUNTS is just an array
	 which counts how many times each pseudo is used.  */
    }

  /* The pseudo PIC register should be considered as used, since new
     uses of it may be generated later.  */
  if (!reload_completed
      && pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (pic_offset_table_rtx)]++;
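  /* Illustrative indexing example for the three COUNTS arrays described
     above (not from the original source): with NREG == 100, the number of
     non-debug uses of pseudo 42 is counts[42], its number of uses inside
     DEBUG_INSNs is counts[42 + 100], and the number of stores into it is
     counts[42 + 200].  */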
  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.

     If some otherwise unused register is only used in DEBUG_INSNs,
     try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
     the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
     has been created for the unused register, replace it with
     the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
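  /* A sketch of that DEBUG_EXPR trick (illustrative only; register and
     decl numbers are made up):

       (insn (set (reg 200) (plus (reg 100) (const_int 8))))  ;; only debug uses
       (debug_insn (var_location x (reg 200)))

     becomes

       (debug_insn (var_location D#1 (plus (reg 100) (const_int 8))))
       (debug_insn (var_location x (debug_expr D#1)))

     and the now-unused setter of (reg 200) can be deleted without
     losing the debug information.  */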
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
	{
	  if (DEBUG_INSN_P (insn))
	    count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			     NULL_RTX, -1);
	  else
	    {
	      rtx set;
	      if (MAY_HAVE_DEBUG_INSNS
		  && (set = single_set (insn)) != NULL_RTX
		  && is_dead_reg (SET_DEST (set), counts)
		  /* Used at least once in some DEBUG_INSN.  */
		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
		  /* And set exactly once.  */
		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
		  && !side_effects_p (SET_SRC (set))
		  && asm_noperands (PATTERN (insn)) < 0)
		{
		  rtx dval, bind_var_loc;
		  rtx_insn *bind;

		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
		  dval = make_debug_expr_from_rtl (SET_DEST (set));

		  /* Emit a debug bind insn before the insn in which
		     reg dies.  */
		  bind_var_loc =
		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
					  DEBUG_EXPR_TREE_DECL (dval),
					  SET_SRC (set),
					  VAR_INIT_STATUS_INITIALIZED);
		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);

		  bind = emit_debug_insn_before (bind_var_loc, insn);
		  df_insn_rescan (bind);

		  if (replacements == NULL)
		    replacements = XCNEWVEC (rtx, nreg);
		  replacements[REGNO (SET_DEST (set))] = dval;
		}

	      count_reg_usage (insn, counts, NULL_RTX, -1);
	      ndead++;
	    }
	  delete_insn_and_edges (insn);
	}
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  {
	    /* If this debug insn references a dead register that wasn't
	       replaced with a DEBUG_EXPR, reset the DEBUG_INSN.  */
	    bool seen_repl = false;
	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
				    counts, replacements, &seen_repl))
	      {
		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
		df_insn_rescan (insn);
	      }
	    else if (seen_repl)
	      {
		INSN_VAR_LOCATION_LOC (insn)
		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
					     NULL_RTX, replace_dead_reg,
					     replacements);
		df_insn_rescan (insn);
	      }
	  }
      free (replacements);
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);

  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
/* If LOC contains references to NEWREG in a different mode, change them
   to use NEWREG instead.  */

static void
cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
		    rtx *loc, rtx insn, rtx newreg)
{
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      if (x
	  && REG_P (x)
	  && REGNO (x) == REGNO (newreg)
	  && GET_MODE (x) != GET_MODE (newreg))
	{
	  validate_change (insn, loc, newreg, 1);
	  iter.skip_subrtxes ();
	}
    }
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
{
  int success;

  if (!INSN_P (insn))
    return;

  subrtx_ptr_iterator::array_type array;
  cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
  cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);

  /* If the following assertion was triggered, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes can only be considered compatible if the insn - with the mode
     replaced by any of the compatible modes - can still be recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
{
  rtx_insn *insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx_insn *insns[2];
  machine_mode modes[2];
  rtx_insn *last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx_insn *insn;
      rtx_insn *end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      machine_mode set_mode;
	      machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))
		{
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     further.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn_and_edges (insns[i]);
    }

  return mode;
}
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *last_insn;
      rtx cc_reg;
      rtx_insn *insn;
      rtx_insn *cc_src_insn;
      rtx cc_src;
      machine_mode mode;
      machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      cc_src_insn = NULL;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}
/* Perform common subexpression elimination.  Nonzero value from
   `cse_main' means that jumps were simplified and some code may now
   be unreachable, so do jump optimization again.  */

static unsigned int
rest_of_handle_cse (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1 || optimize > 1)
    cleanup_cfg (0);

  return 0;
}

namespace {

const pass_data pass_data_cse =
{
  RTL_PASS, /* type */
  "cse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse : public rtl_opt_pass
{
public:
  pass_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }
  virtual unsigned int execute (function *) { return rest_of_handle_cse (); }

}; // class pass_cse

} // anon namespace

rtl_opt_pass *
make_pass_cse (gcc::context *ctxt)
{
  return new pass_cse (ctxt);
}
/* Run second CSE pass after loop optimizations.  */

static unsigned int
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because it
     makes it harder for that pass to determine whether a jump can be
     bypassed safely.  */
  cse_condition_code_reg ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  cse_not_expected = 1;
  return 0;
}

namespace {

const pass_data pass_data_cse2 =
{
  RTL_PASS, /* type */
  "cse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse2 : public rtl_opt_pass
{
public:
  pass_cse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return optimize > 0 && flag_rerun_cse_after_loop;
  }

  virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }

}; // class pass_cse2

} // anon namespace

rtl_opt_pass *
make_pass_cse2 (gcc::context *ctxt)
{
  return new pass_cse2 (ctxt);
}
/* Run a CSE pass after global optimizations.  This is local CSE only:
   extended basic block paths are built without following jumps.  */

static unsigned int
rest_of_handle_cse_after_global_opts (void)
{
  int save_cfj;
  int tem;

  /* We only want to do local CSE, so don't follow jumps.  */
  save_cfj = flag_cse_follow_jumps;
  flag_cse_follow_jumps = 0;

  rebuild_jump_labels (get_insns ());
  tem = cse_main (get_insns (), max_reg_num ());
  purge_all_dead_edges ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  cse_not_expected = !flag_rerun_cse_after_loop;

  /* If cse altered any jumps, rerun jump opts to clean things up.  */
  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  flag_cse_follow_jumps = save_cfj;
  return 0;
}

namespace {

const pass_data pass_data_cse_after_global_opts =
{
  RTL_PASS, /* type */
  "cse_local", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse_after_global_opts : public rtl_opt_pass
{
public:
  pass_cse_after_global_opts (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return optimize > 0 && flag_rerun_cse_after_global_opts;
  }

  virtual unsigned int execute (function *)
  {
    return rest_of_handle_cse_after_global_opts ();
  }

}; // class pass_cse_after_global_opts

} // anon namespace

rtl_opt_pass *
make_pass_cse_after_global_opts (gcc::context *ctxt)
{
  return new pass_cse_after_global_opts (ctxt);
}