/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed from the hash table.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match makes the entries be
   ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
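
/* A hypothetical example of the scheme above (not taken from real compiler
   output): given the sequence

	(set (reg:SI 100) (reg:SI 99))
	(set (reg:SI 101) (plus:SI (reg:SI 99) (const_int 4)))
	(set (reg:SI 102) (plus:SI (reg:SI 100) (const_int 4)))

   the copy in the first insn gives regs 99 and 100 the same quantity
   number, so the PLUS in the third insn canonicalizes to the same hash
   table entry as the PLUS in the second insn, and its source can be
   replaced by (reg:SI 101), assuming reg 101 is the cheapest equivalent.  */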
/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */
struct qty_table_elem
{
  rtx const_rtx;
  rtx_insn *const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;

/* Insn being scanned.  */

static rtx_insn *this_insn;
static bool optimize_this_for_speed_p;
/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
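
/* For illustration (hypothetical register numbers): if pseudos 100, 103
   and 107 all currently hold quantity 7, the chain might be
   100 <-> 103 <-> 107, with qty_table[7].first_reg == 100,
   qty_table[7].last_reg == 107, reg_eqv_table[100].next == 103,
   reg_eqv_table[103].prev == 100, and the ends of the chain marked
   by -1.  */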
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;
/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning of
   the current run of cse_extended_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; we then have to rerun jump after CSE
   to put in the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;
/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a CONST_INT).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */
struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M) \
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
  : safe_hash (X, M)) & HASH_MASK)
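
/* As a hypothetical illustration of the pseudo-register fast path above:
   if REG_QTY (100) == REG_QTY (103) == 7, then HASH ((reg:SI 100), SImode)
   and HASH ((reg:SI 103), SImode) are both ((unsigned) REG << 7) + 7, so
   equivalent pseudos always land in the same hash bucket.  Any other rtx
   goes through canon_hash, which may set do_not_record.  */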
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  (REGNO_PTR_FRAME_P (N) \
   || (HARD_REGISTER_NUM_P (N) \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET, 1))
#define COST_IN(X, OUTER, OPNO) (REG_P (X) ? 0 : notreg_cost (X, OUTER, OPNO))
/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
  (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;
/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;

  /* Size of current branch path, if any.  */
  int path_size;

  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};

/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;
static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code, int);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, machine_mode);
static inline unsigned safe_hash (rtx, machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx_insn *);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   machine_mode *, machine_mode *);
static rtx fold_rtx (rtx, rtx_insn *);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx_insn *, bool);
static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx_insn *);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx_insn *);
static void invalidate_from_sets_and_clobbers (rtx_insn *);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
extern void dump_class (struct table_elt *);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);

static void flush_hash_table (void);
static bool insn_live_p (rtx_insn *, int *);
static bool set_live_p (rtx, rtx_insn *, int *);
static void cse_change_cc_mode_insn (rtx_insn *, rtx);
static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				  bool);

#undef  RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
/* Nonzero if X has the form (PLUS frame-pointer integer).  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (const_rtx x)
{
  int cost = 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  if (!CHEAP_REGNO (regno))
	    {
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
		    return MAX_COST;
		  cost += 2;
		}
	      else
		cost += 1;
	    }
	}
    }
  return cost;
}
/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
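
/* For instance (hypothetical costs): preferable (4, 2, 4, 1) is positive,
   so the second expression wins on register pressure once the rtx costs
   tie, while preferable (3, 9, 4, 0) is negative because a cheaper rtx
   cost is preferred regardless of regcost, as long as neither cost is
   MAX_COST.  */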
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer, int opno)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
					     GET_MODE (SUBREG_REG (x))))
	  ? 0
	  : rtx_cost (x, outer, opno, optimize_this_for_speed_p) * 2);
}
/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */
	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }
}
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}
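
/* As a worked (hypothetical) example of the anchor computation above:
   with targetm.const_anchor == 0x8000 and CST == (const_int 0x12345),
   the lower anchor is 0x10000 with offset 0x2345 and the upper anchor is
   0x18000 with offset -0x5cbb, so 0x12345 can later be rebuilt from a
   register known to hold either anchor plus the corresponding offset.  */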
/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
		     machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (mode, reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression therefore it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
}
/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}
/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  struct table_elt *match_elt;
  rtx match;
  unsigned idx;

  match_elt = NULL;
  match = NULL_RTX;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}
/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful or NULL_RTX,
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  /* CONST_INT is used for CC modes, but we should leave those alone.  */
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return NULL_RTX;

  gcc_assert (SCALAR_INT_MODE_P (mode));
  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}
/* Insert X in the hash table, assuming HASH is its hash code and
   CLASSP is an element of the class it should go in (or 0 if a new
   class should be made).  COST is the cost of X and reg_cost is the
   cost of registers in X.  It is inserted at the proper position to
   keep the class in the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

static struct table_elt *
insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
		   machine_mode mode, int cost, int reg_cost)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = XNEW (struct table_elt);

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = cost;
  elt->regcost = reg_cost;
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp;
	       (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = SAFE_HASH (subexp, mode);
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
/* Wrap insert_with_costs by passing the default costs.  */

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash,
	machine_mode mode)
{
  return
    insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
}
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new_elt;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
	{
	  bool need_rehash = false;

	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (REG_P (exp))
	    {
	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
	      delete_reg_equiv (REGNO (exp));
	    }

	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
	    remove_pseudo_from_table (exp, hash);
	  else
	    remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0) || need_rehash)
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new_elt = insert (exp, class1, hash, mode);
	  new_elt->in_memory = hash_arg_in_memory;
	}
    }
}
/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (REG_P (p->exp))
	  invalidate (p->exp, VOIDmode);
	else
	  remove_from_table (p, i);
      }
}
/* Check whether an anti dependence exists between X and EXP.  MODE and
   ADDR are as for canon_anti_dependence.  */

static bool
check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
	return true;
    }
  return false;
}
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
        /* If X is a register, dependencies on its contents are recorded
           through the qty number mechanism.  Just change the qty number of
           the register, mark it as invalid for expressions that refer to it,
           and remove it itself.  */
        unsigned int regno = REGNO (x);
        unsigned int hash = HASH (x, GET_MODE (x));

        /* Remove REGNO from any quantity list it might be on and indicate
           that its value might have changed.  If it is a pseudo, remove its
           entry from the hash table.

           For a hard register, we do the first two actions above for any
           additional hard registers corresponding to X.  Then, if any of these
           registers are in the table, we must remove any REG entries that
           overlap these registers.  */

        delete_reg_equiv (regno);
        REG_TICK (regno)++;
        SUBREG_TICKED (regno) = -1;

        if (regno >= FIRST_PSEUDO_REGISTER)
          remove_pseudo_from_table (x, hash);
        else
          {
            HOST_WIDE_INT in_table
              = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
            unsigned int endregno = END_HARD_REGNO (x);
            unsigned int tregno, tendregno, rn;
            struct table_elt *p, *next;

            CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

            for (rn = regno + 1; rn < endregno; rn++)
              {
                in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
                CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
                delete_reg_equiv (rn);
                REG_TICK (rn)++;
                SUBREG_TICKED (rn) = -1;
              }

            if (in_table)
              for (hash = 0; hash < HASH_SIZE; hash++)
                for (p = table[hash]; p; p = next)
                  {
                    next = p->next_same_hash;

                    if (!REG_P (p->exp)
                        || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
                      continue;

                    tregno = REGNO (p->exp);
                    tendregno = END_HARD_REGNO (p->exp);
                    if (tendregno > regno && tregno < endregno)
                      remove_from_table (p, hash);
                  }
          }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
        invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
         question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
         true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
         memory.  */
      if (full_mode == VOIDmode)
        full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
        {
          struct table_elt *next;

          for (p = table[i]; p; p = next)
            {
              next = p->next_same_hash;
              if (p->in_memory)
                {
                  /* Just canonicalize the expression once;
                     otherwise each time we call invalidate
                     true_dependence will canonicalize the
                     expression again.  */
                  if (!p->canon_exp)
                    p->canon_exp = canon_rtx (p->exp);
                  if (check_dependence (p->canon_exp, x, full_mode, addr))
                    remove_from_table (p, i);
                }
            }
        }
      return;

    default:
      gcc_unreachable ();
    }
}
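/* Roughly: invalidating a REG severs its quantity links and drops its own
   table entry (plus any overlapping hard-register entries), while
   invalidating a MEM walks every chain and removes in-memory entries whose
   canonicalized form may anti-depend on the stored location.  */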
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
          remove_from_table (p, i);
      }
}
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */

static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
                            machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        rtx exp = p->exp;
        next = p->next_same_hash;

        if (!REG_P (exp)
            && (GET_CODE (exp) != SUBREG
                || !REG_P (SUBREG_REG (exp))
                || REGNO (SUBREG_REG (exp)) != regno
                || (((SUBREG_BYTE (exp)
                      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
                    && SUBREG_BYTE (exp) <= end))
            && refers_to_regno_p (regno, p->exp))
          remove_from_table (p, i);
      }
}
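/* Worked example (illustrative): after a store through
   (subreg:SI (reg:DI 100) 4), OFFSET is 4 and END is 7, so a table entry for
   (subreg:SI (reg:DI 100) 0) -- bytes 0..3, no overlap -- can stay, while any
   overlapping SUBREG of pseudo 100, or any other expression mentioning it,
   is removed.  */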
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (reg_mentioned_p (x, p->exp)
            && exp_equiv_p (p->exp, p->exp, 1, false)
            && i != (hash = SAFE_HASH (p->exp, p->mode)))
          {
            if (p->next_same_hash)
              p->next_same_hash->prev_same_hash = p->prev_same_hash;

            if (p->prev_same_hash)
              p->prev_same_hash->next_same_hash = p->next_same_hash;
            else
              table[i] = p->next_same_hash;

            p->next_same_hash = table[hash];
            p->prev_same_hash = 0;
            if (table[hash])
              table[hash]->prev_same_hash = p;
            table[hash] = p;
          }
      }
}
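/* The body of the inner 'if' above is plain doubly-linked-list surgery: the
   entry is spliced out of chain I via its prev_same_hash/next_same_hash
   links and pushed onto the front of chain HASH.  */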
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call (void)
{
  unsigned int regno, endregno;
  unsigned int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;
  hard_reg_set_iterator hrsi;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */
  EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
    {
      delete_reg_equiv (regno);
      if (REG_TICK (regno) >= 0)
        {
          REG_TICK (regno)++;
          SUBREG_TICKED (regno) = -1;
        }
      in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
    }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
        {
          next = p->next_same_hash;

          if (!REG_P (p->exp)
              || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
            continue;

          regno = REGNO (p->exp);
          endregno = END_HARD_REGNO (p->exp);

          for (i = regno; i < endregno; i++)
            if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
              {
                remove_from_table (p, hash);
                break;
              }
        }
}
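/* regs_invalidated_by_call is the target's set of call-clobbered hard
   registers, so after this runs no recorded equivalence involving such a
   register survives past the CALL_INSN being scanned.  */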
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
        relt = lookup (subexp,
                       SAFE_HASH (subexp, GET_MODE (subexp)),
                       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
         The first is when X is already in the table.  Then it is searching
         the RELATED_VALUE list of X's class (RELT).  The second case is when
         X is not in the table.  Then RELT points to a class for the related
         value.

         Ensure that, whatever case we are in, that we ignore classes that have
         the same value as X.  */

      if (rtx_equal_p (x, p->exp))
        q = 0;
      else
        for (q = p->first_same_value; q; q = q->next_same_value)
          if (REG_P (q->exp))
            break;

      if (q)
        break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
         Alternatively, perhaps RELT was in the table for some other reason
         and it has no related values recorded.  */
      if (p == relt || p == 0)
        break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->mode, q->exp, offset);
}
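/* Worked example (values purely illustrative): if X is
   (const (plus (symbol_ref "s") (const_int 12))) and some register REG is
   known to hold (const (plus (symbol_ref "s") (const_int 4))), the two
   CONSTs are related, OFFSET is 8, and the result is
   (plus REG (const_int 8)).  */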
/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
   When the callback returns true, we continue with the new rtx.  */

unsigned
hash_rtx_cb (const_rtx x, machine_mode mode,
             int *do_not_record_p, int *hash_arg_in_memory_p,
             bool have_reg_qty, hash_rtx_callback_function cb)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;
  machine_mode newmode;
  rtx newx;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  /* Invoke the callback first.  */
  if (cb != NULL
      && ((*cb) (x, mode, &newx, &newmode)))
    {
      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
                           hash_arg_in_memory_p, have_reg_qty, cb);
      return hash;
    }

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        unsigned int regno = REGNO (x);

        if (do_not_record_p && !reload_completed)
          {
            /* On some machines, we can't record any non-fixed hard register,
               because extending its life will cause reload problems.  We
               consider ap, fp, sp, gp to be fixed for this purpose.

               We also consider CCmode registers to be fixed for this purpose;
               failure to do so leads to failure to simplify 0<100 type of
               conditionals.

               On all machines, we can't record any global registers.
               Nor should we record any register that is in a small
               class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
            bool record;

            if (regno >= FIRST_PSEUDO_REGISTER)
              record = true;
            else if (x == frame_pointer_rtx
                     || x == hard_frame_pointer_rtx
                     || x == arg_pointer_rtx
                     || x == stack_pointer_rtx
                     || x == pic_offset_table_rtx)
              record = true;
            else if (global_regs[regno])
              record = false;
            else if (fixed_regs[regno])
              record = true;
            else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
              record = true;
            else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
              record = false;
            else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
              record = false;
            else
              record = true;

            if (!record)
              {
                *do_not_record_p = 1;
                return 0;
              }
          }

        hash += ((unsigned int) REG << 7);
        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
        return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
        if (REG_P (SUBREG_REG (x)))
          {
            hash += (((unsigned int) SUBREG << 7)
                     + REGNO (SUBREG_REG (x))
                     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
            return hash;
          }
        break;
      }

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
        hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      else
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += hash_rtx_cb (elt, GET_MODE (elt),
                                 do_not_record_p, hash_arg_in_memory_p,
                                 have_reg_qty, cb);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (LABEL_REF_LABEL (x)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
         know the size of the move.  */
      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
        {
          *do_not_record_p = 1;
          return 0;
        }
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
        *hash_arg_in_memory_p = 1;

      /* Now that we have already found this special case,
         might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
         handling since the MEM may be BLKmode which normally
         prevents an entry from being made.  Pure calls are
         marked by a USE which mentions BLKmode memory.
         See calls.c:emit_call_1.  */
      if (MEM_P (XEXP (x, 0))
          && ! MEM_VOLATILE_P (XEXP (x, 0)))
        {
          hash += (unsigned) USE;
          x = XEXP (x, 0);

          if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
            *hash_arg_in_memory_p = 1;

          /* Now that we have already found this special case,
             might as well speed it up as much as possible.  */
          hash += (unsigned) MEM;
          x = XEXP (x, 0);
          goto repeat;
        }
      break;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      if (do_not_record_p) {
        *do_not_record_p = 1;
        return 0;
      }
      else
        return hash;
      break;

    case ASM_OPERANDS:
      if (do_not_record_p && MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
            + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                {
                  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                        do_not_record_p, hash_arg_in_memory_p,
                                        have_reg_qty, cb)
                           + hash_rtx_string
                             (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
                }

              hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }

          return hash;
        }
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
                               hash_arg_in_memory_p,
                               have_reg_qty, cb);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
                               hash_arg_in_memory_p,
                               have_reg_qty, cb);
      else if (fmt[i] == 's')
        hash += hash_rtx_string (XSTR (x, i));
      else if (fmt[i] == 'i')
        hash += (unsigned int) XINT (x, i);
      else if (fmt[i] == '0' || fmt[i] == 't')
        /* Unused.  */
        ;
      else
        gcc_unreachable ();
    }

  return hash;
}
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which does not have the MEM_READONLY_P flag set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
          int *hash_arg_in_memory_p, bool have_reg_qty)
{
  return hash_rtx_cb (x, mode, do_not_record_p,
                      hash_arg_in_memory_p, have_reg_qty, NULL);
}
/* Hash an rtx X for cse via hash_rtx.
   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   does not have the MEM_READONLY_P flag set.  */

static inline unsigned
canon_hash (rtx x, machine_mode mode)
{
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}
/* Like canon_hash but with no side effects, i.e. do_not_record
   and hash_arg_in_memory are not changed.  */

static inline unsigned
safe_hash (rtx x, machine_mode mode)
{
  int dummy_do_not_record;
  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
}
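/* The difference between the two wrappers above: canon_hash feeds the global
   do_not_record and hash_arg_in_memory flags and so is used when recording
   expressions, while safe_hash has no side effects and is what the SAFE_HASH
   macro uses for pure lookups.  */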
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */

int
exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    CASE_CONST_UNIQUE:
      return x == y;

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
        return REGNO (x) == REGNO (y);
      else
        {
          unsigned int regno = REGNO (y);
          unsigned int i;
          unsigned int endregno = END_REGNO (y);

          /* If the quantities are not the same, the expressions are not
             equivalent.  If they are the same and we are not to validate,
             they are equivalent.  Otherwise, ensure all regs are up-to-date.  */

          if (REG_QTY (REGNO (x)) != REG_QTY (regno))
            return 0;

          if (! validate)
            return 1;

          for (i = regno; i < endregno; i++)
            if (REG_IN_TABLE (i) != REG_TICK (i))
              return 0;

          return 1;
        }

    case MEM:
      if (for_gcse)
        {
          /* A volatile mem should not be considered equivalent to any
             other.  */
          if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
            return 0;

          /* Can't merge two expressions in different alias sets, since we
             can decide that the expression is transparent in a block when
             it isn't, due to it being set with the different alias set.

             Also, can't merge two expressions with different MEM_ATTRS.
             They could e.g. be two different entities allocated into the
             same space on the stack (see e.g. PR25130).  In that case, the
             MEM addresses can be the same, even though the two MEMs are
             absolutely not equivalent.

             But because really all MEM attributes should be the same for
             equivalent MEMs, we just use the invariant that MEMs that have
             the same attributes share the same mem_attrs data structure.  */
          if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
            return 0;

          /* If we are handling exceptions, we cannot consider two expressions
             with different trapping status as equivalent, because simple_mem
             might accept one and reject the other.  */
          if (cfun->can_throw_non_call_exceptions
              && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
            return 0;
        }
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
                            validate, for_gcse)
               && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
                               validate, for_gcse))
              || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
                               validate, for_gcse)
                  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
                                  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
         disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      if (GET_MODE (x) != GET_MODE (y)
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
        return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
                               ASM_OPERANDS_INPUT (y, i),
                               validate, for_gcse)
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
              return 0;
        }

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
                             validate, for_gcse))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
                               validate, for_gcse))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case '0':
        case 't':
          break;

        default:
          gcc_unreachable ();
        }
    }

  return 1;
}
/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
   the result if necessary.  INSN is as for canon_reg.  */

static void
validate_canon_reg (rtx *xloc, rtx_insn *insn)
{
  if (*xloc)
    {
      rtx new_rtx = canon_reg (*xloc, insn);

      /* If replacing pseudo with hard reg or vice versa, ensure the
         insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
      gcc_assert (insn && new_rtx);
      validate_change (insn, xloc, new_rtx, 1);
    }
}
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx_insn *insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
        int first;
        int q;
        struct qty_table_elem *ent;

        /* Never replace a hard reg, because hard regs can appear
           in more than one machine mode, and we must preserve the mode
           of each occurrence.  Also, some hard regs appear in
           MEMs that are shared and mustn't be altered.  Don't try to
           replace any reg that maps to a reg of class NO_REGS.  */
        if (REGNO (x) < FIRST_PSEUDO_REGISTER
            || ! REGNO_QTY_VALID_P (REGNO (x)))
          return x;

        q = REG_QTY (REGNO (x));
        ent = &qty_table[q];
        first = ent->first_reg;
        return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
                : REGNO_REG_CLASS (first) == NO_REGS ? x
                : gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
        validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   find what values are really being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
                      machine_mode *pmode1, machine_mode *pmode2)
{
  rtx arg1, arg2;
  hash_set<rtx> *visited = NULL;
  /* Set nonzero when we find something of interest.  */
  rtx x = NULL_RTX;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* Remember state from previous iteration.  */
      if (x)
        {
          if (!visited)
            visited = new hash_set<rtx>;
          visited->add (x);
          x = 0;
        }

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
         On machines with CC0, this is the only case that can occur, since
         fold_rtx will return the COMPARE or item being compared with zero
         when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
        x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
         STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
        {
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          if (code == NE
              || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
              || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
                  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                      REAL_VALUE_NEGATIVE (fsfv)))
#endif
              )
            x = arg1;
          else if (code == EQ
                   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                           REAL_VALUE_NEGATIVE (fsfv)))
#endif
                   )
            x = arg1, reverse_code = 1;
        }

      /* ??? We could also check for

         (ne (and (eq (...) (const_int 1))) (const_int 0))

         and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
        /* Look up ARG1 in the hash table and see if it has an equivalence
           that lets us see what is being compared.  */
        p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
        {
          p = p->first_same_value;

          /* If what we compare is already known to be constant, that is as
             good as it gets.
             We need to break the loop in this case, because otherwise we
             can have an infinite loop when looking at a reg that is known
             to be a constant which is the same as a comparison of a reg
             against zero which appears later in the insn stream, which in
             turn is constant and the same as the comparison of the first reg
             against zero...  */
          if (p->is_const)
            break;
        }

      for (; p; p = p->next_same_value)
        {
          machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* If the entry isn't valid, skip it.  */
          if (! exp_equiv_p (p->exp, p->exp, 1, false))
            continue;

          /* If it's a comparison we've used before, skip it.  */
          if (visited && visited->contains (p->exp))
            continue;

          if (GET_CODE (p->exp) == COMPARE
              /* Another possibility is that this machine has a compare insn
                 that includes the comparison code.  In that case, ARG1 would
                 be equivalent to a comparison operation that would set ARG1 to
                 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
                 ORIG_CODE is the actual comparison being done; if it is an EQ,
                 we must reverse ORIG_CODE.  On machine with a negative value
                 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
              || ((code == NE
                   || (code == LT
                       && val_signbit_known_set_p (inner_mode,
                                                   STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (code == LT
                       && SCALAR_FLOAT_MODE_P (inner_mode)
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                           REAL_VALUE_NEGATIVE (fsfv)))
#endif
                   )
                  && COMPARISON_P (p->exp)))
            {
              x = p->exp;
              break;
            }
          else if ((code == EQ
                    || (code == GE
                        && val_signbit_known_set_p (inner_mode,
                                                    STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
                    || (code == GE
                        && SCALAR_FLOAT_MODE_P (inner_mode)
                        && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                            REAL_VALUE_NEGATIVE (fsfv)))
#endif
                    )
                   && COMPARISON_P (p->exp))
            {
              reverse_code = 1;
              x = p->exp;
              break;
            }

          /* If this is a non-trapping address, e.g. fp + constant, the
             equivalent is a better operand since it may let us predict
             the value of the comparison.  */
          else if (!rtx_addr_can_trap_p (p->exp))
            {
              arg1 = p->exp;
              continue;
            }
        }

      /* If we didn't find a useful equivalence for ARG1, we are done.
         Otherwise, set up for the next iteration.  */
      if (x == 0)
        break;

      /* If we need to reverse the comparison, make sure that that is
         possible -- we can't necessarily infer the value of GE from LT
         with floating-point operands.  */
      if (reverse_code)
        {
          enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
          if (reversed == UNKNOWN)
            break;
          else
            code = reversed;
        }
      else if (COMPARISON_P (x))
        code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  if (visited)
    delete visited;
  return code;
}
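/* Example (illustrative): for (eq (reg 100) (const_int 0)), if reg 100 is
   recorded as equivalent to (compare (reg 101) (reg 102)), the loop above
   substitutes that COMPARE, so *PARG1/*PARG2 end up as reg 101 and reg 102;
   had the equivalence been to an EQ/LT style comparison, CODE itself would
   have been taken from it or reversed as described above.  */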
/* If X is a nontrivial arithmetic operation on an argument for which
   a constant value can be determined, return the result of operating
   on that value, as a constant.  Otherwise, return X, possibly with
   one or more operands changed to a forward-propagated constant.

   If X is a register whose contents are known, we do NOT return
   those contents here; equiv_constant is called to perform that task.
   For SUBREGs and MEMs, we do that both here and in equiv_constant.

   INSN is the insn that we may be modifying.  If it is 0, make a copy
   of X before modifying it.  */

static rtx
fold_rtx (rtx x, rtx_insn *insn)
{
  enum rtx_code code;
  machine_mode mode;
  const char *fmt;
  int i;
  rtx new_rtx = 0;
  int changed = 0;

  /* Operands of X.  */
  rtx folded_arg0;
  rtx folded_arg1;

  /* Constant equivalents of first three operands of X;
     0 when no such equivalent is known.  */
  rtx const_arg0;
  rtx const_arg1;
  rtx const_arg2;

  /* The mode of the first operand of X.  We need this for sign and zero
     extends.  */
  machine_mode mode_arg0;

  if (x == 0)
    return x;

  /* Try to perform some initial simplifications on X.  */
  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
    case SUBREG:
      if ((new_rtx = equiv_constant (x)) != NULL_RTX)
        return new_rtx;
      return x;

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
    case PC:
      /* No use simplifying an EXPR_LIST
         since they are used only for lists of args
         in a function call's REG_EQUAL note.  */
    case EXPR_LIST:
      return x;

#ifdef HAVE_cc0
    case CC0:
      return prev_insn_cc0;
#endif

    case ASM_OPERANDS:
      if (insn)
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
                             fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
        }
      return x;

#ifdef NO_FUNCTION_CSE
    case CALL:
      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
        return x;
      break;
#endif

    /* Anything else goes through the loop below.  */
    default:
      break;
    }

  mode = GET_MODE (x);
  const_arg0 = 0;
  const_arg1 = 0;
  const_arg2 = 0;
  mode_arg0 = VOIDmode;

  /* Try folding our operands.
     Then see which ones have constant values known.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        rtx folded_arg = XEXP (x, i), const_arg;
        machine_mode mode_arg = GET_MODE (folded_arg);

        switch (GET_CODE (folded_arg))
          {
          case MEM:
          case REG:
          case SUBREG:
            const_arg = equiv_constant (folded_arg);
            break;

          case CONST:
          CASE_CONST_ANY:
          case SYMBOL_REF:
          case LABEL_REF:
            const_arg = folded_arg;
            break;

#ifdef HAVE_cc0
          case CC0:
            /* The cc0-user and cc0-setter may be in different blocks if
               the cc0-setter potentially traps.  In that case PREV_INSN_CC0
               will have been cleared as we exited the block with the
               setter.

               While we could potentially track cc0 in this case, it just
               doesn't seem to be worth it given that cc0 targets are not
               terribly common or important these days and trapping math
               is rarely used.  The combination of those two conditions
               necessary to trip this situation is exceedingly rare in the
               real world.  */
            if (!prev_insn_cc0)
              {
                const_arg = NULL_RTX;
              }
            else
              {
                folded_arg = prev_insn_cc0;
                mode_arg = prev_insn_cc0_mode;
                const_arg = equiv_constant (folded_arg);
              }
            break;
#endif

          default:
            folded_arg = fold_rtx (folded_arg, insn);
            const_arg = equiv_constant (folded_arg);
            break;
          }

        /* For the first three operands, see if the operand
           is constant or equivalent to a constant.  */
        switch (i)
          {
          case 0:
            folded_arg0 = folded_arg;
            const_arg0 = const_arg;
            mode_arg0 = mode_arg;
            break;
          case 1:
            folded_arg1 = folded_arg;
            const_arg1 = const_arg;
            break;
          case 2:
            const_arg2 = const_arg;
            break;
          }

        /* Pick the least expensive of the argument and an equivalent constant
           argument.  */
        if (const_arg != 0
            && const_arg != folded_arg
            && COST_IN (const_arg, code, i) <= COST_IN (folded_arg, code, i)

            /* It's not safe to substitute the operand of a conversion
               operator with a constant, as the conversion's identity
               depends upon the mode of its operand.  This optimization
               is handled by the call to simplify_unary_operation.  */
            && (GET_RTX_CLASS (code) != RTX_UNARY
                || GET_MODE (const_arg) == mode_arg0
                || (code != ZERO_EXTEND
                    && code != SIGN_EXTEND
                    && code != TRUNCATE
                    && code != FLOAT_TRUNCATE
                    && code != FLOAT_EXTEND
                    && code != FLOAT
                    && code != FIX
                    && code != UNSIGNED_FLOAT
                    && code != UNSIGNED_FIX)))
          folded_arg = const_arg;

        if (folded_arg == XEXP (x, i))
          continue;

        if (insn == NULL_RTX && !changed)
          x = copy_rtx (x);
        changed = 1;
        validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
      }

  if (changed)
    {
      /* Canonicalize X if necessary, and keep const_argN and folded_argN
         consistent with the order in X.  */
      if (canonicalize_change_group (insn, x))
        {
          rtx tem;
          tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
          tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
        }

      apply_change_group ();
    }
  /* If X is an arithmetic operation, see if we can simplify it.  */

  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
        /* We can't simplify extension ops unless we know the
           original mode.  */
        if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
            && mode_arg0 == VOIDmode)
          break;

        new_rtx = simplify_unary_operation (code, mode,
                                            const_arg0 ? const_arg0 : folded_arg0,
                                            mode_arg0);
      }
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      /* See what items are actually being compared and set FOLDED_ARG[01]
         to those values and CODE to the actual comparison code.  If any are
         constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
         do anything if both operands are already known to be constant.  */

      /* ??? Vector mode comparisons are not supported yet.  */
      if (VECTOR_MODE_P (mode))
        break;

      if (const_arg0 == 0 || const_arg1 == 0)
        {
          struct table_elt *p0, *p1;
          rtx true_rtx, false_rtx;
          machine_mode mode_arg1;

          if (SCALAR_FLOAT_MODE_P (mode))
            {
#ifdef FLOAT_STORE_FLAG_VALUE
              true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
                          (FLOAT_STORE_FLAG_VALUE (mode), mode));
#else
              true_rtx = NULL_RTX;
#endif
              false_rtx = CONST0_RTX (mode);
            }
          else
            {
              true_rtx = const_true_rtx;
              false_rtx = const0_rtx;
            }

          code = find_comparison_args (code, &folded_arg0, &folded_arg1,
                                       &mode_arg0, &mode_arg1);

          /* If the mode is VOIDmode or a MODE_CC mode, we don't know
             what kinds of things are being compared, so we can't do
             anything with this comparison.  */

          if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
            break;

          const_arg0 = equiv_constant (folded_arg0);
          const_arg1 = equiv_constant (folded_arg1);

          /* If we do not now have two constants being compared, see
             if we can nevertheless deduce some things about the
             comparison.  */
          if (const_arg0 == 0 || const_arg1 == 0)
            {
              if (const_arg1 != NULL)
                {
                  rtx cheapest_simplification;
                  int cheapest_cost;
                  rtx simp_result;
                  struct table_elt *p;

                  /* See if we can find an equivalent of folded_arg0
                     that gets us a cheaper expression, possibly a
                     constant through simplifications.  */
                  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
                              mode_arg0);

                  if (p != NULL)
                    {
                      cheapest_simplification = x;
                      cheapest_cost = COST (x);

                      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
                        {
                          int cost;

                          /* If the entry isn't valid, skip it.  */
                          if (! exp_equiv_p (p->exp, p->exp, 1, false))
                            continue;

                          /* Try to simplify using this equivalence.  */
                          simp_result
                            = simplify_relational_operation (code, mode,
                                                             mode_arg0,
                                                             p->exp,
                                                             const_arg1);

                          if (simp_result == NULL)
                            continue;

                          cost = COST (simp_result);
                          if (cost < cheapest_cost)
                            {
                              cheapest_cost = cost;
                              cheapest_simplification = simp_result;
                            }
                        }

                      /* If we have a cheaper expression now, use that
                         and try folding it further, from the top.  */
                      if (cheapest_simplification != x)
                        return fold_rtx (copy_rtx (cheapest_simplification),
                                         insn);
                    }
                }

              /* See if the two operands are the same.  */

              if ((REG_P (folded_arg0)
                   && REG_P (folded_arg1)
                   && (REG_QTY (REGNO (folded_arg0))
                       == REG_QTY (REGNO (folded_arg1))))
                  || ((p0 = lookup (folded_arg0,
                                    SAFE_HASH (folded_arg0, mode_arg0),
                                    mode_arg0))
                      && (p1 = lookup (folded_arg1,
                                       SAFE_HASH (folded_arg1, mode_arg0),
                                       mode_arg0))
                      && p0->first_same_value == p1->first_same_value))
                folded_arg1 = folded_arg0;

              /* If FOLDED_ARG0 is a register, see if the comparison we are
                 doing now is either the same as we did before or the reverse
                 (we only check the reverse if not floating-point).  */
              else if (REG_P (folded_arg0))
                {
                  int qty = REG_QTY (REGNO (folded_arg0));

                  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
                    {
                      struct qty_table_elem *ent = &qty_table[qty];

                      if ((comparison_dominates_p (ent->comparison_code, code)
                           || (! FLOAT_MODE_P (mode_arg0)
                               && comparison_dominates_p (ent->comparison_code,
                                                          reverse_condition (code))))
                          && (rtx_equal_p (ent->comparison_const, folded_arg1)
                              || (const_arg1
                                  && rtx_equal_p (ent->comparison_const,
                                                  const_arg1))
                              || (REG_P (folded_arg1)
                                  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
                        {
                          if (comparison_dominates_p (ent->comparison_code, code))
                            {
                              if (true_rtx)
                                return true_rtx;
                              else
                                break;
                            }
                          else
                            return false_rtx;
                        }
                    }
                }
            }
        }

      /* If we are comparing against zero, see if the first operand is
         equivalent to an IOR with a constant.  If so, we may be able to
         determine the result of this comparison.  */
      if (const_arg1 == const0_rtx && !const_arg0)
        {
          rtx y = lookup_as_function (folded_arg0, IOR);
          rtx inner_const;

          if (y != 0
              && (inner_const = equiv_constant (XEXP (y, 1))) != 0
              && CONST_INT_P (inner_const)
              && INTVAL (inner_const) != 0)
            folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
        }

      {
        rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
        rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
        new_rtx = simplify_relational_operation (code, mode, mode_arg0,
                                                 op0, op1);
      }
      break;
    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      switch (code)
        {
        case PLUS:
          /* If the second operand is a LABEL_REF, see if the first is a MINUS
             with that LABEL_REF as its second operand.  If so, the result is
             the first operand of that MINUS.  This handles switches with an
             ADDR_DIFF_VEC table.  */
          if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
            {
              rtx y
                = GET_CODE (folded_arg0) == MINUS ? folded_arg0
                  : lookup_as_function (folded_arg0, MINUS);

              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
                  && LABEL_REF_LABEL (XEXP (y, 1)) == LABEL_REF_LABEL (const_arg1))
                return XEXP (y, 0);

              /* Now try for a CONST of a MINUS like the above.  */
              if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
                        : lookup_as_function (folded_arg0, CONST))) != 0
                  && GET_CODE (XEXP (y, 0)) == MINUS
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
                  && LABEL_REF_LABEL (XEXP (XEXP (y, 0), 1)) == LABEL_REF_LABEL (const_arg1))
                return XEXP (XEXP (y, 0), 0);
            }

          /* Likewise if the operands are in the other order.  */
          if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
            {
              rtx y
                = GET_CODE (folded_arg1) == MINUS ? folded_arg1
                  : lookup_as_function (folded_arg1, MINUS);

              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
                  && LABEL_REF_LABEL (XEXP (y, 1)) == LABEL_REF_LABEL (const_arg0))
                return XEXP (y, 0);

              /* Now try for a CONST of a MINUS like the above.  */
              if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
                        : lookup_as_function (folded_arg1, CONST))) != 0
                  && GET_CODE (XEXP (y, 0)) == MINUS
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
                  && LABEL_REF_LABEL (XEXP (XEXP (y, 0), 1)) == LABEL_REF_LABEL (const_arg0))
                return XEXP (XEXP (y, 0), 0);
            }

          /* If second operand is a register equivalent to a negative
             CONST_INT, see if we can find a register equivalent to the
             positive constant.  Make a MINUS if so.  Don't do this for
             a non-negative constant since we might then alternate between
             choosing positive and negative constants.  Having the positive
             constant previously-used is the more common case.  Be sure
             the resulting constant is non-negative; if const_arg1 were
             the smallest negative number this would overflow: depending
             on the mode, this would either just be the same value (and
             hence not save anything) or be incorrect.  */
          if (const_arg1 != 0 && CONST_INT_P (const_arg1)
              && INTVAL (const_arg1) < 0
              /* This used to test

                 -INTVAL (const_arg1) >= 0

                 But The Sun V5.0 compilers mis-compiled that test.  So
                 instead we test for the problematic value in a more direct
                 manner and hope the Sun compilers get it correct.  */
              && INTVAL (const_arg1) !=
                   ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
              && REG_P (folded_arg1))
            {
              rtx new_const = GEN_INT (-INTVAL (const_arg1));
              struct table_elt *p
                = lookup (new_const, SAFE_HASH (new_const, mode), mode);

              if (p)
                for (p = p->first_same_value; p; p = p->next_same_value)
                  if (REG_P (p->exp))
                    return simplify_gen_binary (MINUS, mode, folded_arg0,
                                                canon_reg (p->exp, NULL));
            }
          goto from_plus;

        case MINUS:
          /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
             If so, produce (PLUS Z C2-C).  */
          if (const_arg1 != 0 && CONST_INT_P (const_arg1))
            {
              rtx y = lookup_as_function (XEXP (x, 0), PLUS);
              if (y && CONST_INT_P (XEXP (y, 1)))
                return fold_rtx (plus_constant (mode, copy_rtx (y),
                                                -INTVAL (const_arg1)),
                                 NULL);
            }

          /* Fall through.  */

        from_plus:
        case SMIN:  case SMAX:  case UMIN:  case UMAX:
        case IOR:   case AND:   case XOR:
        case MULT:
        case ASHIFT: case LSHIFTRT: case ASHIFTRT:
          /* If we have (<op> <reg> <const_int>) for an associative OP and REG
             is known to be of similar form, we may be able to replace the
             operation with a combined operation.  This may eliminate the
             intermediate operation if every use is simplified in this way.
             Note that the similar optimization done by combine.c only works
             if the intermediate operation's result has only one reference.  */

          if (REG_P (folded_arg0)
              && const_arg1 && CONST_INT_P (const_arg1))
            {
              int is_shift
                = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
              rtx y, inner_const, new_const;
              rtx canon_const_arg1 = const_arg1;
              enum rtx_code associate_code;

              if (is_shift
                  && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
                      || INTVAL (const_arg1) < 0))
                {
                  if (SHIFT_COUNT_TRUNCATED)
                    canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
                                                & (GET_MODE_BITSIZE (mode)
                                                   - 1));
                  else
                    break;
                }

              y = lookup_as_function (folded_arg0, code);
              if (y == 0)
                break;

              /* If we have compiled a statement like
                 "if (x == (x & mask1))", and now are looking at
                 "x & mask2", we will have a case where the first operand
                 of Y is the same as our first operand.  Unless we detect
                 this case, an infinite loop will result.  */
              if (XEXP (y, 0) == folded_arg0)
                break;

              inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
              if (!inner_const || !CONST_INT_P (inner_const))
                break;

              /* Don't associate these operations if they are a PLUS with the
                 same constant and it is a power of two.  These might be doable
                 with a pre- or post-increment.  Similarly for two subtracts of
                 identical powers of two with post decrement.  */

              if (code == PLUS && const_arg1 == inner_const
                  && ((HAVE_PRE_INCREMENT
                       && exact_log2 (INTVAL (const_arg1)) >= 0)
                      || (HAVE_POST_INCREMENT
                          && exact_log2 (INTVAL (const_arg1)) >= 0)
                      || (HAVE_PRE_DECREMENT
                          && exact_log2 (- INTVAL (const_arg1)) >= 0)
                      || (HAVE_POST_DECREMENT
                          && exact_log2 (- INTVAL (const_arg1)) >= 0)))
                break;

              /* ??? Vector mode shifts by scalar
                 shift operand are not supported yet.  */
              if (is_shift && VECTOR_MODE_P (mode))
                break;

              if (is_shift
                  && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
                      || INTVAL (inner_const) < 0))
                {
                  if (SHIFT_COUNT_TRUNCATED)
                    inner_const = GEN_INT (INTVAL (inner_const)
                                           & (GET_MODE_BITSIZE (mode) - 1));
                  else
                    break;
                }

              /* Compute the code used to compose the constants.  For example,
                 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */

              associate_code = (is_shift || code == MINUS ? PLUS : code);

              new_const = simplify_binary_operation (associate_code, mode,
                                                     canon_const_arg1,
                                                     inner_const);

              if (new_const == 0)
                break;

              /* If we are associating shift operations, don't let this
                 produce a shift of the size of the object or larger.
                 This could occur when we follow a sign-extend by a right
                 shift on a machine that does a sign-extend as a pair
                 of shifts.  */

              if (is_shift
                  && CONST_INT_P (new_const)
                  && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
                {
                  /* As an exception, we can turn an ASHIFTRT of this
                     form into a shift of the number of bits - 1.  */
                  if (code == ASHIFTRT)
                    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
                  else if (!side_effects_p (XEXP (y, 0)))
                    return CONST0_RTX (mode);
                  else
                    break;
                }

              y = copy_rtx (XEXP (y, 0));

              /* If Y contains our first operand (the most common way this
                 can happen is if Y is a MEM), we would go into an infinite
                 loop if we tried to fold it.  So don't in that case.  */

              if (! reg_mentioned_p (folded_arg0, y))
                y = fold_rtx (y, insn);

              return simplify_gen_binary (code, mode, y, new_const);
            }
          break;

        case DIV:       case UDIV:
          /* ??? The associative optimization performed immediately above is
             also possible for DIV and UDIV using associate_code of MULT.
             However, we would need extra code to verify that the
             multiplication does not overflow, that is, there is no overflow
             in the calculation of new_const.  */
          break;

        default:
          break;
        }

      new_rtx = simplify_binary_operation (code, mode,
                                           const_arg0 ? const_arg0 : folded_arg0,
                                           const_arg1 ? const_arg1 : folded_arg1);
      break;

    case RTX_OBJ:
      /* (lo_sum (high X) X) is simply X.  */
      if (code == LO_SUM && const_arg0 != 0
          && GET_CODE (const_arg0) == HIGH
          && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
        return const_arg1;
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
                                            const_arg0 ? const_arg0 : folded_arg0,
                                            const_arg1 ? const_arg1 : folded_arg1,
                                            const_arg2 ? const_arg2 : XEXP (x, 2));
      break;

    default:
      break;
    }

  return new_rtx ? new_rtx : x;
}
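/* Small worked case of the association handled above (illustrative):
   folding (ashift:SI (reg 100) (const_int 3)) when reg 100 is known to be
   (ashift:SI (reg 101) (const_int 2)) composes the two counts with PLUS and
   yields (ashift:SI (reg 101) (const_int 5)), unless the combined count
   would reach the mode's precision, which the checks above reject.  */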
/* Return a constant value currently equivalent to X.
   Return 0 if we don't know one.  */

static rtx
equiv_constant (rtx x)
{
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (x_ent->const_rtx)
        x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
    }

  if (x == 0 || CONSTANT_P (x))
    return x;

  if (GET_CODE (x) == SUBREG)
    {
      machine_mode mode = GET_MODE (x);
      machine_mode imode = GET_MODE (SUBREG_REG (x));
      rtx new_rtx;

      /* See if we previously assigned a constant value to this SUBREG.  */
      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
          || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
          || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
          || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
        return new_rtx;

      /* If we didn't and if doing so makes sense, see if we previously
         assigned a constant value to the enclosing word mode SUBREG.  */
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
          && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
        {
          int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
          if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
            {
              rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
              new_rtx = lookup_as_function (y, CONST_INT);
              if (new_rtx)
                return gen_lowpart (mode, new_rtx);
            }
        }

      /* Otherwise see if we already have a constant for the inner REG,
         and if that is enough to calculate an equivalent constant for
         the subreg.  Note that the upper bits of paradoxical subregs
         are undefined, so they cannot be said to equal anything.  */
      if (REG_P (SUBREG_REG (x))
          && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (imode)
          && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
        return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));

      return 0;
    }

  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
     the hash table in case its value was seen before.  */

  if (MEM_P (x))
    {
      struct table_elt *elt;

      x = avoid_constant_pool_reference (x);
      if (CONSTANT_P (x))
        return x;

      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
      if (elt == 0)
        return 0;

      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
        if (elt->is_const && CONSTANT_P (elt->exp))
          return elt->exp;
    }

  return 0;
}
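/* For instance (illustrative): equiv_constant of (subreg:QI (reg:SI 70) 0)
   first looks for a constant recorded for that exact SUBREG and otherwise,
   if reg 70 itself has a known constant, asks simplify_subreg for the
   QImode lowpart of it.  */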
/* Given INSN, a jump insn, TAKEN indicates if we are following the
   "taken" branch.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of
        if (i == 2)
   we can add the fact that `i' and '2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */

static void
record_jump_equiv (rtx_insn *insn, bool taken)
{
  int cond_known_true;
  rtx op0, op1;
  rtx set;
  machine_mode mode, mode0, mode1;
  int reversed_nonequality = 0;
  enum rtx_code code;

  /* Ensure this is the right kind of insn.  */
  gcc_assert (any_condjump_p (insn));

  set = pc_set (insn);

  /* See if this jump condition is known true or false.  */
  if (taken)
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
  else
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);

  /* Get the type of comparison being done and the operands being compared.
     If we had to reverse a non-equality condition, record that fact so we
     know that it isn't valid for floating-point.  */
  code = GET_CODE (XEXP (SET_SRC (set), 0));
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);

  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
  if (! cond_known_true)
    {
      code = reversed_comparison_code_parts (code, op0, op1, insn);

      /* Don't remember if we can't find the inverse.  */
      if (code == UNKNOWN)
        return;

      reversed_nonequality = (code != EQ && code != NE);
    }

  /* The mode is the mode of the non-constant.  */
  mode = mode0;
  if (mode1 != VOIDmode)
    mode = mode1;

  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}
/* Yet another form of subreg creation.  In this case, we want something in
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */

static rtx
record_jump_cond_subreg (machine_mode mode, rtx op)
{
  machine_mode op_mode = GET_MODE (op);
  if (op_mode == mode || op_mode == VOIDmode)
    return op;
  return lowpart_subreg (mode, op, op_mode);
}
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */

static void
record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
                  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && paradoxical_subreg_p (op0))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == EQ && paradoxical_subreg_p (op1))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Hash both operands.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.   Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
         register, or if OP1 is neither a register or constant, we can't
         do anything.  */

      if (!REG_P (op1))
        op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
          || !REG_P (op0) || op1 == 0)
        return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
         new quantity number.  */
      if (op0_elt == 0)
        {
          if (insert_regs (op0, NULL, 0))
            {
              rehash_using_reg (op0);
              op0_hash = HASH (op0, mode);

              /* If OP0 is contained in OP1, this changes its hash code
                 as well.  Faster to rehash than to check, except
                 for the simple case of a constant.  */
              if (! CONSTANT_P (op1))
                op1_hash = HASH (op1,mode);
            }

          op0_elt = insert (op0, NULL, op0_hash, mode);
          op0_elt->in_memory = op0_in_memory;
        }

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
        {
          /* Look it up again--in case op0 and op1 are the same.  */
          op1_elt = lookup (op1, op1_hash, mode);

          /* Put OP1 in the hash table so it gets a new quantity number.  */
          if (op1_elt == 0)
            {
              if (insert_regs (op1, NULL, 0))
                {
                  rehash_using_reg (op1);
                  op1_hash = HASH (op1, mode);
                }

              op1_elt = insert (op1, NULL, op1_hash, mode);
              op1_elt->in_memory = op1_in_memory;
            }

          ent->comparison_const = NULL_RTX;
          ent->comparison_qty = REG_QTY (REGNO (op1));
        }
      else
        {
          ent->comparison_const = op1;
          ent->comparison_qty = -1;
        }

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
        {
          rehash_using_reg (op0);
          op0_hash = HASH (op0, mode);
        }

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
        {
          rehash_using_reg (op1);
          op1_hash = HASH (op1, mode);
        }

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}
/* CSE processing for one instruction.

   Most "true" common subexpressions are mostly optimized away in GIMPLE,
   but the few that "leak through" are cleaned up by cse_insn, and complex
   addressing modes are often formed here.

   The main function is cse_insn, and between here and that function
   a couple of helper functions are defined to keep the size of cse_insn
   within reasonable proportions.

   Data is shared between the main and helper functions via STRUCT SET,
   which contains all data related to every set in the instruction that
   is being processed.

   Note that cse_main processes all sets in the instruction.  Most
   passes in GCC only process simple SET insns or single_set insns, but
   CSE processes insns with multiple sets as well.  */

/* Data on one SET contained in the instruction.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.  */
  struct table_elt *dest_addr_elt;
};
/* Special handling for (set REG0 REG1) where REG0 is the
   "cheapest", cheaper than REG1.  After cse, REG1 will probably not
   be used in the sequel, so (if easily done) change this insn to
   (set REG1 REG0) and replace REG1 with REG0 in the previous insn
   that computed their value.  Then REG1 will become a dead store
   and won't cloud the situation for later optimizations.

   Do not make this change if REG1 is a hard register, because it will
   then be used in the sequel and we may be changing a two-operand insn
   into a three-operand insn.

   This is the last transformation that cse_insn will try to do.  */

static void
try_back_substitute_reg (rtx set, rtx_insn *insn)
{
  rtx dest = SET_DEST (set);
  rtx src = SET_SRC (set);

  if (REG_P (dest)
      && REG_P (src) && ! HARD_REGISTER_P (src)
      && REGNO_QTY_VALID_P (REGNO (src)))
    {
      int src_q = REG_QTY (REGNO (src));
      struct qty_table_elem *src_ent = &qty_table[src_q];

      if (src_ent->first_reg == REGNO (dest))
        {
          /* Scan for the previous nonnote insn, but stop at a basic
             block boundary.  */
          rtx_insn *prev = insn;
          rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
          do
            {
              prev = PREV_INSN (prev);
            }
          while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));

          /* Do not swap the registers around if the previous instruction
             attaches a REG_EQUIV note to REG1.

             ??? It's not entirely clear whether we can transfer a REG_EQUIV
             from the pseudo that originally shadowed an incoming argument
             to another register.  Some uses of REG_EQUIV might rely on it
             being attached to REG1 rather than REG2.

             This section previously turned the REG_EQUIV into a REG_EQUAL
             note.  We cannot do that because REG_EQUIV may provide an
             uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
          if (NONJUMP_INSN_P (prev)
              && GET_CODE (PATTERN (prev)) == SET
              && SET_DEST (PATTERN (prev)) == src
              && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
            {
              rtx note;

              validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
              validate_change (insn, &SET_DEST (set), src, 1);
              validate_change (insn, &SET_SRC (set), dest, 1);
              apply_change_group ();

              /* If INSN has a REG_EQUAL note, and this note mentions
                 REG0, then we must delete it, because the value in
                 REG0 has changed.  If the note's value is REG1, we must
                 also delete it because that is now this insn's dest.  */
              note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
              if (note != 0
                  && (reg_mentioned_p (dest, XEXP (note, 0))
                      || rtx_equal_p (src, XEXP (note, 0))))
                remove_note (insn, note);
            }
        }
    }
}
/* Record all the SETs in this instruction into SETS_PTR,
   and return the number of recorded sets.  */
static int
find_sets_in_insn (rtx_insn *insn, struct set **psets)
{
  struct set *sets = *psets;
  int n_sets = 0;
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == SET)
    {
      /* Ignore SETs that are unconditional jumps.
	 They never need cse processing, so this does not hurt.
	 The reason is not efficiency but rather
	 so that we can test at the end for instructions
	 that have been simplified to unconditional jumps
	 and not be misled by unchanged instructions
	 that were unconditional jumps to begin with.  */
      if (SET_DEST (x) == pc_rtx
	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
	;
      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
	 The hard function value register is used only once, to copy to
	 someplace else, so it isn't worth cse'ing.  */
      else if (GET_CODE (SET_SRC (x)) == CALL)
	;
      else
	sets[n_sets++].rtl = x;
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i, lim = XVECLEN (x, 0);

      /* Go over the expressions of the PARALLEL in forward order, to
	 put them in the same order in the SETS array.  */
      for (i = 0; i < lim; i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == SET)
	    {
	      /* As above, we ignore unconditional jumps and call-insns and
		 ignore the result of apply_change_group.  */
	      if (SET_DEST (y) == pc_rtx
		  && GET_CODE (SET_SRC (y)) == LABEL_REF)
		;
	      else if (GET_CODE (SET_SRC (y)) == CALL)
		;
	      else
		sets[n_sets++].rtl = y;
	    }
	}
    }

  return n_sets;
}
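
/* Illustrative note, not part of the original sources: for a hypothetical
   multi-set insn such as

       (parallel [(set (reg:SI 100) (div:SI (reg:SI 102) (reg:SI 103)))
		  (set (reg:SI 101) (mod:SI (reg:SI 102) (reg:SI 103)))])

   both SETs are recorded in the SETS array, whereas most other RTL passes
   would only look at single_set insns.  */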
/* Where possible, substitute every register reference in the N_SETS
   number of SETS in INSN with the canonical register.

   Register canonicalization propagates the earliest register (i.e.
   one that is set before INSN) with the same value.  This is a very
   useful, simple form of CSE, to clean up warts from expanding GIMPLE
   to RTL.  For instance, a CONST for an address is usually expanded
   multiple times to loads into different registers, thus creating many
   subexpressions of the form:

      (set (reg1) (some_const))
      (set (mem (... reg1 ...) (thing)))
      (set (reg2) (some_const))
      (set (mem (... reg2 ...) (thing)))

   After canonicalizing, the code takes the following form:

      (set (reg1) (some_const))
      (set (mem (... reg1 ...) (thing)))
      (set (reg2) (some_const))
      (set (mem (... reg1 ...) (thing)))

   The set to reg2 is now trivially dead, and the memory reference (or
   address, or whatever) may be a candidate for further CSEing.

   In this function, the result of apply_change_group can be ignored;
   see canon_reg.  */
4312 canonicalize_insn (rtx_insn
*insn
, struct set
**psets
, int n_sets
)
4314 struct set
*sets
= *psets
;
4316 rtx x
= PATTERN (insn
);
4321 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4322 if (GET_CODE (XEXP (tem
, 0)) != SET
)
4323 XEXP (tem
, 0) = canon_reg (XEXP (tem
, 0), insn
);
4326 if (GET_CODE (x
) == SET
&& GET_CODE (SET_SRC (x
)) == CALL
)
4328 canon_reg (SET_SRC (x
), insn
);
4329 apply_change_group ();
4330 fold_rtx (SET_SRC (x
), insn
);
4332 else if (GET_CODE (x
) == CLOBBER
)
4334 /* If we clobber memory, canon the address.
4335 This does nothing when a register is clobbered
4336 because we have already invalidated the reg. */
4337 if (MEM_P (XEXP (x
, 0)))
4338 canon_reg (XEXP (x
, 0), insn
);
4340 else if (GET_CODE (x
) == USE
4341 && ! (REG_P (XEXP (x
, 0))
4342 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
))
4343 /* Canonicalize a USE of a pseudo register or memory location. */
4344 canon_reg (x
, insn
);
4345 else if (GET_CODE (x
) == ASM_OPERANDS
)
4347 for (i
= ASM_OPERANDS_INPUT_LENGTH (x
) - 1; i
>= 0; i
--)
4349 rtx input
= ASM_OPERANDS_INPUT (x
, i
);
4350 if (!(REG_P (input
) && REGNO (input
) < FIRST_PSEUDO_REGISTER
))
4352 input
= canon_reg (input
, insn
);
4353 validate_change (insn
, &ASM_OPERANDS_INPUT (x
, i
), input
, 1);
4357 else if (GET_CODE (x
) == CALL
)
4359 canon_reg (x
, insn
);
4360 apply_change_group ();
4363 else if (DEBUG_INSN_P (insn
))
4364 canon_reg (PATTERN (insn
), insn
);
4365 else if (GET_CODE (x
) == PARALLEL
)
4367 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
4369 rtx y
= XVECEXP (x
, 0, i
);
4370 if (GET_CODE (y
) == SET
&& GET_CODE (SET_SRC (y
)) == CALL
)
4372 canon_reg (SET_SRC (y
), insn
);
4373 apply_change_group ();
4374 fold_rtx (SET_SRC (y
), insn
);
4376 else if (GET_CODE (y
) == CLOBBER
)
4378 if (MEM_P (XEXP (y
, 0)))
4379 canon_reg (XEXP (y
, 0), insn
);
4381 else if (GET_CODE (y
) == USE
4382 && ! (REG_P (XEXP (y
, 0))
4383 && REGNO (XEXP (y
, 0)) < FIRST_PSEUDO_REGISTER
))
4384 canon_reg (y
, insn
);
4385 else if (GET_CODE (y
) == CALL
)
4387 canon_reg (y
, insn
);
4388 apply_change_group ();
4394 if (n_sets
== 1 && REG_NOTES (insn
) != 0
4395 && (tem
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)) != 0)
      /* We potentially will process this insn many times.  Therefore,
	 drop the REG_EQUAL note if it is equal to the SET_SRC of the
	 single SET in this insn.

	 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
	 because cse_insn handles those specially.  */
4403 if (GET_CODE (SET_DEST (sets
[0].rtl
)) != STRICT_LOW_PART
4404 && rtx_equal_p (XEXP (tem
, 0), SET_SRC (sets
[0].rtl
)))
4405 remove_note (insn
, tem
);
4408 canon_reg (XEXP (tem
, 0), insn
);
4409 apply_change_group ();
4410 XEXP (tem
, 0) = fold_rtx (XEXP (tem
, 0), insn
);
4411 df_notes_rescan (insn
);
  /* Canonicalize sources and addresses of destinations.
     We do this in a separate pass to avoid problems when a MATCH_DUP is
     present in the insn pattern.  In that case, we want to ensure that
     we don't break the duplicate nature of the pattern.  So we will replace
     both operands at the same time.  Otherwise, we would fail to find an
     equivalent substitution in the loop calling validate_change below.

     We used to suppress canonicalization of DEST if it appears in SRC,
     but we don't do this any more.  */
4425 for (i
= 0; i
< n_sets
; i
++)
4427 rtx dest
= SET_DEST (sets
[i
].rtl
);
4428 rtx src
= SET_SRC (sets
[i
].rtl
);
4429 rtx new_rtx
= canon_reg (src
, insn
);
4431 validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_rtx
, 1);
4433 if (GET_CODE (dest
) == ZERO_EXTRACT
)
4435 validate_change (insn
, &XEXP (dest
, 1),
4436 canon_reg (XEXP (dest
, 1), insn
), 1);
4437 validate_change (insn
, &XEXP (dest
, 2),
4438 canon_reg (XEXP (dest
, 2), insn
), 1);
4441 while (GET_CODE (dest
) == SUBREG
4442 || GET_CODE (dest
) == ZERO_EXTRACT
4443 || GET_CODE (dest
) == STRICT_LOW_PART
)
4444 dest
= XEXP (dest
, 0);
4447 canon_reg (dest
, insn
);
  /* Now that we have done all the replacements, we can apply the change
     group and see if they all work.  Note that this will cause some
     canonicalizations that would have worked individually not to be applied
     because some other canonicalization didn't work, but this should not
     occur very often.

     The result of apply_change_group can be ignored; see canon_reg.  */

  apply_change_group ();
}
/* Main function of CSE.

   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.  */
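
/* Illustrative sketch, not part of the original sources, using hypothetical
   pseudos: if an earlier insn in the extended basic block computed

       (set (reg:SI 101) (plus:SI (reg:SI 100) (const_int 4)))

   then for a later

       (set (reg:SI 103) (plus:SI (reg:SI 100) (const_int 4)))

   cse_insn finds the PLUS in the hash table and may replace the source with
   (reg:SI 101), the cheapest known equivalent.  */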
4468 cse_insn (rtx_insn
*insn
)
4470 rtx x
= PATTERN (insn
);
4476 struct table_elt
*src_eqv_elt
= 0;
4477 int src_eqv_volatile
= 0;
4478 int src_eqv_in_memory
= 0;
4479 unsigned src_eqv_hash
= 0;
4481 struct set
*sets
= (struct set
*) 0;
4483 if (GET_CODE (x
) == SET
)
4484 sets
= XALLOCA (struct set
);
4485 else if (GET_CODE (x
) == PARALLEL
)
4486 sets
= XALLOCAVEC (struct set
, XVECLEN (x
, 0));
4490 /* Records what this insn does to set CC0. */
4492 this_insn_cc0_mode
= VOIDmode
;
4495 /* Find all regs explicitly clobbered in this insn,
4496 to ensure they are not replaced with any other regs
4497 elsewhere in this insn. */
4498 invalidate_from_sets_and_clobbers (insn
);
4500 /* Record all the SETs in this instruction. */
4501 n_sets
= find_sets_in_insn (insn
, &sets
);
4503 /* Substitute the canonical register where possible. */
4504 canonicalize_insn (insn
, &sets
, n_sets
);
4506 /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4507 if different, or if the DEST is a STRICT_LOW_PART. The latter condition
4508 is necessary because SRC_EQV is handled specially for this case, and if
4509 it isn't set, then there will be no equivalence for the destination. */
4510 if (n_sets
== 1 && REG_NOTES (insn
) != 0
4511 && (tem
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)) != 0
4512 && (! rtx_equal_p (XEXP (tem
, 0), SET_SRC (sets
[0].rtl
))
4513 || GET_CODE (SET_DEST (sets
[0].rtl
)) == STRICT_LOW_PART
))
4514 src_eqv
= copy_rtx (XEXP (tem
, 0));
4516 /* Set sets[i].src_elt to the class each source belongs to.
4517 Detect assignments from or to volatile things
4518 and set set[i] to zero so they will be ignored
4519 in the rest of this function.
4521 Nothing in this loop changes the hash table or the register chains. */
4523 for (i
= 0; i
< n_sets
; i
++)
4525 bool repeat
= false;
4528 struct table_elt
*elt
= 0, *p
;
4532 rtx src_related
= 0;
4533 bool src_related_is_const_anchor
= false;
4534 struct table_elt
*src_const_elt
= 0;
4535 int src_cost
= MAX_COST
;
4536 int src_eqv_cost
= MAX_COST
;
4537 int src_folded_cost
= MAX_COST
;
4538 int src_related_cost
= MAX_COST
;
4539 int src_elt_cost
= MAX_COST
;
4540 int src_regcost
= MAX_COST
;
4541 int src_eqv_regcost
= MAX_COST
;
4542 int src_folded_regcost
= MAX_COST
;
4543 int src_related_regcost
= MAX_COST
;
4544 int src_elt_regcost
= MAX_COST
;
4545 /* Set nonzero if we need to call force_const_mem on with the
4546 contents of src_folded before using it. */
4547 int src_folded_force_flag
= 0;
4549 dest
= SET_DEST (sets
[i
].rtl
);
4550 src
= SET_SRC (sets
[i
].rtl
);
4552 /* If SRC is a constant that has no machine mode,
4553 hash it with the destination's machine mode.
4554 This way we can keep different modes separate. */
4556 mode
= GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
4557 sets
[i
].mode
= mode
;
4561 machine_mode eqvmode
= mode
;
4562 if (GET_CODE (dest
) == STRICT_LOW_PART
)
4563 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
4565 hash_arg_in_memory
= 0;
4566 src_eqv_hash
= HASH (src_eqv
, eqvmode
);
4568 /* Find the equivalence class for the equivalent expression. */
4571 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash
, eqvmode
);
4573 src_eqv_volatile
= do_not_record
;
4574 src_eqv_in_memory
= hash_arg_in_memory
;
4577 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4578 value of the INNER register, not the destination. So it is not
4579 a valid substitution for the source. But save it for later. */
4580 if (GET_CODE (dest
) == STRICT_LOW_PART
)
4583 src_eqv_here
= src_eqv
;
4585 /* Simplify and foldable subexpressions in SRC. Then get the fully-
4586 simplified result, which may not necessarily be valid. */
4587 src_folded
= fold_rtx (src
, insn
);
4590 /* ??? This caused bad code to be generated for the m68k port with -O2.
4591 Suppose src is (CONST_INT -1), and that after truncation src_folded
4592 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4593 At the end we will add src and src_const to the same equivalence
4594 class. We now have 3 and -1 on the same equivalence class. This
4595 causes later instructions to be mis-optimized. */
4596 /* If storing a constant in a bitfield, pre-truncate the constant
4597 so we will be able to record it later. */
4598 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
)
4600 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
4602 if (CONST_INT_P (src
)
4603 && CONST_INT_P (width
)
4604 && INTVAL (width
) < HOST_BITS_PER_WIDE_INT
4605 && (INTVAL (src
) & ((HOST_WIDE_INT
) (-1) << INTVAL (width
))))
4607 = GEN_INT (INTVAL (src
) & (((HOST_WIDE_INT
) 1
4608 << INTVAL (width
)) - 1));
4612 /* Compute SRC's hash code, and also notice if it
4613 should not be recorded at all. In that case,
4614 prevent any further processing of this assignment. */
4616 hash_arg_in_memory
= 0;
4619 sets
[i
].src_hash
= HASH (src
, mode
);
4620 sets
[i
].src_volatile
= do_not_record
;
4621 sets
[i
].src_in_memory
= hash_arg_in_memory
;
4623 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4624 a pseudo, do not record SRC. Using SRC as a replacement for
4625 anything else will be incorrect in that situation. Note that
4626 this usually occurs only for stack slots, in which case all the
4627 RTL would be referring to SRC, so we don't lose any optimization
4628 opportunities by not having SRC in the hash table. */
4631 && find_reg_note (insn
, REG_EQUIV
, NULL_RTX
) != 0
4633 && REGNO (dest
) >= FIRST_PSEUDO_REGISTER
)
4634 sets
[i
].src_volatile
= 1;
4636 /* Also do not record result of a non-volatile inline asm with
4637 more than one result or with clobbers, we do not want CSE to
4638 break the inline asm apart. */
4639 else if (GET_CODE (src
) == ASM_OPERANDS
4640 && GET_CODE (x
) == PARALLEL
)
4641 sets
[i
].src_volatile
= 1;
4644 /* It is no longer clear why we used to do this, but it doesn't
4645 appear to still be needed. So let's try without it since this
4646 code hurts cse'ing widened ops. */
4647 /* If source is a paradoxical subreg (such as QI treated as an SI),
4648 treat it as volatile. It may do the work of an SI in one context
4649 where the extra bits are not being used, but cannot replace an SI
4651 if (paradoxical_subreg_p (src
))
4652 sets
[i
].src_volatile
= 1;
4655 /* Locate all possible equivalent forms for SRC. Try to replace
4656 SRC in the insn with each cheaper equivalent.
4658 We have the following types of equivalents: SRC itself, a folded
4659 version, a value given in a REG_EQUAL note, or a value related
4662 Each of these equivalents may be part of an additional class
4663 of equivalents (if more than one is in the table, they must be in
4664 the same class; we check for this).
4666 If the source is volatile, we don't do any table lookups.
4668 We note any constant equivalent for possible later use in a
4671 if (!sets
[i
].src_volatile
)
4672 elt
= lookup (src
, sets
[i
].src_hash
, mode
);
4674 sets
[i
].src_elt
= elt
;
4676 if (elt
&& src_eqv_here
&& src_eqv_elt
)
4678 if (elt
->first_same_value
!= src_eqv_elt
->first_same_value
)
4680 /* The REG_EQUAL is indicating that two formerly distinct
4681 classes are now equivalent. So merge them. */
4682 merge_equiv_classes (elt
, src_eqv_elt
);
4683 src_eqv_hash
= HASH (src_eqv
, elt
->mode
);
4684 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash
, elt
->mode
);
4690 else if (src_eqv_elt
)
4693 /* Try to find a constant somewhere and record it in `src_const'.
4694 Record its table element, if any, in `src_const_elt'. Look in
4695 any known equivalences first. (If the constant is not in the
4696 table, also set `sets[i].src_const_hash'). */
4698 for (p
= elt
->first_same_value
; p
; p
= p
->next_same_value
)
4702 src_const_elt
= elt
;
4707 && (CONSTANT_P (src_folded
)
4708 /* Consider (minus (label_ref L1) (label_ref L2)) as
4709 "constant" here so we will record it. This allows us
4710 to fold switch statements when an ADDR_DIFF_VEC is used. */
4711 || (GET_CODE (src_folded
) == MINUS
4712 && GET_CODE (XEXP (src_folded
, 0)) == LABEL_REF
4713 && GET_CODE (XEXP (src_folded
, 1)) == LABEL_REF
)))
4714 src_const
= src_folded
, src_const_elt
= elt
;
4715 else if (src_const
== 0 && src_eqv_here
&& CONSTANT_P (src_eqv_here
))
4716 src_const
= src_eqv_here
, src_const_elt
= src_eqv_elt
;
4718 /* If we don't know if the constant is in the table, get its
4719 hash code and look it up. */
4720 if (src_const
&& src_const_elt
== 0)
4722 sets
[i
].src_const_hash
= HASH (src_const
, mode
);
4723 src_const_elt
= lookup (src_const
, sets
[i
].src_const_hash
, mode
);
4726 sets
[i
].src_const
= src_const
;
4727 sets
[i
].src_const_elt
= src_const_elt
;
4729 /* If the constant and our source are both in the table, mark them as
4730 equivalent. Otherwise, if a constant is in the table but the source
4731 isn't, set ELT to it. */
4732 if (src_const_elt
&& elt
4733 && src_const_elt
->first_same_value
!= elt
->first_same_value
)
4734 merge_equiv_classes (elt
, src_const_elt
);
4735 else if (src_const_elt
&& elt
== 0)
4736 elt
= src_const_elt
;
4738 /* See if there is a register linearly related to a constant
4739 equivalent of SRC. */
4741 && (GET_CODE (src_const
) == CONST
4742 || (src_const_elt
&& src_const_elt
->related_value
!= 0)))
4744 src_related
= use_related_value (src_const
, src_const_elt
);
4747 struct table_elt
*src_related_elt
4748 = lookup (src_related
, HASH (src_related
, mode
), mode
);
4749 if (src_related_elt
&& elt
)
4751 if (elt
->first_same_value
4752 != src_related_elt
->first_same_value
)
4753 /* This can occur when we previously saw a CONST
4754 involving a SYMBOL_REF and then see the SYMBOL_REF
4755 twice. Merge the involved classes. */
4756 merge_equiv_classes (elt
, src_related_elt
);
4759 src_related_elt
= 0;
4761 else if (src_related_elt
&& elt
== 0)
4762 elt
= src_related_elt
;
4766 /* See if we have a CONST_INT that is already in a register in a
4769 if (src_const
&& src_related
== 0 && CONST_INT_P (src_const
)
4770 && GET_MODE_CLASS (mode
) == MODE_INT
4771 && GET_MODE_PRECISION (mode
) < BITS_PER_WORD
)
4773 machine_mode wider_mode
;
4775 for (wider_mode
= GET_MODE_WIDER_MODE (mode
);
4776 wider_mode
!= VOIDmode
4777 && GET_MODE_PRECISION (wider_mode
) <= BITS_PER_WORD
4778 && src_related
== 0;
4779 wider_mode
= GET_MODE_WIDER_MODE (wider_mode
))
4781 struct table_elt
*const_elt
4782 = lookup (src_const
, HASH (src_const
, wider_mode
), wider_mode
);
4787 for (const_elt
= const_elt
->first_same_value
;
4788 const_elt
; const_elt
= const_elt
->next_same_value
)
4789 if (REG_P (const_elt
->exp
))
4791 src_related
= gen_lowpart (mode
, const_elt
->exp
);
4797 /* Another possibility is that we have an AND with a constant in
4798 a mode narrower than a word. If so, it might have been generated
4799 as part of an "if" which would narrow the AND. If we already
4800 have done the AND in a wider mode, we can use a SUBREG of that
4803 if (flag_expensive_optimizations
&& ! src_related
4804 && GET_CODE (src
) == AND
&& CONST_INT_P (XEXP (src
, 1))
4805 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
4808 rtx new_and
= gen_rtx_AND (VOIDmode
, NULL_RTX
, XEXP (src
, 1));
4810 for (tmode
= GET_MODE_WIDER_MODE (mode
);
4811 GET_MODE_SIZE (tmode
) <= UNITS_PER_WORD
;
4812 tmode
= GET_MODE_WIDER_MODE (tmode
))
4814 rtx inner
= gen_lowpart (tmode
, XEXP (src
, 0));
4815 struct table_elt
*larger_elt
;
4819 PUT_MODE (new_and
, tmode
);
4820 XEXP (new_and
, 0) = inner
;
4821 larger_elt
= lookup (new_and
, HASH (new_and
, tmode
), tmode
);
4822 if (larger_elt
== 0)
4825 for (larger_elt
= larger_elt
->first_same_value
;
4826 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
4827 if (REG_P (larger_elt
->exp
))
4830 = gen_lowpart (mode
, larger_elt
->exp
);
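
	  /* Illustrative sketch, not part of the original sources and using
	     hypothetical registers: if this insn computes
	     (and:QI (subreg:QI (reg:SI 100) 0) (const_int 15)) and an
	     earlier insn already computed
	     (and:SI (reg:SI 100) (const_int 15)) into (reg:SI 101), the loop
	     above finds the wider AND in the table and can offer the low
	     part of r101, e.g. (subreg:QI (reg:SI 101) 0), as SRC_RELATED.  */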
4840 #ifdef LOAD_EXTEND_OP
4841 /* See if a MEM has already been loaded with a widening operation;
4842 if it has, we can use a subreg of that. Many CISC machines
4843 also have such operations, but this is only likely to be
4844 beneficial on these machines. */
4846 if (flag_expensive_optimizations
&& src_related
== 0
4847 && (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
4848 && GET_MODE_CLASS (mode
) == MODE_INT
4849 && MEM_P (src
) && ! do_not_record
4850 && LOAD_EXTEND_OP (mode
) != UNKNOWN
)
4852 struct rtx_def memory_extend_buf
;
4853 rtx memory_extend_rtx
= &memory_extend_buf
;
4856 /* Set what we are trying to extend and the operation it might
4857 have been extended with. */
4858 memset (memory_extend_rtx
, 0, sizeof (*memory_extend_rtx
));
4859 PUT_CODE (memory_extend_rtx
, LOAD_EXTEND_OP (mode
));
4860 XEXP (memory_extend_rtx
, 0) = src
;
4862 for (tmode
= GET_MODE_WIDER_MODE (mode
);
4863 GET_MODE_SIZE (tmode
) <= UNITS_PER_WORD
;
4864 tmode
= GET_MODE_WIDER_MODE (tmode
))
4866 struct table_elt
*larger_elt
;
4868 PUT_MODE (memory_extend_rtx
, tmode
);
4869 larger_elt
= lookup (memory_extend_rtx
,
4870 HASH (memory_extend_rtx
, tmode
), tmode
);
4871 if (larger_elt
== 0)
4874 for (larger_elt
= larger_elt
->first_same_value
;
4875 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
4876 if (REG_P (larger_elt
->exp
))
4878 src_related
= gen_lowpart (mode
, larger_elt
->exp
);
4886 #endif /* LOAD_EXTEND_OP */
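
      /* Illustrative sketch, not part of the original sources: on a target
	 whose LOAD_EXTEND_OP (QImode) is ZERO_EXTEND, if an earlier insn
	 loaded (zero_extend:SI (mem:QI addr)) into a hypothetical
	 (reg:SI 102), then a later read of (mem:QI addr) can be replaced by
	 the low part of r102, e.g. (subreg:QI (reg:SI 102) 0).  */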
4888 /* Try to express the constant using a register+offset expression
4889 derived from a constant anchor. */
4891 if (targetm
.const_anchor
4894 && GET_CODE (src_const
) == CONST_INT
)
4896 src_related
= try_const_anchors (src_const
, mode
);
4897 src_related_is_const_anchor
= src_related
!= NULL_RTX
;
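
      /* Illustrative sketch, not part of the original sources, with
	 hypothetical values: if the target's const_anchor is 0x8000 and
	 (reg:SI 103) is known to hold the anchored constant 0x30000, then
	 try_const_anchors may express the constant 0x30004 as
	 (plus:SI (reg:SI 103) (const_int 4)) and offer it as SRC_RELATED.  */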
4901 if (src
== src_folded
)
4904 /* At this point, ELT, if nonzero, points to a class of expressions
4905 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
4906 and SRC_RELATED, if nonzero, each contain additional equivalent
4907 expressions. Prune these latter expressions by deleting expressions
4908 already in the equivalence class.
4910 Check for an equivalent identical to the destination. If found,
4911 this is the preferred equivalent since it will likely lead to
4912 elimination of the insn. Indicate this by placing it in
4916 elt
= elt
->first_same_value
;
4917 for (p
= elt
; p
; p
= p
->next_same_value
)
4919 enum rtx_code code
= GET_CODE (p
->exp
);
4921 /* If the expression is not valid, ignore it. Then we do not
4922 have to check for validity below. In most cases, we can use
4923 `rtx_equal_p', since canonicalization has already been done. */
4924 if (code
!= REG
&& ! exp_equiv_p (p
->exp
, p
->exp
, 1, false))
4927 /* Also skip paradoxical subregs, unless that's what we're
4929 if (paradoxical_subreg_p (p
->exp
)
4931 && GET_CODE (src
) == SUBREG
4932 && GET_MODE (src
) == GET_MODE (p
->exp
)
4933 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src
)))
4934 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p
->exp
))))))
4937 if (src
&& GET_CODE (src
) == code
&& rtx_equal_p (src
, p
->exp
))
4939 else if (src_folded
&& GET_CODE (src_folded
) == code
4940 && rtx_equal_p (src_folded
, p
->exp
))
4942 else if (src_eqv_here
&& GET_CODE (src_eqv_here
) == code
4943 && rtx_equal_p (src_eqv_here
, p
->exp
))
4945 else if (src_related
&& GET_CODE (src_related
) == code
4946 && rtx_equal_p (src_related
, p
->exp
))
4949 /* This is the same as the destination of the insns, we want
4950 to prefer it. Copy it to src_related. The code below will
4951 then give it a negative cost. */
4952 if (GET_CODE (dest
) == code
&& rtx_equal_p (p
->exp
, dest
))
4956 /* Find the cheapest valid equivalent, trying all the available
4957 possibilities. Prefer items not in the hash table to ones
4958 that are when they are equal cost. Note that we can never
4959 worsen an insn as the current contents will also succeed.
4960 If we find an equivalent identical to the destination, use it as best,
4961 since this insn will probably be eliminated in that case. */
4964 if (rtx_equal_p (src
, dest
))
4965 src_cost
= src_regcost
= -1;
4968 src_cost
= COST (src
);
4969 src_regcost
= approx_reg_cost (src
);
4975 if (rtx_equal_p (src_eqv_here
, dest
))
4976 src_eqv_cost
= src_eqv_regcost
= -1;
4979 src_eqv_cost
= COST (src_eqv_here
);
4980 src_eqv_regcost
= approx_reg_cost (src_eqv_here
);
4986 if (rtx_equal_p (src_folded
, dest
))
4987 src_folded_cost
= src_folded_regcost
= -1;
4990 src_folded_cost
= COST (src_folded
);
4991 src_folded_regcost
= approx_reg_cost (src_folded
);
4997 if (rtx_equal_p (src_related
, dest
))
4998 src_related_cost
= src_related_regcost
= -1;
5001 src_related_cost
= COST (src_related
);
5002 src_related_regcost
= approx_reg_cost (src_related
);
5004 /* If a const-anchor is used to synthesize a constant that
5005 normally requires multiple instructions then slightly prefer
5006 it over the original sequence. These instructions are likely
5007 to become redundant now. We can't compare against the cost
5008 of src_eqv_here because, on MIPS for example, multi-insn
5009 constants have zero cost; they are assumed to be hoisted from
5011 if (src_related_is_const_anchor
5012 && src_related_cost
== src_cost
5018 /* If this was an indirect jump insn, a known label will really be
5019 cheaper even though it looks more expensive. */
5020 if (dest
== pc_rtx
&& src_const
&& GET_CODE (src_const
) == LABEL_REF
)
5021 src_folded
= src_const
, src_folded_cost
= src_folded_regcost
= -1;
5023 /* Terminate loop when replacement made. This must terminate since
5024 the current contents will be tested and will always be valid. */
5029 /* Skip invalid entries. */
5030 while (elt
&& !REG_P (elt
->exp
)
5031 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
5032 elt
= elt
->next_same_value
;
5034 /* A paradoxical subreg would be bad here: it'll be the right
5035 size, but later may be adjusted so that the upper bits aren't
5036 what we want. So reject it. */
5038 && paradoxical_subreg_p (elt
->exp
)
5039 /* It is okay, though, if the rtx we're trying to match
5040 will ignore any of the bits we can't predict. */
5042 && GET_CODE (src
) == SUBREG
5043 && GET_MODE (src
) == GET_MODE (elt
->exp
)
5044 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src
)))
5045 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt
->exp
))))))
5047 elt
= elt
->next_same_value
;
5053 src_elt_cost
= elt
->cost
;
5054 src_elt_regcost
= elt
->regcost
;
5057 /* Find cheapest and skip it for the next time. For items
5058 of equal cost, use this order:
5059 src_folded, src, src_eqv, src_related and hash table entry. */
5061 && preferable (src_folded_cost
, src_folded_regcost
,
5062 src_cost
, src_regcost
) <= 0
5063 && preferable (src_folded_cost
, src_folded_regcost
,
5064 src_eqv_cost
, src_eqv_regcost
) <= 0
5065 && preferable (src_folded_cost
, src_folded_regcost
,
5066 src_related_cost
, src_related_regcost
) <= 0
5067 && preferable (src_folded_cost
, src_folded_regcost
,
5068 src_elt_cost
, src_elt_regcost
) <= 0)
5070 trial
= src_folded
, src_folded_cost
= MAX_COST
;
5071 if (src_folded_force_flag
)
5073 rtx forced
= force_const_mem (mode
, trial
);
5079 && preferable (src_cost
, src_regcost
,
5080 src_eqv_cost
, src_eqv_regcost
) <= 0
5081 && preferable (src_cost
, src_regcost
,
5082 src_related_cost
, src_related_regcost
) <= 0
5083 && preferable (src_cost
, src_regcost
,
5084 src_elt_cost
, src_elt_regcost
) <= 0)
5085 trial
= src
, src_cost
= MAX_COST
;
5086 else if (src_eqv_here
5087 && preferable (src_eqv_cost
, src_eqv_regcost
,
5088 src_related_cost
, src_related_regcost
) <= 0
5089 && preferable (src_eqv_cost
, src_eqv_regcost
,
5090 src_elt_cost
, src_elt_regcost
) <= 0)
5091 trial
= src_eqv_here
, src_eqv_cost
= MAX_COST
;
5092 else if (src_related
5093 && preferable (src_related_cost
, src_related_regcost
,
5094 src_elt_cost
, src_elt_regcost
) <= 0)
5095 trial
= src_related
, src_related_cost
= MAX_COST
;
5099 elt
= elt
->next_same_value
;
5100 src_elt_cost
= MAX_COST
;
5103 /* Avoid creation of overlapping memory moves. */
5104 if (MEM_P (trial
) && MEM_P (SET_DEST (sets
[i
].rtl
)))
5108 /* BLKmode moves are not handled by cse anyway. */
5109 if (GET_MODE (trial
) == BLKmode
)
5112 src
= canon_rtx (trial
);
5113 dest
= canon_rtx (SET_DEST (sets
[i
].rtl
));
5115 if (!MEM_P (src
) || !MEM_P (dest
)
5116 || !nonoverlapping_memrefs_p (src
, dest
, false))
5121 (set (reg:M N) (const_int A))
5122 (set (reg:M2 O) (const_int B))
5123 (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5125 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
5126 && CONST_INT_P (trial
)
5127 && CONST_INT_P (XEXP (SET_DEST (sets
[i
].rtl
), 1))
5128 && CONST_INT_P (XEXP (SET_DEST (sets
[i
].rtl
), 2))
5129 && REG_P (XEXP (SET_DEST (sets
[i
].rtl
), 0))
5130 && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets
[i
].rtl
)))
5131 >= INTVAL (XEXP (SET_DEST (sets
[i
].rtl
), 1)))
5132 && ((unsigned) INTVAL (XEXP (SET_DEST (sets
[i
].rtl
), 1))
5133 + (unsigned) INTVAL (XEXP (SET_DEST (sets
[i
].rtl
), 2))
5134 <= HOST_BITS_PER_WIDE_INT
))
5136 rtx dest_reg
= XEXP (SET_DEST (sets
[i
].rtl
), 0);
5137 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
5138 rtx pos
= XEXP (SET_DEST (sets
[i
].rtl
), 2);
5139 unsigned int dest_hash
= HASH (dest_reg
, GET_MODE (dest_reg
));
5140 struct table_elt
*dest_elt
5141 = lookup (dest_reg
, dest_hash
, GET_MODE (dest_reg
));
5142 rtx dest_cst
= NULL
;
5145 for (p
= dest_elt
->first_same_value
; p
; p
= p
->next_same_value
)
5146 if (p
->is_const
&& CONST_INT_P (p
->exp
))
5153 HOST_WIDE_INT val
= INTVAL (dest_cst
);
5156 if (BITS_BIG_ENDIAN
)
5157 shift
= GET_MODE_PRECISION (GET_MODE (dest_reg
))
5158 - INTVAL (pos
) - INTVAL (width
);
5160 shift
= INTVAL (pos
);
5161 if (INTVAL (width
) == HOST_BITS_PER_WIDE_INT
)
5162 mask
= ~(HOST_WIDE_INT
) 0;
5164 mask
= ((HOST_WIDE_INT
) 1 << INTVAL (width
)) - 1;
5165 val
&= ~(mask
<< shift
);
5166 val
|= (INTVAL (trial
) & mask
) << shift
;
5167 val
= trunc_int_for_mode (val
, GET_MODE (dest_reg
));
5168 validate_unshare_change (insn
, &SET_DEST (sets
[i
].rtl
),
5170 validate_unshare_change (insn
, &SET_SRC (sets
[i
].rtl
),
5172 if (apply_change_group ())
5174 rtx note
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
);
5177 remove_note (insn
, note
);
5178 df_notes_rescan (insn
);
5182 src_eqv_volatile
= 0;
5183 src_eqv_in_memory
= 0;
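
	  /* Illustrative worked example, not part of the original sources:
	     with BITS_BIG_ENDIAN == 0, a known destination value of 0xff00,
	     pos == 0, width == 8 and a constant TRIAL of 0x12, the code
	     above computes shift == 0 and mask == 0xff, and folds the
	     bit-field insertion into the constant 0xff12, so the
	     ZERO_EXTRACT store becomes a plain (set (reg) (const_int 0xff12)).  */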
5191 /* We don't normally have an insn matching (set (pc) (pc)), so
5192 check for this separately here. We will delete such an
5195 For other cases such as a table jump or conditional jump
5196 where we know the ultimate target, go ahead and replace the
5197 operand. While that may not make a valid insn, we will
5198 reemit the jump below (and also insert any necessary
5200 if (n_sets
== 1 && dest
== pc_rtx
5202 || (GET_CODE (trial
) == LABEL_REF
5203 && ! condjump_p (insn
))))
5205 /* Don't substitute non-local labels, this confuses CFG. */
5206 if (GET_CODE (trial
) == LABEL_REF
5207 && LABEL_REF_NONLOCAL_P (trial
))
5210 SET_SRC (sets
[i
].rtl
) = trial
;
5211 cse_jumps_altered
= true;
5215 /* Reject certain invalid forms of CONST that we create. */
5216 else if (CONSTANT_P (trial
)
5217 && GET_CODE (trial
) == CONST
5218 /* Reject cases that will cause decode_rtx_const to
5219 die. On the alpha when simplifying a switch, we
5220 get (const (truncate (minus (label_ref)
5222 && (GET_CODE (XEXP (trial
, 0)) == TRUNCATE
5223 /* Likewise on IA-64, except without the
5225 || (GET_CODE (XEXP (trial
, 0)) == MINUS
5226 && GET_CODE (XEXP (XEXP (trial
, 0), 0)) == LABEL_REF
5227 && GET_CODE (XEXP (XEXP (trial
, 0), 1)) == LABEL_REF
)))
5228 /* Do nothing for this case. */
5231 /* Look for a substitution that makes a valid insn. */
5232 else if (validate_unshare_change
5233 (insn
, &SET_SRC (sets
[i
].rtl
), trial
, 0))
5235 rtx new_rtx
= canon_reg (SET_SRC (sets
[i
].rtl
), insn
);
5237 /* The result of apply_change_group can be ignored; see
5240 validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_rtx
, 1);
5241 apply_change_group ();
5246 /* If we previously found constant pool entries for
5247 constants and this is a constant, try making a
5248 pool entry. Put it in src_folded unless we already have done
5249 this since that is where it likely came from. */
5251 else if (constant_pool_entries_cost
5252 && CONSTANT_P (trial
)
5254 || (!MEM_P (src_folded
)
5255 && ! src_folded_force_flag
))
5256 && GET_MODE_CLASS (mode
) != MODE_CC
5257 && mode
!= VOIDmode
)
5259 src_folded_force_flag
= 1;
5261 src_folded_cost
= constant_pool_entries_cost
;
5262 src_folded_regcost
= constant_pool_entries_regcost
;
5266 /* If we changed the insn too much, handle this set from scratch. */
5273 src
= SET_SRC (sets
[i
].rtl
);
5275 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5276 However, there is an important exception: If both are registers
5277 that are not the head of their equivalence class, replace SET_SRC
5278 with the head of the class. If we do not do this, we will have
5279 both registers live over a portion of the basic block. This way,
5280 their lifetimes will likely abut instead of overlapping. */
5282 && REGNO_QTY_VALID_P (REGNO (dest
)))
5284 int dest_q
= REG_QTY (REGNO (dest
));
5285 struct qty_table_elem
*dest_ent
= &qty_table
[dest_q
];
5287 if (dest_ent
->mode
== GET_MODE (dest
)
5288 && dest_ent
->first_reg
!= REGNO (dest
)
5289 && REG_P (src
) && REGNO (src
) == REGNO (dest
)
5290 /* Don't do this if the original insn had a hard reg as
5291 SET_SRC or SET_DEST. */
5292 && (!REG_P (sets
[i
].src
)
5293 || REGNO (sets
[i
].src
) >= FIRST_PSEUDO_REGISTER
)
5294 && (!REG_P (dest
) || REGNO (dest
) >= FIRST_PSEUDO_REGISTER
))
5295 /* We can't call canon_reg here because it won't do anything if
5296 SRC is a hard register. */
5298 int src_q
= REG_QTY (REGNO (src
));
5299 struct qty_table_elem
*src_ent
= &qty_table
[src_q
];
5300 int first
= src_ent
->first_reg
;
5302 = (first
>= FIRST_PSEUDO_REGISTER
5303 ? regno_reg_rtx
[first
] : gen_rtx_REG (GET_MODE (src
), first
));
5305 /* We must use validate-change even for this, because this
5306 might be a special no-op instruction, suitable only to
5308 if (validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_src
, 0))
5311 /* If we had a constant that is cheaper than what we are now
5312 setting SRC to, use that constant. We ignored it when we
5313 thought we could make this into a no-op. */
5314 if (src_const
&& COST (src_const
) < COST (src
)
5315 && validate_change (insn
, &SET_SRC (sets
[i
].rtl
),
5322 /* If we made a change, recompute SRC values. */
5323 if (src
!= sets
[i
].src
)
5326 hash_arg_in_memory
= 0;
5328 sets
[i
].src_hash
= HASH (src
, mode
);
5329 sets
[i
].src_volatile
= do_not_record
;
5330 sets
[i
].src_in_memory
= hash_arg_in_memory
;
5331 sets
[i
].src_elt
= lookup (src
, sets
[i
].src_hash
, mode
);
5334 /* If this is a single SET, we are setting a register, and we have an
5335 equivalent constant, we want to add a REG_EQUAL note if the constant
5336 is different from the source. We don't want to do it for a constant
5337 pseudo since verifying that this pseudo hasn't been eliminated is a
5338 pain; moreover such a note won't help anything.
5340 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5341 which can be created for a reference to a compile time computable
5342 entry in a jump table. */
5346 && !REG_P (src_const
)
5347 && !(GET_CODE (src_const
) == SUBREG
5348 && REG_P (SUBREG_REG (src_const
)))
5349 && !(GET_CODE (src_const
) == CONST
5350 && GET_CODE (XEXP (src_const
, 0)) == MINUS
5351 && GET_CODE (XEXP (XEXP (src_const
, 0), 0)) == LABEL_REF
5352 && GET_CODE (XEXP (XEXP (src_const
, 0), 1)) == LABEL_REF
)
5353 && !rtx_equal_p (src
, src_const
))
5355 /* Make sure that the rtx is not shared. */
5356 src_const
= copy_rtx (src_const
);
5358 /* Record the actual constant value in a REG_EQUAL note,
5359 making a new one if one does not already exist. */
5360 set_unique_reg_note (insn
, REG_EQUAL
, src_const
);
5361 df_notes_rescan (insn
);
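
      /* Illustrative sketch, not part of the original sources: for a
	 hypothetical insn

	     (set (reg:SI 105) (plus:SI (reg:SI 106) (reg:SI 107)))

	 whose source is known to be equivalent to the constant 12, the code
	 above attaches a REG_EQUAL note holding (const_int 12), so later
	 passes can see the constant value of r105 without recomputing it.  */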
5364 /* Now deal with the destination. */
5367 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5368 while (GET_CODE (dest
) == SUBREG
5369 || GET_CODE (dest
) == ZERO_EXTRACT
5370 || GET_CODE (dest
) == STRICT_LOW_PART
)
5371 dest
= XEXP (dest
, 0);
5373 sets
[i
].inner_dest
= dest
;
5377 #ifdef PUSH_ROUNDING
5378 /* Stack pushes invalidate the stack pointer. */
5379 rtx addr
= XEXP (dest
, 0);
5380 if (GET_RTX_CLASS (GET_CODE (addr
)) == RTX_AUTOINC
5381 && XEXP (addr
, 0) == stack_pointer_rtx
)
5382 invalidate (stack_pointer_rtx
, VOIDmode
);
5384 dest
= fold_rtx (dest
, insn
);
5387 /* Compute the hash code of the destination now,
5388 before the effects of this instruction are recorded,
5389 since the register values used in the address computation
5390 are those before this instruction. */
5391 sets
[i
].dest_hash
= HASH (dest
, mode
);
5393 /* Don't enter a bit-field in the hash table
5394 because the value in it after the store
5395 may not equal what was stored, due to truncation. */
5397 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
)
5399 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
5401 if (src_const
!= 0 && CONST_INT_P (src_const
)
5402 && CONST_INT_P (width
)
5403 && INTVAL (width
) < HOST_BITS_PER_WIDE_INT
5404 && ! (INTVAL (src_const
)
5405 & (HOST_WIDE_INT_M1U
<< INTVAL (width
))))
5406 /* Exception: if the value is constant,
5407 and it won't be truncated, record it. */
5411 /* This is chosen so that the destination will be invalidated
5412 but no new value will be recorded.
5413 We must invalidate because sometimes constant
5414 values can be recorded for bitfields. */
5415 sets
[i
].src_elt
= 0;
5416 sets
[i
].src_volatile
= 1;
5422 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5424 else if (n_sets
== 1 && dest
== pc_rtx
&& src
== pc_rtx
)
5426 /* One less use of the label this insn used to jump to. */
5427 delete_insn_and_edges (insn
);
5428 cse_jumps_altered
= true;
5429 /* No more processing for this set. */
5433 /* If this SET is now setting PC to a label, we know it used to
5434 be a conditional or computed branch. */
5435 else if (dest
== pc_rtx
&& GET_CODE (src
) == LABEL_REF
5436 && !LABEL_REF_NONLOCAL_P (src
))
5438 /* We reemit the jump in as many cases as possible just in
5439 case the form of an unconditional jump is significantly
5440 different than a computed jump or conditional jump.
5442 If this insn has multiple sets, then reemitting the
5443 jump is nontrivial. So instead we just force rerecognition
5444 and hope for the best. */
5450 new_rtx
= emit_jump_insn_before (gen_jump (XEXP (src
, 0)), insn
);
5451 JUMP_LABEL (new_rtx
) = XEXP (src
, 0);
5452 LABEL_NUSES (XEXP (src
, 0))++;
5454 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5455 note
= find_reg_note (insn
, REG_NON_LOCAL_GOTO
, 0);
5458 XEXP (note
, 1) = NULL_RTX
;
5459 REG_NOTES (new_rtx
) = note
;
5462 delete_insn_and_edges (insn
);
5466 INSN_CODE (insn
) = -1;
5468 /* Do not bother deleting any unreachable code, let jump do it. */
5469 cse_jumps_altered
= true;
5473 /* If destination is volatile, invalidate it and then do no further
5474 processing for this assignment. */
5476 else if (do_not_record
)
5478 if (REG_P (dest
) || GET_CODE (dest
) == SUBREG
)
5479 invalidate (dest
, VOIDmode
);
5480 else if (MEM_P (dest
))
5481 invalidate (dest
, VOIDmode
);
5482 else if (GET_CODE (dest
) == STRICT_LOW_PART
5483 || GET_CODE (dest
) == ZERO_EXTRACT
)
5484 invalidate (XEXP (dest
, 0), GET_MODE (dest
));
5488 if (sets
[i
].rtl
!= 0 && dest
!= SET_DEST (sets
[i
].rtl
))
5489 sets
[i
].dest_hash
= HASH (SET_DEST (sets
[i
].rtl
), mode
);
5492 /* If setting CC0, record what it was set to, or a constant, if it
5493 is equivalent to a constant. If it is being set to a floating-point
5494 value, make a COMPARE with the appropriate constant of 0. If we
5495 don't do this, later code can interpret this as a test against
5496 const0_rtx, which can cause problems if we try to put it into an
5497 insn as a floating-point operand. */
5498 if (dest
== cc0_rtx
)
5500 this_insn_cc0
= src_const
&& mode
!= VOIDmode
? src_const
: src
;
5501 this_insn_cc0_mode
= mode
;
5502 if (FLOAT_MODE_P (mode
))
5503 this_insn_cc0
= gen_rtx_COMPARE (VOIDmode
, this_insn_cc0
,
5509 /* Now enter all non-volatile source expressions in the hash table
5510 if they are not already present.
5511 Record their equivalence classes in src_elt.
5512 This way we can insert the corresponding destinations into
5513 the same classes even if the actual sources are no longer in them
5514 (having been invalidated). */
5516 if (src_eqv
&& src_eqv_elt
== 0 && sets
[0].rtl
!= 0 && ! src_eqv_volatile
5517 && ! rtx_equal_p (src_eqv
, SET_DEST (sets
[0].rtl
)))
5519 struct table_elt
*elt
;
5520 struct table_elt
*classp
= sets
[0].src_elt
;
5521 rtx dest
= SET_DEST (sets
[0].rtl
);
5522 machine_mode eqvmode
= GET_MODE (dest
);
5524 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5526 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
5529 if (insert_regs (src_eqv
, classp
, 0))
5531 rehash_using_reg (src_eqv
);
5532 src_eqv_hash
= HASH (src_eqv
, eqvmode
);
5534 elt
= insert (src_eqv
, classp
, src_eqv_hash
, eqvmode
);
5535 elt
->in_memory
= src_eqv_in_memory
;
5538 /* Check to see if src_eqv_elt is the same as a set source which
5539 does not yet have an elt, and if so set the elt of the set source
5541 for (i
= 0; i
< n_sets
; i
++)
5542 if (sets
[i
].rtl
&& sets
[i
].src_elt
== 0
5543 && rtx_equal_p (SET_SRC (sets
[i
].rtl
), src_eqv
))
5544 sets
[i
].src_elt
= src_eqv_elt
;
5547 for (i
= 0; i
< n_sets
; i
++)
5548 if (sets
[i
].rtl
&& ! sets
[i
].src_volatile
5549 && ! rtx_equal_p (SET_SRC (sets
[i
].rtl
), SET_DEST (sets
[i
].rtl
)))
5551 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == STRICT_LOW_PART
)
5553 /* REG_EQUAL in setting a STRICT_LOW_PART
5554 gives an equivalent for the entire destination register,
5555 not just for the subreg being stored in now.
5556 This is a more interesting equivalence, so we arrange later
5557 to treat the entire reg as the destination. */
5558 sets
[i
].src_elt
= src_eqv_elt
;
5559 sets
[i
].src_hash
= src_eqv_hash
;
5563 /* Insert source and constant equivalent into hash table, if not
5565 struct table_elt
*classp
= src_eqv_elt
;
5566 rtx src
= sets
[i
].src
;
5567 rtx dest
= SET_DEST (sets
[i
].rtl
);
5569 = GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
5571 /* It's possible that we have a source value known to be
5572 constant but don't have a REG_EQUAL note on the insn.
5573 Lack of a note will mean src_eqv_elt will be NULL. This
5574 can happen where we've generated a SUBREG to access a
5575 CONST_INT that is already in a register in a wider mode.
5576 Ensure that the source expression is put in the proper
5579 classp
= sets
[i
].src_const_elt
;
5581 if (sets
[i
].src_elt
== 0)
5583 struct table_elt
*elt
;
5585 /* Note that these insert_regs calls cannot remove
5586 any of the src_elt's, because they would have failed to
5587 match if not still valid. */
5588 if (insert_regs (src
, classp
, 0))
5590 rehash_using_reg (src
);
5591 sets
[i
].src_hash
= HASH (src
, mode
);
5593 elt
= insert (src
, classp
, sets
[i
].src_hash
, mode
);
5594 elt
->in_memory
= sets
[i
].src_in_memory
;
5595 sets
[i
].src_elt
= classp
= elt
;
5597 if (sets
[i
].src_const
&& sets
[i
].src_const_elt
== 0
5598 && src
!= sets
[i
].src_const
5599 && ! rtx_equal_p (sets
[i
].src_const
, src
))
5600 sets
[i
].src_elt
= insert (sets
[i
].src_const
, classp
,
5601 sets
[i
].src_const_hash
, mode
);
5604 else if (sets
[i
].src_elt
== 0)
5605 /* If we did not insert the source into the hash table (e.g., it was
5606 volatile), note the equivalence class for the REG_EQUAL value, if any,
5607 so that the destination goes into that class. */
5608 sets
[i
].src_elt
= src_eqv_elt
;
5610 /* Record destination addresses in the hash table. This allows us to
5611 check if they are invalidated by other sets. */
5612 for (i
= 0; i
< n_sets
; i
++)
5616 rtx x
= sets
[i
].inner_dest
;
5617 struct table_elt
*elt
;
5624 mode
= GET_MODE (x
);
5625 hash
= HASH (x
, mode
);
5626 elt
= lookup (x
, hash
, mode
);
5629 if (insert_regs (x
, NULL
, 0))
5631 rtx dest
= SET_DEST (sets
[i
].rtl
);
5633 rehash_using_reg (x
);
5634 hash
= HASH (x
, mode
);
5635 sets
[i
].dest_hash
= HASH (dest
, GET_MODE (dest
));
5637 elt
= insert (x
, NULL
, hash
, mode
);
5640 sets
[i
].dest_addr_elt
= elt
;
5643 sets
[i
].dest_addr_elt
= NULL
;
5647 invalidate_from_clobbers (insn
);
5649 /* Some registers are invalidated by subroutine calls. Memory is
5650 invalidated by non-constant calls. */
5654 if (!(RTL_CONST_OR_PURE_CALL_P (insn
)))
5655 invalidate_memory ();
5656 invalidate_for_call ();
5659 /* Now invalidate everything set by this instruction.
5660 If a SUBREG or other funny destination is being set,
5661 sets[i].rtl is still nonzero, so here we invalidate the reg
5662 a part of which is being set. */
5664 for (i
= 0; i
< n_sets
; i
++)
5667 /* We can't use the inner dest, because the mode associated with
5668 a ZERO_EXTRACT is significant. */
5669 rtx dest
= SET_DEST (sets
[i
].rtl
);
5671 /* Needed for registers to remove the register from its
5672 previous quantity's chain.
5673 Needed for memory if this is a nonvarying address, unless
5674 we have just done an invalidate_memory that covers even those. */
5675 if (REG_P (dest
) || GET_CODE (dest
) == SUBREG
)
5676 invalidate (dest
, VOIDmode
);
5677 else if (MEM_P (dest
))
5678 invalidate (dest
, VOIDmode
);
5679 else if (GET_CODE (dest
) == STRICT_LOW_PART
5680 || GET_CODE (dest
) == ZERO_EXTRACT
)
5681 invalidate (XEXP (dest
, 0), GET_MODE (dest
));
5684 /* Don't cse over a call to setjmp; on some machines (eg VAX)
5685 the regs restored by the longjmp come from a later time
5687 if (CALL_P (insn
) && find_reg_note (insn
, REG_SETJMP
, NULL
))
5689 flush_hash_table ();
5693 /* Make sure registers mentioned in destinations
5694 are safe for use in an expression to be inserted.
5695 This removes from the hash table
5696 any invalid entry that refers to one of these registers.
5698 We don't care about the return value from mention_regs because
5699 we are going to hash the SET_DEST values unconditionally. */
5701 for (i
= 0; i
< n_sets
; i
++)
5705 rtx x
= SET_DEST (sets
[i
].rtl
);
5711 /* We used to rely on all references to a register becoming
5712 inaccessible when a register changes to a new quantity,
5713 since that changes the hash code. However, that is not
5714 safe, since after HASH_SIZE new quantities we get a
5715 hash 'collision' of a register with its own invalid
5716 entries. And since SUBREGs have been changed not to
5717 change their hash code with the hash code of the register,
5718 it wouldn't work any longer at all. So we have to check
5719 for any invalid references lying around now.
5720 This code is similar to the REG case in mention_regs,
5721 but it knows that reg_tick has been incremented, and
5722 it leaves reg_in_table as -1 . */
5723 unsigned int regno
= REGNO (x
);
5724 unsigned int endregno
= END_REGNO (x
);
5727 for (i
= regno
; i
< endregno
; i
++)
5729 if (REG_IN_TABLE (i
) >= 0)
5731 remove_invalid_refs (i
);
5732 REG_IN_TABLE (i
) = -1;
5739 /* We may have just removed some of the src_elt's from the hash table.
5740 So replace each one with the current head of the same class.
5741 Also check if destination addresses have been removed. */
5743 for (i
= 0; i
< n_sets
; i
++)
5746 if (sets
[i
].dest_addr_elt
5747 && sets
[i
].dest_addr_elt
->first_same_value
== 0)
5749 /* The elt was removed, which means this destination is not
5750 valid after this instruction. */
5751 sets
[i
].rtl
= NULL_RTX
;
5753 else if (sets
[i
].src_elt
&& sets
[i
].src_elt
->first_same_value
== 0)
5754 /* If elt was removed, find current head of same class,
5755 or 0 if nothing remains of that class. */
5757 struct table_elt
*elt
= sets
[i
].src_elt
;
5759 while (elt
&& elt
->prev_same_value
)
5760 elt
= elt
->prev_same_value
;
5762 while (elt
&& elt
->first_same_value
== 0)
5763 elt
= elt
->next_same_value
;
5764 sets
[i
].src_elt
= elt
? elt
->first_same_value
: 0;
5768 /* Now insert the destinations into their equivalence classes. */
5770 for (i
= 0; i
< n_sets
; i
++)
5773 rtx dest
= SET_DEST (sets
[i
].rtl
);
5774 struct table_elt
*elt
;
5776 /* Don't record value if we are not supposed to risk allocating
5777 floating-point values in registers that might be wider than
5779 if ((flag_float_store
5781 && FLOAT_MODE_P (GET_MODE (dest
)))
5782 /* Don't record BLKmode values, because we don't know the
5783 size of it, and can't be sure that other BLKmode values
5784 have the same or smaller size. */
5785 || GET_MODE (dest
) == BLKmode
5786 /* If we didn't put a REG_EQUAL value or a source into the hash
5787 table, there is no point is recording DEST. */
5788 || sets
[i
].src_elt
== 0
5789 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5790 or SIGN_EXTEND, don't record DEST since it can cause
5791 some tracking to be wrong.
5793 ??? Think about this more later. */
5794 || (paradoxical_subreg_p (dest
)
5795 && (GET_CODE (sets
[i
].src
) == SIGN_EXTEND
5796 || GET_CODE (sets
[i
].src
) == ZERO_EXTEND
)))
5799 /* STRICT_LOW_PART isn't part of the value BEING set,
5800 and neither is the SUBREG inside it.
5801 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5802 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5803 dest
= SUBREG_REG (XEXP (dest
, 0));
5805 if (REG_P (dest
) || GET_CODE (dest
) == SUBREG
)
5806 /* Registers must also be inserted into chains for quantities. */
5807 if (insert_regs (dest
, sets
[i
].src_elt
, 1))
5809 /* If `insert_regs' changes something, the hash code must be
5811 rehash_using_reg (dest
);
5812 sets
[i
].dest_hash
= HASH (dest
, GET_MODE (dest
));
5815 elt
= insert (dest
, sets
[i
].src_elt
,
5816 sets
[i
].dest_hash
, GET_MODE (dest
));
5818 /* If this is a constant, insert the constant anchors with the
5819 equivalent register-offset expressions using register DEST. */
5820 if (targetm
.const_anchor
5822 && SCALAR_INT_MODE_P (GET_MODE (dest
))
5823 && GET_CODE (sets
[i
].src_elt
->exp
) == CONST_INT
)
5824 insert_const_anchors (dest
, sets
[i
].src_elt
->exp
, GET_MODE (dest
));
5826 elt
->in_memory
= (MEM_P (sets
[i
].inner_dest
)
5827 && !MEM_READONLY_P (sets
[i
].inner_dest
));
5829 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5830 narrower than M2, and both M1 and M2 are the same number of words,
5831 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5832 make that equivalence as well.
5834 However, BAR may have equivalences for which gen_lowpart
5835 will produce a simpler value than gen_lowpart applied to
5836 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5837 BAR's equivalences. If we don't get a simplified form, make
5838 the SUBREG. It will not be used in an equivalence, but will
5839 cause two similar assignments to be detected.
5841 Note the loop below will find SUBREG_REG (DEST) since we have
5842 already entered SRC and DEST of the SET in the table. */
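
	  /* Illustrative sketch, not part of the original sources, with
	     hypothetical registers: for

		 (set (subreg:SI (reg:HI 110) 0) (reg:SI 111))

	     where SImode and HImode occupy the same number of words, the
	     loop below also records the equivalence of (reg:HI 110) with
	     (subreg:HI (reg:SI 111) 0), so a later HImode use of r110 can
	     be matched.  */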
5844 if (GET_CODE (dest
) == SUBREG
5845 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))) - 1)
5847 == (GET_MODE_SIZE (GET_MODE (dest
)) - 1) / UNITS_PER_WORD
)
5848 && (GET_MODE_SIZE (GET_MODE (dest
))
5849 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))))
5850 && sets
[i
].src_elt
!= 0)
5852 machine_mode new_mode
= GET_MODE (SUBREG_REG (dest
));
5853 struct table_elt
*elt
, *classp
= 0;
5855 for (elt
= sets
[i
].src_elt
->first_same_value
; elt
;
5856 elt
= elt
->next_same_value
)
5860 struct table_elt
*src_elt
;
5863 /* Ignore invalid entries. */
5864 if (!REG_P (elt
->exp
)
5865 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
5868 /* We may have already been playing subreg games. If the
5869 mode is already correct for the destination, use it. */
5870 if (GET_MODE (elt
->exp
) == new_mode
)
5874 /* Calculate big endian correction for the SUBREG_BYTE.
5875 We have already checked that M1 (GET_MODE (dest))
5876 is not narrower than M2 (new_mode). */
5877 if (BYTES_BIG_ENDIAN
)
5878 byte
= (GET_MODE_SIZE (GET_MODE (dest
))
5879 - GET_MODE_SIZE (new_mode
));
5881 new_src
= simplify_gen_subreg (new_mode
, elt
->exp
,
5882 GET_MODE (dest
), byte
);
5885 /* The call to simplify_gen_subreg fails if the value
5886 is VOIDmode, yet we can't do any simplification, e.g.
5887 for EXPR_LISTs denoting function call results.
5888 It is invalid to construct a SUBREG with a VOIDmode
5889 SUBREG_REG, hence a zero new_src means we can't do
5890 this substitution. */
5894 src_hash
= HASH (new_src
, new_mode
);
5895 src_elt
= lookup (new_src
, src_hash
, new_mode
);
5897 /* Put the new source in the hash table is if isn't
5901 if (insert_regs (new_src
, classp
, 0))
5903 rehash_using_reg (new_src
);
5904 src_hash
= HASH (new_src
, new_mode
);
5906 src_elt
= insert (new_src
, classp
, src_hash
, new_mode
);
5907 src_elt
->in_memory
= elt
->in_memory
;
5909 else if (classp
&& classp
!= src_elt
->first_same_value
)
5910 /* Show that two things that we've seen before are
5911 actually the same. */
5912 merge_equiv_classes (src_elt
, classp
);
5914 classp
= src_elt
->first_same_value
;
5915 /* Ignore invalid entries. */
5917 && !REG_P (classp
->exp
)
5918 && ! exp_equiv_p (classp
->exp
, classp
->exp
, 1, false))
5919 classp
= classp
->next_same_value
;
5924 /* Special handling for (set REG0 REG1) where REG0 is the
5925 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5926 be used in the sequel, so (if easily done) change this insn to
5927 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5928 that computed their value. Then REG1 will become a dead store
5929 and won't cloud the situation for later optimizations.
5931 Do not make this change if REG1 is a hard register, because it will
5932 then be used in the sequel and we may be changing a two-operand insn
5933 into a three-operand insn.
5935 Also do not do this if we are operating on a copy of INSN. */
5937 if (n_sets
== 1 && sets
[0].rtl
)
5938 try_back_substitute_reg (sets
[0].rtl
, insn
);
/* Remove from the hash table all expressions that reference memory.  */

static void
invalidate_memory (void)
{
  int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (p->in_memory)
	  remove_from_table (p, i);
      }
}
/* Perform invalidation on the basis of everything about INSN,
   except for invalidating the actual places that are SET in it.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_clobbers (rtx_insn *insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == CLOBBER)
    {
      rtx ref = XEXP (x, 0);
      if (ref)
	{
	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
	      || MEM_P (ref))
	    invalidate (ref, VOIDmode);
	  else if (GET_CODE (ref) == STRICT_LOW_PART
		   || GET_CODE (ref) == ZERO_EXTRACT)
	    invalidate (XEXP (ref, 0), GET_MODE (ref));
	}
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx ref = XEXP (y, 0);
	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
		  || MEM_P (ref))
		invalidate (ref, VOIDmode);
	      else if (GET_CODE (ref) == STRICT_LOW_PART
		       || GET_CODE (ref) == ZERO_EXTRACT)
		invalidate (XEXP (ref, 0), GET_MODE (ref));
	    }
	}
    }
}
/* Perform invalidation on the basis of everything about INSN.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_sets_and_clobbers (rtx_insn *insn)
{
  rtx tem;
  rtx x = PATTERN (insn);

  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
	  invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
    }

  /* Ensure we invalidate the destination register of a CALL insn.
     This is necessary for machines where this register is a fixed_reg,
     because no other code would invalidate it.  */
  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
    invalidate (SET_DEST (x), VOIDmode);
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx clobbered = XEXP (y, 0);

	      if (REG_P (clobbered)
		  || GET_CODE (clobbered) == SUBREG)
		invalidate (clobbered, VOIDmode);
	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
		       || GET_CODE (clobbered) == ZERO_EXTRACT)
		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
	    }
	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
	    invalidate (SET_DEST (y), VOIDmode);
	}
    }
}
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */

static rtx
cse_process_notes_1 (rtx x, rtx object, bool *changed)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    case MEM:
      validate_change (x, &XEXP (x, 0),
		       cse_process_notes (XEXP (x, 0), x, changed), 0);
      return x;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL)
	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
      /* Fall through.  */

    case INSN_LIST:
      if (XEXP (x, 1))
	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case SUBREG:
      {
	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
	/* We don't substitute VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new_rtx) != VOIDmode)
	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
	return x;
      }

    case UNSIGNED_FLOAT:
      {
	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
	/* We don't substitute negative VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new_rtx) != VOIDmode
	    || (CONST_INT_P (new_rtx) && INTVAL (new_rtx) >= 0)
	    || (CONST_DOUBLE_P (new_rtx) && CONST_DOUBLE_HIGH (new_rtx) >= 0))
	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
	return x;
      }

    case REG:
      i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x)))
	{
	  struct qty_table_elem *ent = &qty_table[i];

	  if (ent->const_rtx != NULL_RTX
	      && (CONSTANT_P (ent->const_rtx)
		  || REG_P (ent->const_rtx)))
	    {
	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
	      if (new_rtx)
		return copy_rtx (new_rtx);
	    }
	}

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL);

    default:
      break;
    }

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
		       cse_process_notes (XEXP (x, i), object, changed), 0);

  return x;
}

static rtx
cse_process_notes (rtx x, rtx object, bool *changed)
{
  rtx new_rtx = cse_process_notes_1 (x, object, changed);
  if (new_rtx != x)
    *changed = true;
  return new_rtx;
}
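
/* A minimal illustration (hypothetical registers): if (reg:SI 120) is
   known to be equivalent to (const_int 4), a note

       (expr_list:REG_EQUAL (plus:SI (reg:SI 120) (reg:SI 121)) ...)

   is rewritten by cse_process_notes into

       (expr_list:REG_EQUAL (plus:SI (const_int 4) (reg:SI 121)) ...)

   and *CHANGED is set so the caller knows it must rescan the notes.  */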
/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.

   DATA is a pointer to a struct cse_basic_block_data that is used to
   describe the path.
   It is filled with a queue of basic blocks, starting with FIRST_BB
   and following a trace through the CFG.

   If all paths starting at FIRST_BB have been followed, or no new path
   starting at FIRST_BB can be constructed, this function returns FALSE.
   Otherwise, DATA->path is filled and the function returns TRUE indicating
   that a path to follow was found.

   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
   block in the path will be FIRST_BB.  */

static bool
cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
	       int follow_jumps)
{
  basic_block bb;
  edge e;
  int path_size;

  bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);

  /* See if there is a previous path.  */
  path_size = data->path_size;

  /* There is a previous path.  Make sure it started with FIRST_BB.  */
  if (path_size)
    gcc_assert (data->path[0].bb == first_bb);

  /* There was only one basic block in the last path.  Clear the path and
     return, so that paths starting at another basic block can be tried.  */
  if (path_size == 1)
    {
      path_size = 0;
      goto done;
    }

  /* If the path was empty from the beginning, construct a new path.  */
  if (path_size == 0)
    data->path[path_size++].bb = first_bb;
  else
    {
      /* Otherwise, path_size must be equal to or greater than 2, because
	 a previous path exists that is at least two basic blocks long.

	 Update the previous branch path, if any.  If the last branch was
	 previously along the branch edge, take the fallthrough edge now.  */
      while (path_size >= 2)
	{
	  basic_block last_bb_in_path, previous_bb_in_path;

	  --path_size;
	  last_bb_in_path = data->path[path_size].bb;
	  previous_bb_in_path = data->path[path_size - 1].bb;

	  /* If we previously followed a path along the branch edge, try
	     the fallthru edge now.  */
	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
	      && any_condjump_p (BB_END (previous_bb_in_path))
	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
	      && e == BRANCH_EDGE (previous_bb_in_path))
	    {
	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_pred_p (bb)
		  /* We used to assert here that we would only see blocks
		     that we have not visited yet.  But we may end up
		     visiting basic blocks twice if the CFG has changed
		     in this run of cse_main, because when the CFG changes
		     the topological sort of the CFG also changes.  A basic
		     block that previously had more than two predecessors
		     may now have a single predecessor, and become part of
		     a path that starts at another basic block.

		     We still want to visit each basic block only once, so
		     halt the path here if we have already visited BB.  */
		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
		{
		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
		  data->path[path_size++].bb = bb;
		  break;
		}
	    }

	  data->path[path_size].bb = NULL;
	}

      /* If only one block remains in the path, bail.  */
      if (path_size == 1)
	{
	  path_size = 0;
	  goto done;
	}
    }

  /* Extend the path if possible.  */
  if (follow_jumps)
    {
      bb = data->path[path_size - 1].bb;
      while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
	{
	  if (single_succ_p (bb))
	    e = single_succ_edge (bb);
	  else if (EDGE_COUNT (bb->succs) == 2
		   && any_condjump_p (BB_END (bb)))
	    {
	      /* First try to follow the branch.  If that doesn't lead
		 to a useful path, follow the fallthru edge.  */
	      e = BRANCH_EDGE (bb);
	      if (!single_pred_p (e->dest))
		e = FALLTHRU_EDGE (bb);
	    }
	  else
	    e = NULL;

	  if (e
	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && single_pred_p (e->dest)
	      /* Avoid visiting basic blocks twice.  The large comment
		 above explains why this can happen.  */
	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
	    {
	      basic_block bb2 = e->dest;
	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
	      data->path[path_size++].bb = bb2;
	      bb = bb2;
	    }
	  else
	    bb = NULL;
	}
    }

done:
  data->path_size = path_size;
  return path_size != 0;
}
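
/* Sketch of the path enumeration, under an assumed diamond-shaped CFG:

       BB1
      /   \
    BB2   BB3
      \   /
       BB4

   With FOLLOW_JUMPS, the first call starting at BB1 may build the path
   BB1-BB2 (the branch edge is tried first); the next call flips the last
   branch and builds BB1-BB3.  BB4 is not appended to either path because
   it has two predecessors, so it later starts a path of its own.  */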
/* Dump the path in DATA to file F.  NSETS is the number of sets
   in the path.  */

static void
cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
{
  int path_entry;

  fprintf (f, ";; Following path with %d sets: ", nsets);
  for (path_entry = 0; path_entry < data->path_size; path_entry++)
    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
  fputc ('\n', f);
}
/* Return true if BB has exception handling successor edges.  */

static bool
have_eh_succ_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_EH)
      return true;

  return false;
}
/* Scan to the end of the path described by DATA.  Return an estimate of
   the total number of SETs of all insns in the path.  */

static void
cse_prescan_path (struct cse_basic_block_data *data)
{
  int nsets = 0;
  int path_entry;
  int path_size = data->path_size;

  /* Scan to end of each basic block in the path.  */
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = data->path[path_entry].bb;

      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  /* A PARALLEL can have lots of SETs in it,
	     especially if it is really an ASM_OPERANDS.  */
	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
	    nsets += XVECLEN (PATTERN (insn), 0);
	  else
	    nsets += 1;
	}
    }

  data->nsets = nsets;
}
/* Return true if the pattern of INSN uses a LABEL_REF for which
   there isn't a REG_LABEL_OPERAND note.  */

static bool
check_for_label_ref (rtx_insn *insn)
{
  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == LABEL_REF
	  && !LABEL_REF_NONLOCAL_P (x)
	  && (!JUMP_P (insn)
	      || !label_is_jump_target_p (LABEL_REF_LABEL (x), insn))
	  && LABEL_P (LABEL_REF_LABEL (x))
	  && INSN_UID (LABEL_REF_LABEL (x)) != 0
	  && !find_reg_note (insn, REG_LABEL_OPERAND, LABEL_REF_LABEL (x)))
	return true;
    }
  return false;
}
/* Process a single extended basic block described by EBB_DATA.  */

static void
cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
{
  int path_size = ebb_data->path_size;
  int path_entry;
  int num_insns = 0;

  /* Allocate the space needed by qty_table.  */
  qty_table = XNEWVEC (struct qty_table_elem, max_qty);

  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = ebb_data->path[path_entry].bb;

      /* Invalidate recorded information for eh regs if there is an EH
	 edge pointing to that bb.  */
      if (bb_has_eh_pred (bb))
	{
	  df_ref def;

	  FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
	    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	      invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
	}

      optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
      FOR_BB_INSNS (bb, insn)
	{
	  /* If we have processed 1,000 insns, flush the hash table to
	     avoid extreme quadratic behavior.  We must not include NOTEs
	     in the count since there may be more of them when generating
	     debugging information.  If we clear the table at different
	     times, code generated with -g -O might be different than code
	     generated with -O but not -g.

	     FIXME: This is a real kludge and needs to be done some other
	     way.  */
	  if (NONDEBUG_INSN_P (insn)
	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
	    {
	      flush_hash_table ();
	      num_insns = 0;
	    }

	  if (INSN_P (insn))
	    {
	      /* Process notes first so we have all notes in canonical forms
		 when looking for duplicate operations.  */
	      if (REG_NOTES (insn))
		{
		  bool changed = false;
		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
							NULL_RTX, &changed);
		  if (changed)
		    df_notes_rescan (insn);
		}

	      cse_insn (insn);

	      /* If we haven't already found an insn where we added a
		 LABEL_REF, check this one.  */
	      if (INSN_P (insn) && !recorded_label_ref
		  && check_for_label_ref (insn))
		recorded_label_ref = true;

#ifdef HAVE_cc0
	      if (NONDEBUG_INSN_P (insn))
		{
		  /* If the previous insn sets CC0 and this insn no
		     longer references CC0, delete the previous insn.
		     Here we use fact that nothing expects CC0 to be
		     valid over an insn, which is true until the final
		     pass.  */
		  rtx_insn *prev_insn;
		  rtx tem;

		  prev_insn = prev_nonnote_nondebug_insn (insn);
		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
		      && (tem = single_set (prev_insn)) != NULL_RTX
		      && SET_DEST (tem) == cc0_rtx
		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
		    delete_insn (prev_insn);

		  /* If this insn is not the last insn in the basic
		     block, it will be PREV_INSN(insn) in the next
		     iteration.  If we recorded any CC0-related
		     information for this insn, remember it.  */
		  if (insn != BB_END (bb))
		    {
		      prev_insn_cc0 = this_insn_cc0;
		      prev_insn_cc0_mode = this_insn_cc0_mode;
		    }
		}
#endif
	    }
	}

      /* With non-call exceptions, we are not always able to update
	 the CFG properly inside cse_insn.  So clean up possibly
	 redundant EH edges here.  */
      if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
	cse_cfg_altered |= purge_dead_edges (bb);

      /* If we changed a conditional jump, we may have terminated
	 the path we are following.  Check that by verifying that
	 the edge we would take still exists.  If the edge does
	 not exist anymore, purge the remainder of the path.
	 Note that this will cause us to return to the caller.  */
      if (path_entry < path_size - 1)
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  if (!find_edge (bb, next_bb))
	    {
	      do
		{
		  path_size--;

		  /* If we truncate the path, we must also reset the
		     visited bit on the remaining blocks in the path,
		     or we will never visit them at all.  */
		  bitmap_clear_bit (cse_visited_basic_blocks,
				    ebb_data->path[path_size].bb->index);
		  ebb_data->path[path_size].bb = NULL;
		}
	      while (path_size - 1 != path_entry);
	      ebb_data->path_size = path_size;
	    }
	}

      /* If this is a conditional jump insn, record any known
	 equivalences due to the condition being tested.  */
      insn = BB_END (bb);
      if (path_entry < path_size - 1
	  && JUMP_P (insn)
	  && single_set (insn)
	  && any_condjump_p (insn))
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
	  record_jump_equiv (insn, taken);
	}

#ifdef HAVE_cc0
      /* Clear the CC0-tracking related insns, they can't provide
	 useful information across basic block boundaries.  */
      prev_insn_cc0 = 0;
#endif
    }

  gcc_assert (next_qty <= max_qty);

  free (qty_table);
}
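
/* Illustrative example (hypothetical pseudos) of the record_jump_equiv
   step above: when the path follows the taken edge of

       (set (pc) (if_then_else (eq (reg:SI 100) (const_int 0)) ...))

   then inside the successor block on that path (reg:SI 100) is known to
   equal zero, so a later use of (reg:SI 100) in that block can be
   replaced with (const_int 0) when that is cheaper.  */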
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the instruction.

   Return 2 if jump optimizations should be redone due to simplifications
   in conditional jump instructions.
   Return 1 if the CFG should be cleaned up because it has been modified.
   Return 0 otherwise.  */

static int
cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int i, n_blocks;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();
  df_set_flags (DF_DEFER_INSN_RESCAN);

  reg_scan (get_insns (), max_reg_num ());
  init_cse_reg_info (nregs);

  ebb_data.path = XNEWVEC (struct branch_path,
			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));

  cse_cfg_altered = false;
  cse_jumps_altered = false;
  recorded_label_ref = false;
  constant_pool_entries_cost = 0;
  constant_pool_entries_regcost = 0;
  ebb_data.path_size = 0;
  ebb_data.nsets = 0;
  rtl_hooks = cse_rtl_hooks;

  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (cse_visited_basic_blocks);

  /* Loop over basic blocks in reverse post-order (RPO),
     excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
	 processed before.  */
      do
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
	}
      while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
	     && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
	{
	  /* Pre-scan the path.  */
	  cse_prescan_path (&ebb_data);

	  /* If this basic block has no sets, skip it.  */
	  if (ebb_data.nsets == 0)
	    continue;

	  /* Get a reasonable estimate for the maximum number of qty's
	     needed for this path.  For this, we take the number of sets
	     and multiply that by MAX_RECOG_OPERANDS.  */
	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;

	  /* Dump the path we're about to process.  */
	  if (dump_file)
	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

	  cse_extended_basic_block (&ebb_data);
	}
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  if (cse_jumps_altered || recorded_label_ref)
    return 2;
  else if (cse_cfg_altered)
    return 1;
  else
    return 0;
}
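
#if 0
/* A minimal sketch (not compiled in) of how a caller is expected to act
   on cse_main's return value; rest_of_handle_cse below does essentially
   this, with the usual timevar bookkeeping.  */
static void
example_run_cse (void)
{
  int tem = cse_main (get_insns (), max_reg_num ());

  if (tem == 2)
    {
      /* Jumps were simplified; rebuild labels and clean up the CFG.  */
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
    }
  else if (tem == 1)
    /* The CFG was modified in some other way; a plain cleanup suffices.  */
    cleanup_cfg (0);
}
#endif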
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
   We must then count uses of a SET_DEST regardless, because the insn can't be
   deleted here.  */

static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      count_reg_usage (SET_SRC (x), counts,
		       dest ? dest : SET_DEST (x),
		       incr);
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect dest to be NULL_RTX here.  If the insn may throw,
	 or if it cannot be deleted due to side-effects, mark this fact
	 by setting DEST to pc_rtx.  */
      if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
	  || side_effects_p (PATTERN (x)))
	dest = pc_rtx;
      if (code == CALL_INSN)
	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	    /* This REG_EQUAL note describes the result of a function call.
	       Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, dest, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
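
#if 0
/* A minimal sketch (not compiled in) of the intended use of
   count_reg_usage: accumulate, for every pseudo, how many times it is
   used in real insns.  delete_trivially_dead_insns below does the same
   thing with extra bookkeeping for debug insns and stores.  */
static int *
example_collect_use_counts (rtx_insn *insns, int nreg)
{
  int *counts = XCNEWVEC (int, nreg);
  for (rtx_insn *insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      count_reg_usage (insn, counts, NULL_RTX, 1);
  return counts;
}
#endif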
/* Return true if X is a dead register.  */

static bool
is_dead_reg (const_rtx x, int *counts)
{
  return (REG_P (x)
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && counts[REGNO (x)] == 0);
}
/* Return true if set is live.  */

static bool
set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx_insn *tem;
#endif

  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (!is_dead_reg (SET_DEST (set), counts)
	   || side_effects_p (SET_SRC (set)))
    return true;

  return false;
}
/* Return true if insn is live.  */

static bool
insn_live_p (rtx_insn *insn, int *counts)
{
  int i;

  if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, insn, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else if (DEBUG_INSN_P (insn))
    {
      rtx_insn *next;

      for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
	if (NOTE_P (next))
	  continue;
	else if (!DEBUG_INSN_P (next))
	  return true;
	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
	  return false;

      return true;
    }
  else
    return true;
}
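
/* Example of the DEBUG_INSN case above (hypothetical decl D): a debug
   bind for D is dead when it is followed, with only notes and other
   debug insns in between, by another debug bind for the same D, because
   the later bind supersedes it before any real insn can observe the
   earlier value.  */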
/* Count the number of stores into pseudo.  Callback for note_stores.  */

static void
count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
  int *counts = (int *) data;
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (x)]++;
}
/* Return true if DEBUG_INSN pattern PAT needs to be reset because some dead
   pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
   is dead and REPLACEMENTS[X] is null if it has no replacement.
   Set *SEEN_REPL to true if we see a dead register that does have
   a replacement.  */

static bool
is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
		    bool *seen_repl)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
    {
      const_rtx x = *iter;
      if (is_dead_reg (x, counts))
	{
	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
	    *seen_repl = true;
	  else
	    return true;
	}
    }
  return false;
}
/* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
   Callback for simplify_replace_fn_rtx.  */

static rtx
replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
{
  rtx *replacements = (rtx *) data;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && replacements[REGNO (x)] != NULL_RTX)
    {
      if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
	return replacements[REGNO (x)];
      return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
			     GET_MODE (replacements[REGNO (x)]));
    }

  return NULL_RTX;
}
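
/* Illustrative example (hypothetical operands): if (reg:SI 150) is dead
   and REPLACEMENTS[150] holds (debug_expr:SI D#1), then an SImode use
   (reg:SI 150) inside a DEBUG_INSN is replaced by the debug_expr
   directly, while a narrower use such as (reg:HI 150) is rewritten via
   lowpart_subreg to the low part of the debug_expr.  */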
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

int
delete_trivially_dead_insns (rtx_insn *insns, int nreg)
{
  int *counts;
  rtx_insn *insn, *prev;
  rtx *replacements = NULL;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  if (MAY_HAVE_DEBUG_INSNS)
    {
      counts = XCNEWVEC (int, nreg * 3);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			   NULL_RTX, 1);
	else if (INSN_P (insn))
	  {
	    count_reg_usage (insn, counts, NULL_RTX, 1);
	    note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
	  }
      /* If there can be debug insns, COUNTS are 3 consecutive arrays.
	 First one counts how many times each pseudo is used outside
	 of debug insns, second counts how many times each pseudo is
	 used in debug insns and third counts how many times a pseudo
	 is stored.  */
    }
  else
    {
      counts = XCNEWVEC (int, nreg);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  count_reg_usage (insn, counts, NULL_RTX, 1);
      /* If no debug insns can be present, COUNTS is just an array
	 which counts how many times each pseudo is used.  */
    }
  /* Pseudo PIC register should be considered as used due to possible
     new usages generated.  */
  if (!reload_completed
      && pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (pic_offset_table_rtx)]++;
  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.

     If some otherwise unused register is only used in DEBUG_INSNs,
     try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
     the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
     has been created for the unused register, replace it with
     the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
	{
	  if (DEBUG_INSN_P (insn))
	    count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			     NULL_RTX, -1);
	  else
	    {
	      rtx set;
	      if (MAY_HAVE_DEBUG_INSNS
		  && (set = single_set (insn)) != NULL_RTX
		  && is_dead_reg (SET_DEST (set), counts)
		  /* Used at least once in some DEBUG_INSN.  */
		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
		  /* And set exactly once.  */
		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
		  && !side_effects_p (SET_SRC (set))
		  && asm_noperands (PATTERN (insn)) < 0)
		{
		  rtx dval, bind_var_loc;
		  rtx_insn *bind;

		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
		  dval = make_debug_expr_from_rtl (SET_DEST (set));

		  /* Emit a debug bind insn before the insn in which
		     reg dies.  */
		  bind_var_loc =
		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
					  DEBUG_EXPR_TREE_DECL (dval),
					  SET_SRC (set),
					  VAR_INIT_STATUS_INITIALIZED);
		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);

		  bind = emit_debug_insn_before (bind_var_loc, insn);
		  df_insn_rescan (bind);

		  if (replacements == NULL)
		    replacements = XCNEWVEC (rtx, nreg);
		  replacements[REGNO (SET_DEST (set))] = dval;
		}

	      count_reg_usage (insn, counts, NULL_RTX, -1);
	      ndead++;
	    }
	  delete_insn_and_edges (insn);
	}
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  {
	    /* If this debug insn references a dead register that wasn't
	       replaced with a DEBUG_EXPR, reset the DEBUG_INSN.  */
	    bool seen_repl = false;
	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
				    counts, replacements, &seen_repl))
	      {
		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
		df_insn_rescan (insn);
	      }
	    else if (seen_repl)
	      {
		INSN_VAR_LOCATION_LOC (insn)
		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
					     NULL_RTX, replace_dead_reg,
					     replacements);
		df_insn_rescan (insn);
	      }
	  }
      free (replacements);
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
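
/* Worked example (hypothetical pseudos) of the debug handling above.
   Starting from

       (insn (set (reg:SI 130) (mult:SI (reg:SI 131) (const_int 3))))
       (debug_insn (var_location D (reg:SI 130)))

   where (reg:SI 130) has no uses in real insns, the setter is deleted,
   but first a debug temporary is introduced so the location survives:

       (debug_insn (var_location D#2 (mult:SI (reg:SI 131) (const_int 3))))
       (debug_insn (var_location D (debug_expr:SI D#2)))

   The names D and D#2 are illustrative only.  */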
/* If LOC contains references to NEWREG in a different mode, change them
   to use NEWREG instead.  */

static void
cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
		    rtx *loc, rtx insn, rtx newreg)
{
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      if (x
	  && REG_P (x)
	  && REGNO (x) == REGNO (newreg)
	  && GET_MODE (x) != GET_MODE (newreg))
	{
	  validate_change (insn, loc, newreg, 1);
	  iter.skip_subrtxes ();
	}
    }
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
{
  int success;

  if (!INSN_P (insn))
    return;

  subrtx_ptr_iterator::array_type array;
  cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
  cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);

  /* If the following assertion was triggered, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes only can be considered compatible if the insn - with the mode
     replaced by any of the compatible modes - can still be recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
{
  rtx_insn *insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx_insn *insns[2];
  machine_mode modes[2];
  rtx_insn *last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx_insn *insn;
      rtx_insn *end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      machine_mode set_mode;
	      machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))
		{
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     further.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn_and_edges (insns[i]);
    }

  return mode;
}
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *last_insn;
      rtx cc_reg;
      rtx_insn *insn;
      rtx_insn *cc_src_insn;
      rtx cc_src;
      machine_mode mode;
      machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      cc_src_insn = NULL;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}
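
/* Illustrative example (register numbers and modes are hypothetical):
   when a block ends with

       (set (reg:CC 17) (compare:CC (reg:SI 100) (const_int 0)))
       (set (pc) (if_then_else (lt (reg:CC 17) (const_int 0)) ...))

   and both successor blocks start by recomputing the same comparison into
   (reg:CC 17), cse_cc_succs deletes those duplicate comparison insns; if
   the duplicates used a different but compatible CC mode, the remaining
   setter and its users are rewritten to the common mode first.  */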
/* Perform common subexpression elimination.  Nonzero value from
   `cse_main' means that jumps were simplified and some code may now
   be unreachable, so do jump optimization again.  */
static unsigned int
rest_of_handle_cse (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1 || optimize > 1)
    cleanup_cfg (0);

  return 0;
}
const pass_data pass_data_cse =
{
  RTL_PASS, /* type */
  "cse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse : public rtl_opt_pass
{
public:
  pass_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }
  virtual unsigned int execute (function *) { return rest_of_handle_cse (); }

}; // class pass_cse

rtl_opt_pass *
make_pass_cse (gcc::context *ctxt)
{
  return new pass_cse (ctxt);
}
/* Run second CSE pass after loop optimizations.  */
static unsigned int
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because it
     makes it harder for that pass to determine whether a jump can be
     bypassed safely.  */
  cse_condition_code_reg ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  cse_not_expected = 1;
  return 0;
}
const pass_data pass_data_cse2 =
{
  RTL_PASS, /* type */
  "cse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse2 : public rtl_opt_pass
{
public:
  pass_cse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_rerun_cse_after_loop;
    }

  virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }

}; // class pass_cse2

rtl_opt_pass *
make_pass_cse2 (gcc::context *ctxt)
{
  return new pass_cse2 (ctxt);
}
/* Run a local CSE pass after global optimizations.  */
static unsigned int
rest_of_handle_cse_after_global_opts (void)
{
  int save_cfj;
  int tem;

  /* We only want to do local CSE, so don't follow jumps.  */
  save_cfj = flag_cse_follow_jumps;
  flag_cse_follow_jumps = 0;

  rebuild_jump_labels (get_insns ());
  tem = cse_main (get_insns (), max_reg_num ());
  purge_all_dead_edges ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  cse_not_expected = !flag_rerun_cse_after_loop;

  /* If cse altered any jumps, rerun jump opts to clean things up.  */
  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  flag_cse_follow_jumps = save_cfj;
  return 0;
}
const pass_data pass_data_cse_after_global_opts =
{
  RTL_PASS, /* type */
  "cse_local", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse_after_global_opts : public rtl_opt_pass
{
public:
  pass_cse_after_global_opts (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_rerun_cse_after_global_opts;
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_cse_after_global_opts ();
    }

}; // class pass_cse_after_global_opts

rtl_opt_pass *
make_pass_cse_after_global_opts (gcc::context *ctxt)
{
  return new pass_cse_after_global_opts (ctxt);
}