/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.
   LOG_LINKS does not have links for uses of CC0.  They don't
   need to, because the insn that sets CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.
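
   As a hypothetical illustration of that substitution (not a quote of
   any particular target's RTL), suppose I2 and I3 are joined by a
   logical link:

	I2: (set (reg:SI 100) (mult:SI (reg:SI 99) (const_int 4)))
	I3: (set (reg:SI 101) (plus:SI (reg:SI 100) (const_int 8)))

   Substituting I2's value for reg 100 in I3 gives the tentative insn

	(set (reg:SI 101) (plus:SI (mult:SI (reg:SI 99) (const_int 4))
				   (const_int 8)))

   which is installed, and I2 deleted, only if the result matches some
   pattern in the machine description.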
   There are a few exceptions where the dataflow information isn't
   completely updated (however this is only a local issue since it is
   regenerated before the next pass that uses it):

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "target.h"
#include "optabs.h"
#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file.  */
#include "output.h"
#include "params.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
/* combine_instructions may try to replace the right hand side of the
   second instruction with the value of an associated REG_EQUAL note
   before throwing it at try_combine.  That is problematic when there
   is a REG_DEAD note for a register used in the old right hand side
   and can cause distribute_notes to do wrong things.  This is the
   second instruction if it has been so modified, null otherwise.  */

static rtx i2mod;

/* When I2MOD is nonnull, this is a copy of the old right hand side.  */

static rtx i2mod_old_rhs;

/* When I2MOD is nonnull, this is a copy of the new right hand side.  */

static rtx i2mod_new_rhs;
typedef struct reg_stat_struct {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
	 to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value		the last value assigned
     last_set_label		records the value of label_tick when the
				register was assigned
     last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
     last_set_invalid		set to nonzero when it is not valid
				to use the value of this register in some
				other expression

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two paragraphs are out of date).

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */
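
  /* As a hypothetical illustration of the redundancy test described
     above: if the recorded last_set_value shows that reg 100 was last
     set from a QImode zero extension, only its low 8 bits can be
     nonzero, so a later (and:SI (reg:SI 100) (const_int 255)) is
     redundant and can be replaced by (reg:SI 100).  */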
  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT last_set_nonzero_bits;
  char last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode) last_set_mode : 8;
  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.  (A concrete sketch follows these field
     declarations.)

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned char sign_bit_copies;

  unsigned HOST_WIDE_INT nonzero_bits;
  /* Record the value of the label_tick when the last truncation
     happened.  The field truncated_to_mode is only valid if
     truncation_label == label_tick.  */

  int truncation_label;

  /* Record the last truncation seen for this register.  If truncation
     is not a nop to this mode we might be able to save an explicit
     truncation if we know that value already contains a truncated
     value.  */

  ENUM_BITFIELD(machine_mode) truncated_to_mode : 8;
} reg_stat_type;

DEF_VEC_O(reg_stat_type);
DEF_VEC_ALLOC_O(reg_stat_type,heap);

static VEC(reg_stat_type,heap) *reg_stat;
/* Record the luid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the luid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_luid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest LUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this LUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_luid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;
/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* Length of the currently allocated uid_insn_cost array.  */

static int max_uid_known;

/* The following array records the insn_rtx_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* The following array records the LOG_LINKS for every insn in the
   instruction stream as an INSN_LIST rtx.  */

static rtx *uid_log_links;

#define INSN_COST(INSN)		(uid_insn_cost[INSN_UID (INSN)])
#define LOG_LINKS(INSN)		(uid_log_links[INSN_UID (INSN)])
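
/* Illustrative only: the iteration idiom these accessors support, as
   used throughout this file.  LOG_LINKS (insn) is an INSN_LIST whose
   XEXP (link, 0) is a linked insn and XEXP (link, 1) the list tail:

	for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	  try_combine_with (insn, XEXP (links, 0));

   where try_combine_with is a hypothetical stand-in for the various
   try_combine calls below.  */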
/* Incremented for each basic block.  */

static int label_tick;

/* Reset to label_tick for each label.  */

static int label_tick_ebb_start;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while computing them and after combine has
   completed.  This former test prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   and then never used.  */

static int nonzero_sign_valid;
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

struct undo
{
  struct undo *next;
  enum { UNDO_RTX, UNDO_INT, UNDO_MODE } kind;
  union { rtx r; int i; enum machine_mode m; } old_contents;
  union { rtx *r; int *i; } where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;
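
/* A sketch of the intended protocol (descriptive, not additional code):
   while try_combine builds a tentative pattern it records each change
   with the SUBST/SUBST_INT/SUBST_MODE macros below; every record is
   pushed onto undobuf.undos.  If the result fails to match, undo_all
   restores each *where from old_contents and moves the records to
   undobuf.frees for reuse; on success, undo_commit makes the changes
   permanent.  */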
/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;
static rtx reg_nonzero_bits_for_combine (const_rtx, enum machine_mode, const_rtx,
					 unsigned HOST_WIDE_INT,
					 unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (const_rtx, enum machine_mode, const_rtx,
						unsigned int, unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (rtx);
static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static const_rtx expand_field_assignment (const_rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
			    rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
			      unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_to_mode (rtx, enum machine_mode,
			  unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
				     unsigned HOST_WIDE_INT);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
				   unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
			    HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
				 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_conversions (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (const_rtx);
static int use_crosses_set_p (const_rtx, int);
static void reg_dead_at_p_1 (rtx, const_rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static void record_promoted_value (rtx, rtx);
static int unmentioned_reg_p_1 (rtx *, void *);
static bool unmentioned_reg_p (rtx, rtx);
static void record_truncated_value (rtx);
static bool reg_truncated_to_mode (enum machine_mode, const_rtx);
static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);
/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

#undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
#define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
/* Try to split PATTERN found in INSN.  This returns NULL_RTX if
   PATTERN cannot be split.  Otherwise, it returns an insn sequence.
   This is a wrapper around split_insns which ensures that the
   reg_stat vector is made larger if the splitter creates a new
   register.  */

static rtx
combine_split_insns (rtx pattern, rtx insn)
{
  rtx ret;
  unsigned int nregs;

  ret = split_insns (pattern, insn);
  nregs = max_reg_num ();
  if (nregs > VEC_length (reg_stat_type, reg_stat))
    VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
  return ret;
}
/* This is used by find_single_use to locate an rtx in LOC that
   contains exactly one use of DEST, which is typically either a REG
   or CC0.  It returns a pointer to the innermost rtx expression
   containing DEST.  Appearances of DEST that are being used to
   totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && !REG_P (SET_DEST (x))
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& REG_P (SUBREG_REG (SET_DEST (x)))
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (REG_P (dest) && REG_P (XEXP (x, i))
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (REG_P (dest)
		      && REG_P (XVECEXP (x, i, j))
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		/* Duplicate usage.  */
		return 0;
	    }
	}
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

static rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (!REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
		  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
		    && GET_CODE (SUBREG_REG (oldval)) == CONST_INT));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
		    && GET_CODE (XEXP (oldval, 0)) == CONST_INT));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_RTX;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
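
/* Typical (illustrative) use, as seen throughout the simplification
   routines below:

	SUBST (SET_SRC (x), newsrc);

   where newsrc stands for some hypothetical replacement expression;
   the old SET_SRC is recorded so the change can be reverted by
   undo_all.  */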
/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_INT;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))
/* Similar to SUBST, but just substitute the mode.  This is used when
   changing the mode of a pseudo-register, so that any other
   references to the entry in the regno_reg_rtx array will change as
   well.  */

static void
do_SUBST_MODE (rtx *into, enum machine_mode newval)
{
  struct undo *buf;
  enum machine_mode oldval = GET_MODE (*into);

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_MODE;
  buf->where.r = into;
  buf->old_contents.m = oldval;
  adjust_reg_mode (*into, newval);

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_MODE(INTO, NEWVAL)  do_SUBST_MODE(&(INTO), (NEWVAL))
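
/* For example (illustrative): after deciding to give pseudo REGNO a
   new mode, code below does

	SUBST_MODE (regno_reg_rtx[regno], new_mode);

   which changes the mode everywhere that shared rtx appears, while
   recording the old mode so undo_all can restore it.  */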
/* Subroutine of try_combine.  Determine whether the combine replacement
   patterns NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to
   insn_rtx_cost than the original instruction sequence I1, I2, I3 and
   undobuf.other_insn.  Note that I1 and/or NEWI2PAT may be NULL_RTX.
   NEWOTHERPAT and undobuf.other_insn may also both be NULL_RTX.  This
   function returns false if the costs of all instructions can be
   estimated and the replacements are more expensive than the original
   sequence.  */

static bool
combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat,
		       rtx newotherpat)
{
  int i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Lookup the original insn_rtx_costs.  */
  i2_cost = INSN_COST (i2);
  i3_cost = INSN_COST (i3);

  if (i1)
    {
      i1_cost = INSN_COST (i1);
      old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
		 ? i1_cost + i2_cost + i3_cost : 0;
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = 0;
    }

  /* Calculate the replacement insn_rtx_costs.  */
  new_i3_cost = insn_rtx_cost (newpat);
  if (newi2pat)
    {
      new_i2_cost = insn_rtx_cost (newi2pat);
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
		 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = INSN_COST (undobuf.other_insn);
      new_other_cost = insn_rtx_cost (newotherpat);
      if (old_other_cost > 0 && new_other_cost > 0)
	{
	  old_cost += old_other_cost;
	  new_cost += new_other_cost;
	}
      else
	old_cost = 0;
    }

  /* Disallow this recombination if both new_cost and old_cost are
     greater than zero, and new_cost is greater than old cost.  */
  if (old_cost > 0
      && new_cost > old_cost)
    {
      if (dump_file)
	{
	  if (i1)
	    {
	      fprintf (dump_file,
		       "rejecting combination of insns %d, %d and %d\n",
		       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
	      fprintf (dump_file, "original costs %d + %d + %d = %d\n",
		       i1_cost, i2_cost, i3_cost, old_cost);
	    }
	  else
	    {
	      fprintf (dump_file,
		       "rejecting combination of insns %d and %d\n",
		       INSN_UID (i2), INSN_UID (i3));
	      fprintf (dump_file, "original costs %d + %d = %d\n",
		       i2_cost, i3_cost, old_cost);
	    }

	  if (newi2pat)
	    fprintf (dump_file, "replacement costs %d + %d = %d\n",
		     new_i2_cost, new_i3_cost, new_cost);
	  else
	    fprintf (dump_file, "replacement cost %d\n", new_cost);
	}

      return false;
    }

  /* Update the uid_insn_cost array with the replacement costs.  */
  INSN_COST (i2) = new_i2_cost;
  INSN_COST (i3) = new_i3_cost;
  if (i1)
    INSN_COST (i1) = 0;

  return true;
}
/* Delete any insns that copy a register to itself.  */

static void
delete_noop_moves (void)
{
  rtx insn, next;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (INSN_P (insn) && noop_move_p (insn))
	    {
	      rtx note;

	      /* If we're about to remove the first insn of a libcall
		 then move the libcall note to the next real insn and
		 update the retval note.  */
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
		  && XEXP (note, 0) != insn)
		{
		  rtx new_libcall_insn = next_real_insn (insn);
		  rtx retval_note = find_reg_note (XEXP (note, 0),
						   REG_RETVAL, NULL_RTX);
		  REG_NOTES (new_libcall_insn)
		    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
					 REG_NOTES (new_libcall_insn));
		  XEXP (retval_note, 0) = new_libcall_insn;
		}

	      if (dump_file)
		fprintf (dump_file, "deleting noop move %d\n",
			 INSN_UID (insn));

	      delete_insn_and_edges (insn);
	    }
	}
    }
}
/* Fill in log links field for all insns.  */

static void
create_log_links (void)
{
  basic_block bb;
  rtx *next_use, insn;
  struct df_ref **def_vec, **use_vec;

  next_use = XCNEWVEC (rtx, max_reg_num ());

  /* Pass through each block from the end, recording the uses of each
     register and establishing log links when def is encountered.
     Note that we do not clear next_use array in order to save time,
     so we have to test whether the use is in the same basic block as def.

     There are a few cases below when we do not consider the definition or
     usage -- these are taken from what the original flow.c did.  Don't ask
     me why it is done this way; I don't know and if it works, I don't want
     to know.  */

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  /* Log links are created only once.  */
	  gcc_assert (!LOG_LINKS (insn));

	  for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
	    {
	      struct df_ref *def = *def_vec;
	      int regno = DF_REF_REGNO (def);
	      rtx use_insn;

	      if (!next_use[regno])
		continue;

	      /* Do not consider if it is pre/post modification in MEM.  */
	      if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
		continue;

	      /* Do not make the log link for frame pointer.  */
	      if ((regno == FRAME_POINTER_REGNUM
		   && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		  || (regno == HARD_FRAME_POINTER_REGNUM
		      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		  || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
		  )
		continue;

	      use_insn = next_use[regno];
	      if (BLOCK_FOR_INSN (use_insn) == bb)
		{
		  /* flow.c claimed:

		     We don't build a LOG_LINK for hard registers contained
		     in ASM_OPERANDs.  If these registers get replaced,
		     we might wind up changing the semantics of the insn,
		     even if reload can make what appear to be valid
		     assignments later.  */
		  if (regno >= FIRST_PSEUDO_REGISTER
		      || asm_noperands (PATTERN (use_insn)) < 0)
		    LOG_LINKS (use_insn) =
		      alloc_INSN_LIST (insn, LOG_LINKS (use_insn));
		}
	      next_use[regno] = NULL_RTX;
	    }

	  for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
	    {
	      struct df_ref *use = *use_vec;
	      int regno = DF_REF_REGNO (use);

	      /* Do not consider the usage of the stack pointer
		 by function call.  */
	      if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
		continue;

	      next_use[regno] = insn;
	    }
	}
    }

  free (next_use);
}

/* Clear LOG_LINKS fields of insns.  */

static void
clear_log_links (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      free_INSN_LIST_list (&LOG_LINKS (insn));
}
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
static int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  rtx links, nextlinks;
  rtx first;

  int new_direct_jump_p = 0;

  for (first = f; first && !INSN_P (first); )
    first = NEXT_INSN (first);
  if (!first)
    return 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  rtl_hooks = combine_rtl_hooks;

  VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);

  init_recog_no_volatile ();

  /* Allocate array for insn info.  */
  max_uid_known = get_max_uid ();
  uid_log_links = XCNEWVEC (rtx, max_uid_known + 1);
  uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = label_tick_ebb_start = 1;

  setup_incoming_promotions (first);

  create_log_links ();
  FOR_EACH_BB (this_basic_block)
    {
      last_call_luid = 0;
      mem_last_set = -1;
      label_tick++;
      FOR_BB_INSNS (this_basic_block, insn)
	if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
	  {
	    subst_low_luid = DF_INSN_LUID (insn);
	    subst_insn = insn;

	    note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
			 insn);
	    record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	    for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	      if (REG_NOTE_KIND (links) == REG_INC)
		set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						  insn);
#endif

	    /* Record the current insn_rtx_cost of this instruction.  */
	    if (NONJUMP_INSN_P (insn))
	      INSN_COST (insn) = insn_rtx_cost (PATTERN (insn));
	    if (dump_file)
	      fprintf (dump_file, "insn_cost %d: %d\n",
		       INSN_UID (insn), INSN_COST (insn));
	  }
	else if (LABEL_P (insn))
	  label_tick_ebb_start = label_tick;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = label_tick_ebb_start = 1;
  init_reg_last ();
  setup_incoming_promotions (first);

  FOR_EACH_BB (this_basic_block)
    {
      last_call_luid = 0;
      mem_last_set = -1;
      label_tick++;
      for (insn = BB_HEAD (this_basic_block);
	   insn != NEXT_INSN (BB_END (this_basic_block));
	   insn = next ? next : NEXT_INSN (insn))
	{
	  next = 0;
	  if (INSN_P (insn))
	    {
	      /* See if we know about function return values before this
		 insn based upon SUBREG flags.  */
	      check_conversions (insn, PATTERN (insn));

	      /* Try this insn with each insn it links back to.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 NULL_RTX, &new_direct_jump_p)) != 0)
		  goto retry;

	      /* Try each sequence of three linked insns ending with this one.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		{
		  rtx link = XEXP (links, 0);

		  /* If the linked insn has been replaced by a note, then there
		     is no point in pursuing this chain any further.  */
		  if (NOTE_P (link))
		    continue;

		  for (nextlinks = LOG_LINKS (link);
		       nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, link,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

#ifdef HAVE_cc0
	      /* Try to combine a jump insn that uses CC0
		 with a preceding insn that sets CC0, and maybe with its
		 logical predecessor as well.
		 This is how we make decrement-and-branch insns.
		 We need this special code because data flow connections
		 via CC0 do not get entered in LOG_LINKS.  */

	      if (JUMP_P (insn)
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && NONJUMP_INSN_P (prev)
		  && sets_cc0_p (PATTERN (prev)))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Do the same for an insn that explicitly references CC0.  */
	      if (NONJUMP_INSN_P (insn)
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && NONJUMP_INSN_P (prev)
		  && sets_cc0_p (PATTERN (prev))
		  && GET_CODE (PATTERN (insn)) == SET
		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Finally, see if any of the insns that this insn links to
		 explicitly references CC0.  If so, try this insn, that insn,
		 and its predecessor if it sets CC0.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if (NONJUMP_INSN_P (XEXP (links, 0))
		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		    && NONJUMP_INSN_P (prev)
		    && sets_cc0_p (PATTERN (prev))
		    && (next = try_combine (insn, XEXP (links, 0),
					    prev, &new_direct_jump_p)) != 0)
		  goto retry;
#endif

	      /* Try combining an insn with two different insns whose results it
		 uses.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		for (nextlinks = XEXP (links, 1); nextlinks;
		     nextlinks = XEXP (nextlinks, 1))
		  if ((next = try_combine (insn, XEXP (links, 0),
					   XEXP (nextlinks, 0),
					   &new_direct_jump_p)) != 0)
		    goto retry;

	      /* Try this insn with each REG_EQUAL note it links back to.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		{
		  rtx set, note;
		  rtx temp = XEXP (links, 0);
		  if ((set = single_set (temp)) != 0
		      && (note = find_reg_equal_equiv_note (temp)) != 0
		      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
		      /* Avoid using a register that may already have been
			 marked dead by an earlier instruction.  */
		      && ! unmentioned_reg_p (note, SET_SRC (set))
		      && (GET_MODE (note) == VOIDmode
			  ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
			  : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
		    {
		      /* Temporarily replace the set's source with the
			 contents of the REG_EQUAL note.  The insn will
			 be deleted or recognized by try_combine.  */
		      rtx orig = SET_SRC (set);
		      SET_SRC (set) = note;
		      i2mod = temp;
		      i2mod_old_rhs = copy_rtx (orig);
		      i2mod_new_rhs = copy_rtx (note);
		      next = try_combine (insn, i2mod, NULL_RTX,
					  &new_direct_jump_p);
		      i2mod = NULL_RTX;
		      if (next)
			goto retry;
		      SET_SRC (set) = orig;
		    }
		}

	      if (!NOTE_P (insn))
		record_dead_and_set_regs (insn);

	    retry:
	      ;
	    }
	  else if (LABEL_P (insn))
	    label_tick_ebb_start = label_tick;
	}
    }

  clear_log_links ();
  clear_bb_flags ();
  new_direct_jump_p |= purge_all_dead_edges ();
  delete_noop_moves ();

  /* Clean up.  */
  free (uid_log_links);
  free (uid_insn_cost);
  VEC_free (reg_stat_type, heap, reg_stat);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
/* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */

static void
init_reg_last (void)
{
  unsigned int i;
  reg_stat_type *p;

  for (i = 0; VEC_iterate (reg_stat_type, reg_stat, i, p); ++i)
    memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
}
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (rtx first)
{
  tree arg;
  bool strictly_local = false;

  if (!targetm.calls.promote_function_args (TREE_TYPE (cfun->decl)))
    return;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    {
      rtx reg = DECL_INCOMING_RTL (arg);
      int uns1, uns3;
      enum machine_mode mode1, mode2, mode3, mode4;

      /* Only continue if the incoming argument is in a register.  */
      if (!REG_P (reg))
	continue;

      /* Determine, if possible, whether all call sites of the current
	 function lie within the current compilation unit.  (This does
	 take into account the exporting of a function via taking its
	 address, and so forth.)  */
      if (flag_unit_at_a_time)
	strictly_local = cgraph_local_info (current_function_decl)->local;

      /* The mode and signedness of the argument before any promotions happen
	 (equal to the mode of the pseudo holding it at that stage).  */
      mode1 = TYPE_MODE (TREE_TYPE (arg));
      uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));

      /* The mode and signedness of the argument after any source language and
	 TARGET_PROMOTE_PROTOTYPES-driven promotions.  */
      mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
      uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));

      /* The mode and signedness of the argument as it is actually passed,
	 after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions.  */
      mode3 = promote_mode (DECL_ARG_TYPE (arg), mode2, &uns3, 1);

      /* The mode of the register in which the argument is being passed.  */
      mode4 = GET_MODE (reg);

      /* Eliminate sign extensions in the callee when possible.  Only
	 do this when:
	 (a) a mode promotion has occurred;
	 (b) the mode of the register is the same as the mode of
	     the argument as it is passed; and
	 (c) the signedness does not change across any of the promotions; and
	 (d) when no language-level promotions (which we cannot guarantee
	     will have been done by an external caller) are necessary,
	     unless we know that this function is only ever called from
	     the current compilation unit -- all of whose call sites will
	     do the mode1 --> mode2 promotion.  */
      if (mode1 != mode3
	  && mode3 == mode4
	  && uns1 == uns3
	  && (mode1 == mode2 || strictly_local))
	{
	  /* Record that the value was promoted from mode1 to mode3,
	     so that any sign extension at the head of the current
	     function may be eliminated.  */
	  rtx x;
	  x = gen_rtx_CLOBBER (mode1, const0_rtx);
	  x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
	  record_value_for_reg (reg, first, x);
	}
    }
}
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
{
  rtx insn = (rtx) data;
  unsigned int num;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P
	   (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));

      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
	  rsp->sign_bit_copies = 1;
	  return;
	}

      /* If this register is being initialized using itself, and the
	 register is uninitialized in this basic block, and there are
	 no LOG_LINKS which set the register, then part of the
	 register is uninitialized.  In that case we can't assume
	 anything about the number of nonzero bits.

	 ??? We could do better if we checked this in
	 reg_{nonzero_bits,num_sign_bit_copies}_for_combine.  Then we
	 could avoid making assumptions about the insn which initially
	 sets the register, while still using the information in other
	 insns.  We would have to be careful to check every insn
	 involved in the combination.  */

      if (insn
	  && reg_referenced_p (x, PATTERN (insn))
	  && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
			       REGNO (x)))
	{
	  rtx link;

	  for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
	    {
	      if (dead_or_set_p (XEXP (link, 0), x))
		break;
	    }
	  if (!link)
	    {
	      rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
	      rsp->sign_bit_copies = 1;
	      return;
	    }
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_stat[].nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Don't call nonzero_bits if it cannot change anything.  */
	  if (rsp->nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
	    rsp->nonzero_bits |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (rsp->sign_bit_copies == 0
	      || rsp->sign_bit_copies > num)
	    rsp->sign_bit_copies = num;
	}
      else
	{
	  rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
	  rsp->sign_bit_copies = 1;
	}
    }
}
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   returns 1.  */

static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
	       rtx *pdest, rtx *psrc)
{
  int i;
  rtx set = 0;
  rtx src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
	  rtx note;

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (REG_P (XEXP (elt, 0))
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && REG_P (XEXP (i3elt, 0))
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX))
		      || INTVAL (XEXP (note, 0)) <= 0)
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (CALL_P (i3)
	  && (find_reg_fusage (i3, USE, dest)
	      || (REG_P (dest)
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't substitute into a non-local goto, this confuses CFG.  */
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 If the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((!MEM_P (src)
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, DF_INSN_LUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that as a special case.  */
      || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (REG_P (dest))
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (REG_P (src)
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;


  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
	{
	  /* Don't substitute for a register intended as a clobberable
	     operand.  */
	  rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
	  if (rtx_equal_p (reg, dest))
	    return 0;

	  /* If the clobber represents an earlyclobber operand, we must not
	     substitute an expression containing the clobbered register.
	     As we do not analyze the constraint strings here, we have to
	     make the conservative assumption.  However, if the register is
	     a fixed hard reg, the clobber cannot represent any operand;
	     we leave it up to the machine description to either accept or
	     reject use-and-clobber patterns.  */
	  if (!REG_P (reg)
	      || REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      || !fixed_regs[REGNO (reg)])
	    if (reg_overlap_mentioned_p (reg, src))
	      return 0;
	}

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3 */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN contains an autoincrement or autodecrement, make sure that
     register is not used between there and I3, and not already used in
     I3 either.  Neither must it be used in PRED or SUCC, if they exist.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (JUMP_P (i3)
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || (pred != NULL_RTX
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
	    || (succ != NULL_RTX
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
1827 /* LOC is the location within I3 that contains its pattern or the component
1828 of a PARALLEL of the pattern. We validate that it is valid for combining.
1830 One problem is if I3 modifies its output, as opposed to replacing it
1831 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1832 so would produce an insn that is not equivalent to the original insns.
1836 (set (reg:DI 101) (reg:DI 100))
1837 (set (subreg:SI (reg:DI 101) 0) <foo>)
1839 This is NOT equivalent to:
1841 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1842 (set (reg:DI 101) (reg:DI 100))])
1844 Not only does this modify 100 (in which case it might still be valid
1845 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1847 We can also run into a problem if I2 sets a register that I1
1848 uses and I1 gets directly substituted into I3 (not via I2). In that
1849 case, we would be getting the wrong value of I2DEST into I3, so we
1850 must reject the combination. This case occurs when I2 and I1 both
1851 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1852 If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
1853 of a SET must prevent combination from occurring.
1855 Before doing the above check, we first try to expand a field assignment
1856 into a set of logical operations.
1858 If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
1859 we place a register that is both set and used within I3. If more than one
1860 such register is detected, we fail.
1862 Return 1 if the combination is valid, zero otherwise. */
1865 combinable_i3pat (rtx i3
, rtx
*loc
, rtx i2dest
, rtx i1dest
,
1866 int i1_not_in_src
, rtx
*pi3dest_killed
)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;
      rtx subdest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
             || GET_CODE (inner_dest) == SUBREG
             || GET_CODE (inner_dest) == ZERO_EXTRACT)
        inner_dest = XEXP (inner_dest, 0);
      /* Check for the case where I3 modifies its output, as discussed
         above.  We don't want to prevent pseudos from being combined
         into the address of a MEM, so only prevent the combination if
         i1 or i2 set the same MEM.  */
      if ((inner_dest != dest &&
           (!MEM_P (inner_dest)
            || rtx_equal_p (i2dest, inner_dest)
            || (i1dest && rtx_equal_p (i1dest, inner_dest)))
           && (reg_overlap_mentioned_p (i2dest, inner_dest)
               || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

          /* This is the same test done in can_combine_p except we can't test
             all_adjacent; we don't have to, since this instruction will stay
             in place, thus we are not considering increasing the lifetime of
             INNER_DEST.

             Also, if this insn sets a function argument, combining it with
             something that might need a spill could clobber a previous
             function argument; the all_adjacent test in can_combine_p also
             checks this; here, we do a more specific test for this case.  */

          || (REG_P (inner_dest)
              && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
              && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
                                        GET_MODE (inner_dest))))
          || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
        return 0;
      /* If DEST is used in I3, it is being killed in this insn, so
         record that for later.  We have to consider paradoxical
         subregs here, since they kill the whole register, but we
         ignore partial subregs, STRICT_LOW_PART, etc.
         Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
         STACK_POINTER_REGNUM, since these are always considered to be
         live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      subdest = dest;
      if (GET_CODE (subdest) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (subdest))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
        subdest = SUBREG_REG (subdest);
      if (pi3dest_killed
          && REG_P (subdest)
          && reg_referenced_p (subdest, PATTERN (i3))
          && REGNO (subdest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && (REGNO (subdest) != ARG_POINTER_REGNUM
              || ! fixed_regs[REGNO (subdest)])
#endif
          && REGNO (subdest) != STACK_POINTER_REGNUM)
        {
          if (*pi3dest_killed)
            return 0;

          *pi3dest_killed = subdest;
        }
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
                                i1_not_in_src, pi3dest_killed))
          return 0;
    }

  return 1;
}
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
                && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);

    default:
      if (BINARY_P (x))
        return contains_muldiv (XEXP (x, 0))
               || contains_muldiv (XEXP (x, 1));

      if (UNARY_P (x))
        return contains_muldiv (XEXP (x, 0));

      return 0;
    }
}
/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, reload might die.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
           && ! fixed_regs[REGNO (src)]
           && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
          || (REGNO (dest) < FIRST_PSEUDO_REGISTER
              && ! fixed_regs[REGNO (dest)]
              && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}
struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};
/* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
   hard registers that are known to be written to / clobbered in full.  */
static void
likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
{
  struct likely_spilled_retval_info *info = data;
  unsigned regno, nregs;
  unsigned new_mask;

  if (!REG_P (XEXP (set, 0)))
    return;
  regno = REGNO (x);
  if (regno >= info->regno + info->nregs)
    return;
  nregs = hard_regno_nregs[regno][GET_MODE (x)];
  if (regno + nregs <= info->regno)
    return;
  new_mask = (2U << (nregs - 1)) - 1;
  if (regno < info->regno)
    new_mask >>= info->regno - regno;
  else
    new_mask <<= regno - info->regno;
  info->mask &= ~new_mask;
}
/* Return nonzero iff part of the return value is live during INSN, and
   it is likely spilled.  This can happen when more than one insn is needed
   to copy the return value, e.g. when we consider to combine into the
   second copy insn for a complex value.  */

static int
likely_spilled_retval_p (rtx insn)
{
  rtx use = BB_END (this_basic_block);
  rtx reg, p;
  unsigned regno, nregs;
  /* We assume here that no machine mode needs more than
     32 hard registers when the value overlaps with a register
     for which FUNCTION_VALUE_REGNO_P is true.  */
  unsigned mask;
  struct likely_spilled_retval_info info;

  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
    return 0;
  reg = XEXP (PATTERN (use), 0);
  if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
    return 0;
  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][GET_MODE (reg)];
  if (nregs == 1)
    return 0;
  mask = (2U << (nregs - 1)) - 1;

  /* Disregard parts of the return value that are set later.  */
  info.regno = regno;
  info.nregs = nregs;
  info.mask = mask;
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
    if (INSN_P (p))
      note_stores (PATTERN (p), likely_spilled_retval_1, &info);
  mask = info.mask;

  /* Check if any of the (probably) live return value registers is
     likely spilled.  */
  nregs --;
  do
    {
      if ((mask & 1 << nregs)
          && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
        return 1;
    } while (nregs--);
  return 0;
}
/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (rtx insn)
{
  /* For notes, be conservative and simply remove them.  */
  remove_reg_equal_equiv_notes (insn);

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));

  df_insn_rescan (insn);
}
/* Return TRUE if combine can reuse reg X in mode MODE.
   ADDED_SETS is nonzero if the original set is still required.  */
static bool
can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
{
  unsigned int regno;

  if (!REG_P (x))
    return false;

  regno = REGNO (x);
  /* Allow hard registers if the new mode is legal, and occupies no more
     registers than the old mode.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    return (HARD_REGNO_MODE_OK (regno, mode)
            && (hard_regno_nregs[regno][GET_MODE (x)]
                >= hard_regno_nregs[regno][mode]));

  /* Or a pseudo that is only used once.  */
  return (REG_N_SETS (regno) == 1 && !added_sets
          && !REG_USERVAR_P (x));
}
/* Check whether X, the destination of a set, refers to part of
   the register specified by REG.  */

static bool
reg_subword_p (rtx x, rtx reg)
{
  /* Check that reg is an integer mode register.  */
  if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_CODE (x) == ZERO_EXTRACT)
    x = XEXP (x, 0);

  return GET_CODE (x) == SUBREG
         && SUBREG_REG (x) == reg
         && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
}
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.  */

static rtx
try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  rtvec newpat_vec_with_clobbers = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I1) and PATTERN (I2), or a copy of it in certain cases.  */
  rtx i1pat = 0, i2pat = 0;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i2dest_killed = 0, i1dest_killed = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;
  int swap_i2i3 = 0;

  int maxreg;
  rtx temp;
  rtx link;
  rtx other_pat = 0;
  rtx new_other_notes;
  int i;
  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      || likely_spilled_retval_p (i3)
      /* We also can't do anything if I3 has a
         REG_LIBCALL note since we don't want to disrupt the contiguity of a
         libcall.  */
#if 0
      /* ??? This gives worse code, and appears to be unnecessary, since no
         pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
#endif
      )
    return 0;

  combine_attempts++;

  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);
  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;
  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     Note that this case handles both multiple sets in I2 and also
     cases where I2 has a number of CLOBBER or PARALLELs.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
      && REG_P (SET_SRC (PATTERN (i3)))
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
         below would need to check what is inside (and reg_overlap_mentioned_p
         doesn't support those codes anyway).  Don't allow those destinations;
         the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
                                    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
         which we are going to substitute into one output of I2,
         is not used within another output of I2.  We must avoid making this:
         (parallel [(set (mem (reg 69)) ...)
                    (set (reg 69) ...)])
         which is not well-defined as to order of actions.
         (Besides, reload can't handle output reloads for this.)

         The problem can also happen if the dest of I3 is a memory ref,
         if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
        if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
             || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
            && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
                                        SET_DEST (XVECEXP (p2, 0, i))))
          break;

      if (i == XVECLEN (p2, 0))
        for (i = 0; i < XVECLEN (p2, 0); i++)
          if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
               || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
              && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
            {
              combine_merges++;

              subst_insn = i3;
              subst_low_luid = DF_INSN_LUID (i2);

              added_sets_2 = added_sets_1 = 0;
              i2dest = SET_SRC (PATTERN (i3));
              i2dest_killed = dead_or_set_p (i2, i2dest);

              /* Replace the dest in I2 with our dest and make the resulting
                 insn the new pattern for I3.  Then skip to where we
                 validate the pattern.  Everything was set up above.  */
              SUBST (SET_DEST (XVECEXP (p2, 0, i)),
                     SET_DEST (PATTERN (i3)));

              newpat = p2;
              i3_subst_into_i2 = 1;
              goto validate_replacement;
            }
    }
  /* If I2 is setting a pseudo to a constant and I3 is setting some
     sub-part of it to another constant, merge them by making a new
     constant.  */
  if (i1 == 0
      && (temp = single_set (i2)) != 0
      && (GET_CODE (SET_SRC (temp)) == CONST_INT
          || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
      && GET_CODE (PATTERN (i3)) == SET
      && (GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT
          || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
      && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
    {
      rtx dest = SET_DEST (PATTERN (i3));
      int offset = -1;
      int width = 0;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        {
          if (GET_CODE (XEXP (dest, 1)) == CONST_INT
              && GET_CODE (XEXP (dest, 2)) == CONST_INT)
            {
              width = INTVAL (XEXP (dest, 1));
              offset = INTVAL (XEXP (dest, 2));
              dest = XEXP (dest, 0);
              if (BITS_BIG_ENDIAN)
                offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset;
            }
        }
      else
        {
          if (GET_CODE (dest) == STRICT_LOW_PART)
            dest = XEXP (dest, 0);
          width = GET_MODE_BITSIZE (GET_MODE (dest));
          offset = 0;
        }

      if (offset >= 0)
        {
          /* If this is the low part, we're done.  */
          if (subreg_lowpart_p (dest))
            ;
          /* Handle the case where inner is twice the size of outer.  */
          else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
                   == 2 * GET_MODE_BITSIZE (GET_MODE (dest)))
            offset += GET_MODE_BITSIZE (GET_MODE (dest));
          /* Otherwise give up for now.  */
          else
            offset = -1;
        }

      if (offset >= 0
          && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
              <= HOST_BITS_PER_WIDE_INT * 2))
        {
          HOST_WIDE_INT mhi, ohi, ihi;
          HOST_WIDE_INT mlo, olo, ilo;
          rtx inner = SET_SRC (PATTERN (i3));
          rtx outer = SET_SRC (temp);

          if (GET_CODE (outer) == CONST_INT)
            {
              olo = INTVAL (outer);
              ohi = olo < 0 ? -1 : 0;
            }
          else
            {
              olo = CONST_DOUBLE_LOW (outer);
              ohi = CONST_DOUBLE_HIGH (outer);
            }

          if (GET_CODE (inner) == CONST_INT)
            {
              ilo = INTVAL (inner);
              ihi = ilo < 0 ? -1 : 0;
            }
          else
            {
              ilo = CONST_DOUBLE_LOW (inner);
              ihi = CONST_DOUBLE_HIGH (inner);
            }

          if (width < HOST_BITS_PER_WIDE_INT)
            {
              mlo = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
              mhi = 0;
            }
          else if (width < HOST_BITS_PER_WIDE_INT * 2)
            {
              mhi = ((unsigned HOST_WIDE_INT) 1
                     << (width - HOST_BITS_PER_WIDE_INT)) - 1;
              mlo = -1;
            }
          else
            {
              mlo = -1;
              mhi = -1;
            }

          ilo &= mlo;
          ihi &= mhi;

          if (offset >= HOST_BITS_PER_WIDE_INT)
            {
              mhi = mlo << (offset - HOST_BITS_PER_WIDE_INT);
              mlo = 0;
              ihi = ilo << (offset - HOST_BITS_PER_WIDE_INT);
              ilo = 0;
            }
          else if (offset > 0)
            {
              mhi = (mhi << offset) | ((unsigned HOST_WIDE_INT) mlo
                                       >> (HOST_BITS_PER_WIDE_INT - offset));
              mlo = mlo << offset;
              ihi = (ihi << offset) | ((unsigned HOST_WIDE_INT) ilo
                                       >> (HOST_BITS_PER_WIDE_INT - offset));
              ilo = ilo << offset;
            }

          olo = (olo & ~mlo) | ilo;
          ohi = (ohi & ~mhi) | ihi;

          combine_merges++;
          subst_insn = i3;
          subst_low_luid = DF_INSN_LUID (i2);
          added_sets_2 = added_sets_1 = 0;
          i2dest = SET_DEST (temp);
          i2dest_killed = dead_or_set_p (i2, i2dest);

          SUBST (SET_SRC (temp),
                 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp))));

          newpat = PATTERN (i2);
          goto validate_replacement;
        }
    }
#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
          == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
                      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
        if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
          break;

      if (i == 1)
        {
          /* We make I1 with the same INSN_UID as I2.  This gives it
             the same DF_INSN_LUID for value tracking.  Our fake I1 will
             never appear in the insn stream so giving it the same INSN_UID
             as I2 will not cause a problem.  */

          i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
                             BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
                             XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX);

          SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
          SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
                 SET_DEST (PATTERN (i1)));
        }
    }
#endif
  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }
  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
  i2dest_killed = dead_or_set_p (i2, i2dest);
  i1dest_killed = i1 && dead_or_set_p (i1, i1dest);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
                          i1 && i2dest_in_i1src && i1_feeds_i3,
                          &i3dest_killed))
    {
      undo_all ();
      return 0;
    }
  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
          && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;
  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like

	mov r3,(r3)+

     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
        && REG_P (SET_SRC (PATTERN (i3)))
        && MEM_P (SET_DEST (PATTERN (i3)))
        && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
            || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
          && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
              || (i1 != 0
                  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
        {
          undo_all ();
          return 0;
        }
#endif
  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
               : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  if (added_sets_2)
    {
      if (GET_CODE (PATTERN (i2)) == PARALLEL)
        i2pat = gen_rtx_SET (VOIDmode, i2dest, copy_rtx (i2src));
      else
        i2pat = copy_rtx (PATTERN (i2));
    }

  if (added_sets_1)
    {
      if (GET_CODE (PATTERN (i1)) == PARALLEL)
        i1pat = gen_rtx_SET (VOIDmode, i1dest, copy_rtx (i1src));
      else
        i1pat = copy_rtx (PATTERN (i1));
    }
  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;
#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.   Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef SELECT_CC_MODE
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef SELECT_CC_MODE
      /* See if a COMPARE with the operand we substituted in should be done
         with the mode that is currently being used.  If not, do the same
         processing we do in `subst' for a SET; namely, if the destination
         is used only once, try to replace it with a register of the proper
         mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
          && (cc_use = find_single_use (SET_DEST (newpat), i3,
                                        &undobuf.other_insn))
          && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
                                              i2src, const0_rtx))
              != GET_MODE (SET_DEST (newpat))))
        {
          if (can_change_dest_mode (SET_DEST (newpat), added_sets_2,
                                    compare_mode))
            {
              unsigned int regno = REGNO (SET_DEST (newpat));
              rtx new_dest;

              if (regno < FIRST_PSEUDO_REGISTER)
                new_dest = gen_rtx_REG (compare_mode, regno);
              else
                {
                  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
                  new_dest = regno_reg_rtx[regno];
                }

              SUBST (SET_DEST (newpat), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              SUBST (SET_SRC (newpat),
                     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
            }
          else
            undobuf.other_insn = 0;
        }
#endif
    }
  else
#endif
    {
      /* It is possible that the source of I2 or I1 may be performing
         an unneeded operation, such as a ZERO_EXTEND of something
         that is known to have the high part zero.  Handle that case
         by letting subst look at the innermost one of them.

         Another way to do this would be to have a function that tries
         to simplify a single insn instead of merging two or more
         insns.  We don't do this because of the potential of infinite
         loops and because of the potential extra memory required.
         However, doing it the way we are is a bit of a kludge and
         doesn't catch all cases.

         But only do this if -fexpensive-optimizations since it slows
         things down and doesn't usually win.

         This is not done in the COMPARE case above because the
         unmodified I2PAT is used in the PARALLEL and so a pattern
         with a modified I2SRC would not match.  */

      if (flag_expensive_optimizations)
        {
          /* Pass pc_rtx so no substitutions are done, just
             simplifications.  */
          if (i1)
            {
              subst_low_luid = DF_INSN_LUID (i1);
              i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
            }
          else
            {
              subst_low_luid = DF_INSN_LUID (i2);
              i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
            }
        }
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
         need to make a unique copy of I2SRC each time we substitute it
         to avoid self-referential rtl.  */

      subst_low_luid = DF_INSN_LUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
                      ! i1_feeds_i3 && i1dest_in_i1src);
      substed_i2 = 1;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }
  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Check that an autoincrement side-effect on I1 has not been lost.
         This happens if I1DEST is mentioned in I2 and dies there, and
         has disappeared from the new pattern.  */
      if ((FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
           && !i1_feeds_i3
           && dead_or_set_p (i2, i1dest)
           && !reg_overlap_mentioned_p (i1dest, newpat))
          /* Before we can do this substitution, we must redo the test done
             above (see detailed comments there) that ensures that I1DEST
             isn't mentioned in any SETs in NEWPAT that are field assignments.  */
          || !combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, 0, 0))
        {
          undo_all ();
          return 0;
        }

      n_occurrences = 0;
      subst_low_luid = DF_INSN_LUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      substed_i1 = 1;
    }
  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
          && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
              > 1))
      /* Fail if we tried to make a new register.  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
         at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
          && ! have_mult))
    {
      undo_all ();
      return 0;
    }
  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
        {
          rtvec old = XVEC (newpat, 0);
          total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
                  sizeof (old->elem[0]) * old->num_elem);
        }
      else
        {
          rtx old = newpat;
          total_sets = 1 + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          XVECEXP (newpat, 0, 0) = old;
        }

      if (added_sets_1)
        XVECEXP (newpat, 0, --total_sets) = i1pat;

      if (added_sets_2)
        {
          /* If there is no I1, use I2's body as is.  We used to also not do
             the subst call below if I2 was substituted into I3,
             but that could lose a simplification.  */
          if (i1 == 0)
            XVECEXP (newpat, 0, --total_sets) = i2pat;
          else
            /* See comment where i2pat is assigned.  */
            XVECEXP (newpat, 0, --total_sets)
              = subst (i2pat, i1dest, i1src, 0, 0);
        }
    }
  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);
  /* If recog_for_combine fails, it strips existing clobbers.  If we'll
     consider splitting this pattern, we might need these clobbers.  */
  if (i1 && GET_CODE (newpat) == PARALLEL
      && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
    {
      int len = XVECLEN (newpat, 0);

      newpat_vec_with_clobbers = rtvec_alloc (len);
      for (i = 0; i < len; i++)
        RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
    }
  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused and isn't
     marked as an instruction that might trap in an EH region.  In that case,
     we just need the first SET.   This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.

     It's pointless doing this if we originally had two sets, one from
     i3, and one from i2.  Combining then splitting the parallel results
     in the original i2 again plus an invalid insn (which we delete).
     The net effect is only to move instructions around, which makes
     debug info less accurate.

     Also check the case where the first SET's destination is unused.
     That would not cause incorrect code, but does cause an unneeded
     insn to remain.  */

  if (insn_code_number < 0
      && !(added_sets_2 && i1 == 0)
      && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx set0 = XVECEXP (newpat, 0, 0);
      rtx set1 = XVECEXP (newpat, 0, 1);
      rtx note;

      if (((REG_P (SET_DEST (set1))
            && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
           || (GET_CODE (SET_DEST (set1)) == SUBREG
               && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
          && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
              || INTVAL (XEXP (note, 0)) <= 0)
          && ! side_effects_p (SET_SRC (set1)))
        {
          newpat = set0;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }

      else if (((REG_P (SET_DEST (set0))
                 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
                || (GET_CODE (SET_DEST (set0)) == SUBREG
                    && find_reg_note (i3, REG_UNUSED,
                                      SUBREG_REG (SET_DEST (set0)))))
               && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
                   || INTVAL (XEXP (note, 0)) <= 0)
               && ! side_effects_p (SET_SRC (set0)))
        {
          newpat = set1;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

          if (insn_code_number >= 0)
            {
              /* If we will be able to accept this, we have made a
                 change to the destination of I3.  This requires us to
                 do a few adjustments.  */

              PATTERN (i3) = newpat;
              adjust_for_new_dest (i3);
            }
        }
    }
  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx parallel, m_split, *split;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
         use I2DEST as a scratch register will help.  In the latter case,
         convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = combine_split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
         inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
         possible to try that as a scratch reg.  This would require adding
         more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
        {
          enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));

          /* First try to split using the original register as a
             scratch register.  */
          parallel = gen_rtx_PARALLEL (VOIDmode,
                                       gen_rtvec (2, newpat,
                                                  gen_rtx_CLOBBER (VOIDmode,
                                                                   i2dest)));
          m_split = combine_split_insns (parallel, i3);

          /* If that didn't work, try changing the mode of I2DEST if
             we can.  */
          if (m_split == 0
              && new_mode != GET_MODE (i2dest)
              && new_mode != VOIDmode
              && can_change_dest_mode (i2dest, added_sets_2, new_mode))
            {
              enum machine_mode old_mode = GET_MODE (i2dest);
              rtx ni2dest;

              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
                ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
              else
                {
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
                  ni2dest = regno_reg_rtx[REGNO (i2dest)];
                }

              parallel = (gen_rtx_PARALLEL
                          (VOIDmode,
                           gen_rtvec (2, newpat,
                                      gen_rtx_CLOBBER (VOIDmode,
                                                       ni2dest))));
              m_split = combine_split_insns (parallel, i3);

              if (m_split == 0
                  && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
                {
                  struct undo *buf;

                  adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
                  buf = undobuf.undos;
                  undobuf.undos = buf->next;
                  buf->next = undobuf.frees;
                  undobuf.frees = buf;
                }
            }
        }
      /* If recog_for_combine has discarded clobbers, try to use them
         again for the split.  */
      if (m_split == 0 && newpat_vec_with_clobbers)
        {
          parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
          m_split = combine_split_insns (parallel, i3);
        }
      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
        {
          m_split = PATTERN (m_split);
          insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = m_split;
        }
      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
               && (next_real_insn (i2) == i3
                   || ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
        {
          rtx i2set, i3set;
          rtx newi3pat = PATTERN (NEXT_INSN (m_split));
          newi2pat = PATTERN (m_split);

          i3set = single_set (NEXT_INSN (m_split));
          i2set = single_set (m_split);

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* If I2 or I3 has multiple SETs, we won't know how to track
             register status, so don't use these insns.  If I2's destination
             is used between I2 and I3, we also can't use these insns.  */

          if (i2_code_number >= 0 && i2set && i3set
              && (next_real_insn (i2) == i3
                  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
            insn_code_number = recog_for_combine (&newi3pat, i3,
                                                  &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = newi3pat;

          /* It is possible that both insns now set the destination of I3.
             If so, we must show an extra use of it.  */

          if (insn_code_number >= 0)
            {
              rtx new_i3_dest = SET_DEST (i3set);
              rtx new_i2_dest = SET_DEST (i2set);

              while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i3_dest) == SUBREG)
                new_i3_dest = XEXP (new_i3_dest, 0);

              while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i2_dest) == SUBREG)
                new_i2_dest = XEXP (new_i2_dest, 0);

              if (REG_P (new_i3_dest)
                  && REG_P (new_i2_dest)
                  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
                INC_REG_N_SETS (REGNO (new_i2_dest), 1);
            }
        }
      /* If we can split it and use I2DEST, go ahead and see if that
         helps things be recognized.  Verify that none of the registers
         are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
          && REG_P (i2dest)
#endif
          /* We need I2DEST in the proper mode.  If it is a hard register
             or the only use of a pseudo, we can change its mode.
             Make sure we don't change a hard register to have a mode that
             isn't valid for it, or change the number of registers.  */
          && (GET_MODE (*split) == GET_MODE (i2dest)
              || GET_MODE (*split) == VOIDmode
              || can_change_dest_mode (i2dest, added_sets_2,
                                       GET_MODE (*split)))
          && (next_real_insn (i2) == i3
              || ! use_crosses_set_p (*split, DF_INSN_LUID (i2)))
          /* We can't overwrite I2DEST if its value is still used by
             NEWPAT.  */
          && ! reg_referenced_p (i2dest, newpat))
        {
          rtx newdest = i2dest;
          enum rtx_code split_code = GET_CODE (*split);
          enum machine_mode split_mode = GET_MODE (*split);
          bool subst_done = false;
          newi2pat = NULL_RTX;

          /* Get NEWDEST as a register in the proper mode.  We have already
             validated that we can do this.  */
          if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
            {
              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
                newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
              else
                {
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
                  newdest = regno_reg_rtx[REGNO (i2dest)];
                }
            }
          /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
             an ASHIFT.  This can occur if it was inside a PLUS and hence
             appeared to be a memory address.  This is a kludge.  */
          if (split_code == MULT
              && GET_CODE (XEXP (*split, 1)) == CONST_INT
              && INTVAL (XEXP (*split, 1)) > 0
              && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
            {
              SUBST (*split, gen_rtx_ASHIFT (split_mode,
                                             XEXP (*split, 0), GEN_INT (i)));
              /* Update split_code because we may not have a multiply
                 anymore.  */
              split_code = GET_CODE (*split);
            }
#ifdef INSN_SCHEDULING
          /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
             be written as a ZERO_EXTEND.  */
          if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
            {
#ifdef LOAD_EXTEND_OP
              /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
                 what it really is.  */
              if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
                  == SIGN_EXTEND)
                SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
              else
#endif
                SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
            }
#endif
          /* Attempt to split binary operators using arithmetic identities.  */
          if (BINARY_P (SET_SRC (newpat))
              && split_mode == GET_MODE (SET_SRC (newpat))
              && ! side_effects_p (SET_SRC (newpat)))
            {
              rtx setsrc = SET_SRC (newpat);
              enum machine_mode mode = GET_MODE (setsrc);
              enum rtx_code code = GET_CODE (setsrc);
              rtx src_op0 = XEXP (setsrc, 0);
              rtx src_op1 = XEXP (setsrc, 1);

              /* Split "X = Y op Y" as "Z = Y; X = Z op Z".  */
              if (rtx_equal_p (src_op0, src_op1))
                {
                  newi2pat = gen_rtx_SET (VOIDmode, newdest, src_op0);
                  SUBST (XEXP (setsrc, 0), newdest);
                  SUBST (XEXP (setsrc, 1), newdest);
                  subst_done = true;
                }
              /* Split "((P op Q) op R) op S" where op is PLUS or MULT.  */
              else if ((code == PLUS || code == MULT)
                       && GET_CODE (src_op0) == code
                       && GET_CODE (XEXP (src_op0, 0)) == code
                       && (INTEGRAL_MODE_P (mode)
                           || (FLOAT_MODE_P (mode)
                               && flag_unsafe_math_optimizations)))
                {
                  rtx p = XEXP (XEXP (src_op0, 0), 0);
                  rtx q = XEXP (XEXP (src_op0, 0), 1);
                  rtx r = XEXP (src_op0, 1);
                  rtx s = src_op1;

                  /* Split both "((X op Y) op X) op Y" and
                     "((X op Y) op Y) op X" as "T op T" where T is
                     "X op Y".  */
                  if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
                      || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
                    {
                      newi2pat = gen_rtx_SET (VOIDmode, newdest,
                                              XEXP (src_op0, 0));
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                  /* Split "((X op X) op Y) op Y)" as "T op T" where
                     T is "X op Y".  */
                  else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
                    {
                      rtx tmp = simplify_gen_binary (code, mode, p, r);
                      newi2pat = gen_rtx_SET (VOIDmode, newdest, tmp);
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                }
            }

          if (!subst_done)
            {
              newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
              SUBST (*split, newdest);
            }
          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* recog_for_combine might have added CLOBBERs to newi2pat.
             Make sure NEWPAT does not depend on the clobbered regs.  */
          if (GET_CODE (newi2pat) == PARALLEL)
            for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
                {
                  rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
                  if (reg_overlap_mentioned_p (reg, newpat))
                    {
                      undo_all ();
                      return 0;
                    }
                }
          /* If the split point was a MULT and we didn't have one before,
             don't use one now.  */
          if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
            insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
    }
  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the first assignment is a
     condition code register or cc0.  We eliminate this case by making sure
     the SET_DEST and SET_SRC have the same mode.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
           && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
               == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                           XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   DF_INSN_LUID (i2))
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
                 (REG_P (temp)
                  && VEC_index (reg_stat_type, reg_stat,
                                REGNO (temp))->nonzero_bits != 0
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                  && (VEC_index (reg_stat_type, reg_stat,
                                 REGNO (temp))->nonzero_bits
                      != GET_MODE_MASK (word_mode))))
           && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
                 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
                     (REG_P (temp)
                      && VEC_index (reg_stat_type, reg_stat,
                                    REGNO (temp))->nonzero_bits != 0
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                      && (VEC_index (reg_stat_type, reg_stat,
                                     REGNO (temp))->nonzero_bits
                          != GET_MODE_MASK (word_mode)))))
           && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                         SET_SRC (XVECEXP (newpat, 0, 1)))
           && ! find_reg_note (i3, REG_UNUSED,
                               SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
             gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

      if (insn_code_number >= 0)
        swap_i2i3 = 1;
    }
  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   DF_INSN_LUID (i2))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                  XVECEXP (newpat, 0, 0))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
                                  XVECEXP (newpat, 0, 1))
           && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
                 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))
#ifdef HAVE_cc0
           /* We cannot split the parallel into two sets if both sets
              reference cc0.  */
           && ! (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))
                 && reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 1)))
#endif
           )
    {
      /* Normally, it doesn't matter which of the two is done first,
         but it does if one references cc0.  In that case, it has to
         be done first.  */
#ifdef HAVE_cc0
      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
        {
          newi2pat = XVECEXP (newpat, 0, 0);
          newpat = XVECEXP (newpat, 0, 1);
        }
      else
#endif
        {
          newi2pat = XVECEXP (newpat, 0, 1);
          newpat = XVECEXP (newpat, 0, 0);
        }

      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }
  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }
  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_pat = PATTERN (undobuf.other_insn);
      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
                                             &new_other_notes);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
        {
          undo_all ();
          return 0;
        }
    }
#ifdef HAVE_cc0
  /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
     they are adjacent to each other or not.  */
  {
    rtx p = prev_nonnote_insn (i3);
    if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
        && sets_cc0_p (newi2pat))
      {
        undo_all ();
        return 0;
      }
  }
#endif
  /* Only allow this combination if insn_rtx_costs reports that the
     replacement instructions are cheaper than the originals.  */
  if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat, other_pat))
    {
      undo_all ();
      return 0;
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */
  if (undobuf.other_insn)
    {
      rtx note, next;

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
         are still valid.  Then add any non-duplicate notes added by
         recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
        {
          next = XEXP (note, 1);

          if (REG_NOTE_KIND (note) == REG_UNUSED
              && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
            remove_note (undobuf.other_insn, note);
        }

      distribute_notes (new_other_notes, undobuf.other_insn,
                        undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }
  if (swap_i2i3)
    {
      rtx insn;
      rtx link;
      rtx ni2dest;

      /* I3 now uses what used to be its destination and which is now
         I2's destination.  This requires us to do a few adjustments.  */
      PATTERN (i3) = newpat;
      adjust_for_new_dest (i3);

      /* We need a LOG_LINK from I3 to I2.  But we used to have one,
         so we still will.

         However, some later insn might be using I2's dest and have
         a LOG_LINK pointing at I3.  We must remove this link.
         The simplest way to remove the link is to point it at I1,
         which we know will be a NOTE.  */

      /* newi2pat is usually a SET here; however, recog_for_combine might
         have added some clobbers.  */
      if (GET_CODE (newi2pat) == PARALLEL)
        ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
      else
        ni2dest = SET_DEST (newi2pat);

      for (insn = NEXT_INSN (i3);
           insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                    || insn != BB_HEAD (this_basic_block->next_bb));
           insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
            {
              for (link = LOG_LINKS (insn); link;
                   link = XEXP (link, 1))
                if (XEXP (link, 0) == i3)
                  XEXP (link, 0) = i1;

              break;
            }
        }
    }
  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    unsigned int regno;

    /* Compute which registers we expect to eliminate.  newi2pat may be setting
       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
       same as i3dest, in which case newi2pat may be setting i1dest.  */
    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
                   || i2dest_in_i2src || i2dest_in_i1src
                   || !i2dest_killed
                   ? 0 : i2dest);
    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
                   || (newi2pat && reg_set_p (i1dest, newi2pat))
                   || !i1dest_killed
                   ? 0 : i1dest);

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));
    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;

    if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
      {
        rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);

        reset_used_flags (call_usage);
        call_usage = copy_rtx (call_usage);

        if (substed_i2)
          replace_rtx (call_usage, i2dest, i2src);

        if (substed_i1)
          replace_rtx (call_usage, i1dest, i1src);

        CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
      }
    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */

    if (i3_subst_into_i2)
      {
        for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
          if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
               || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
              && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
              && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
              && ! find_reg_note (i2, REG_UNUSED,
                                  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
            for (temp = NEXT_INSN (i2);
                 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                          || BB_HEAD (this_basic_block) != temp);
                 temp = NEXT_INSN (temp))
              if (temp != i3 && INSN_P (temp))
                for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
                  if (XEXP (link, 0) == i2)
                    XEXP (link, 0) = i3;

        if (i3notes)
          {
            rtx link = i3notes;
            while (XEXP (link, 1))
              link = XEXP (link, 1);
            XEXP (link, 1) = i2notes;
          }
        else
          i3notes = i2notes;
        i2notes = 0;
      }
    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
        INSN_CODE (i2) = i2_code_number;
        PATTERN (i2) = newi2pat;
      }
    else
      SET_INSN_DELETED (i2);

    if (i1)
      {
        LOG_LINKS (i1) = 0;
        REG_NOTES (i1) = 0;
        SET_INSN_DELETED (i1);
      }
    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */

    if (newi2pat)
      {
        move_deaths (newi2pat, NULL_RTX, DF_INSN_LUID (i1), i2, &midnotes);
        move_deaths (newpat, newi2pat, DF_INSN_LUID (i1), i3, &midnotes);
      }
    else
      move_deaths (newpat, NULL_RTX, i1 ? DF_INSN_LUID (i1) : DF_INSN_LUID (i2),
                   i3, &midnotes);
    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  */

    if (newi2pat && new_i2_notes)
      distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);

    if (new_i3_notes)
      distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
       in that case, it might delete I2.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
        if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            elim_i2, elim_i1);
      }
    if (i2dest_in_i2src)
      {
        if (newi2pat && reg_set_p (i2dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
        if (newi2pat && reg_set_p (i1dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }
    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);
    if (REG_P (i2dest))
      {
        rtx i2_insn = 0, i2_val = 0, set;

        /* The insn that used to set this register doesn't exist, and
           this life of the register may not exist either.  See if one of
           I3's links points to an insn that sets I2DEST.  If it does,
           that is now the last known value for I2DEST.  If we don't update
           this and I2 set the register to a value that depended on its old
           contents, we will get confused.  If this insn is used, things
           will be set correctly in combine_instructions.  */

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i2dest, SET_DEST (set)))
            i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

        record_value_for_reg (i2dest, i2_insn, i2_val);

        /* If the reg formerly set in I2 died only once and that was in I3,
           zero its use count so it won't make `reload' do any work.  */
        if (! added_sets_2
            && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
            && ! i2dest_in_i2src)
          {
            regno = REGNO (i2dest);
            INC_REG_N_SETS (regno, -1);
          }
      }
    if (i1 && REG_P (i1dest))
      {
        rtx link;
        rtx i1_insn = 0, i1_val = 0, set;

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i1dest, SET_DEST (set)))
            i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

        record_value_for_reg (i1dest, i1_insn, i1_val);

        regno = REGNO (i1dest);
        if (! added_sets_1 && ! i1dest_in_i1src)
          INC_REG_N_SETS (regno, -1);
      }
    /* Update reg_stat[].nonzero_bits et al for any changes that may have
       been made to this insn.  The order of the
       set_nonzero_bits_and_sign_copies() calls is important, because
       newi2pat can affect nonzero_bits of newpat.  */
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
    /* Set new_direct_jump_p if a new return or simple jump instruction
       has been created.

       If I3 is now an unconditional jump, ensure that it has a
       BARRIER following it since it may have initially been a
       conditional jump.  It may also be the last nonnote insn.  */

    if (returnjump_p (i3) || any_uncondjump_p (i3))
      {
        *new_direct_jump_p = 1;
        mark_jump_label (PATTERN (i3), i3, 0);

        if ((temp = next_nonnote_insn (i3)) == NULL_RTX
            || !BARRIER_P (temp))
          emit_barrier_after (i3);
      }

    if (undobuf.other_insn != NULL_RTX
        && (returnjump_p (undobuf.other_insn)
            || any_uncondjump_p (undobuf.other_insn)))
      {
        *new_direct_jump_p = 1;

        if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
            || !BARRIER_P (temp))
          emit_barrier_after (undobuf.other_insn);
      }
    /* A NOOP jump does not need a barrier, but it does need cleaning up
       of the CFG.  */
    if (GET_CODE (newpat) == SET
        && SET_SRC (newpat) == pc_rtx
        && SET_DEST (newpat) == pc_rtx)
      *new_direct_jump_p = 1;
    if (undobuf.other_insn != NULL_RTX)
      {
        if (dump_file)
          {
            fprintf (dump_file, "modifying other_insn ");
            dump_insn_slim (dump_file, undobuf.other_insn);
          }
        df_insn_rescan (undobuf.other_insn);
      }

    if (i1 && !(NOTE_P (i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
      {
        if (dump_file)
          {
            fprintf (dump_file, "modifying insn i1 ");
            dump_insn_slim (dump_file, i1);
          }
        df_insn_rescan (i1);
      }

    if (i2 && !(NOTE_P (i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
      {
        if (dump_file)
          {
            fprintf (dump_file, "modifying insn i2 ");
            dump_insn_slim (dump_file, i2);
          }
        df_insn_rescan (i2);
      }

    if (i3 && !(NOTE_P (i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
      {
        if (dump_file)
          {
            fprintf (dump_file, "modifying insn i3 ");
            dump_insn_slim (dump_file, i3);
          }
        df_insn_rescan (i3);
      }
  }

  combine_successes++;
  undo_commit ();

  if (added_links_insn
      && (newi2pat == 0 || DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i2))
      && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i3))
    return added_links_insn;
  else
    return newi2pat ? i2 : i3;
}
/* Undo all the modifications recorded in undobuf.  */

static void
undo_all (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      switch (undo->kind)
        {
        case UNDO_RTX:
          *undo->where.r = undo->old_contents.r;
          break;
        case UNDO_INT:
          *undo->where.i = undo->old_contents.i;
          break;
        case UNDO_MODE:
          adjust_reg_mode (*undo->where.r, undo->old_contents.m);
          break;
        default:
          gcc_unreachable ();
        }

      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }

  undobuf.undos = 0;
}
/* We've committed to accepting the changes we made.  Move all
   of the undos to the free list.  */

static void
undo_commit (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }
  undobuf.undos = 0;
}
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (rtx *loc, rtx insn)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  unsigned HOST_WIDE_INT len = 0;
  HOST_WIDE_INT pos = 0;
  int unsignedp = 0;
  rtx inner = NULL_RTX;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
         point.  */
      if (MEM_P (SUBREG_REG (x)))
        return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
         using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
          || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
        {
          SUBST (XEXP (x, 0),
                 gen_rtx_LO_SUM (Pmode,
                                 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
                                 XEXP (x, 0)));
          return &XEXP (XEXP (x, 0), 0);
        }
#endif
      /* If we have a PLUS whose second operand is a constant and the
         address is not valid, perhaps we can split it up using
         the machine-specific way to split large constants.  We use
         the first pseudo-reg (one of the virtual regs) as a placeholder;
         it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        {
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
          rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
                                                      XEXP (x, 0)),
                                         subst_insn);

          /* This should have produced two insns, each of which sets our
             placeholder.  If the source of the second is a valid address,
             we can put both sources together and make a split point
             in the middle.  */

          if (seq
              && NEXT_INSN (seq) != NULL_RTX
              && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
              && NONJUMP_INSN_P (seq)
              && GET_CODE (PATTERN (seq)) == SET
              && SET_DEST (PATTERN (seq)) == reg
              && ! reg_mentioned_p (reg,
                                    SET_SRC (PATTERN (seq)))
              && NONJUMP_INSN_P (NEXT_INSN (seq))
              && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
              && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
              && memory_address_p (GET_MODE (x),
                                   SET_SRC (PATTERN (NEXT_INSN (seq)))))
            {
              rtx src1 = SET_SRC (PATTERN (seq));
              rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));

              /* Replace the placeholder in SRC2 with SRC1.  If we can
                 find where in SRC2 it was placed, that can become our
                 split point and we can replace this address with SRC2.
                 Just try two obvious places.  */

              src2 = replace_rtx (src2, reg, src1);
              split = 0;
              if (XEXP (src2, 0) == src1)
                split = &XEXP (src2, 0);
              else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
                       && XEXP (XEXP (src2, 0), 0) == src1)
                split = &XEXP (XEXP (src2, 0), 0);

              if (split)
                {
                  SUBST (XEXP (x, 0), src2);
                  return split;
                }
            }

          /* If that didn't work, perhaps the first operand is complex and
             needs to be computed separately, so make a split point there.
             This will occur on machines that just support REG + CONST
             and have a constant moved through some previous computation.  */

          else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
                   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                         && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
            return &XEXP (XEXP (x, 0), 0);
        }

      /* If we have a PLUS whose first operand is complex, try computing it
         separately by making a split there.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0))
          && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
          && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
        return &XEXP (XEXP (x, 0), 0);
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
         ZERO_EXTRACT, the most likely reason why this doesn't match is that
         we need to put the operand into a register.  So split at that
         point.  */

      if (SET_DEST (x) == cc0_rtx
          && GET_CODE (SET_SRC (x)) != COMPARE
          && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
          && !OBJECT_P (SET_SRC (x))
          && ! (GET_CODE (SET_SRC (x)) == SUBREG
                && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
        return &SET_SRC (x);
#endif
      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
        return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
        return split;
      /* See if this is a bitfield assignment with everything constant.  If
         so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
          && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
          && GET_CODE (SET_SRC (x)) == CONST_INT
          && ((INTVAL (XEXP (SET_DEST (x), 1))
               + INTVAL (XEXP (SET_DEST (x), 2)))
              <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
          && ! side_effects_p (XEXP (SET_DEST (x), 0)))
        {
          HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
          unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
          unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
          rtx dest = XEXP (SET_DEST (x), 0);
          enum machine_mode mode = GET_MODE (dest);
          unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
          rtx or_mask;

          if (BITS_BIG_ENDIAN)
            pos = GET_MODE_BITSIZE (mode) - len - pos;

          or_mask = gen_int_mode (src << pos, mode);
          if (src == mask)
            SUBST (SET_SRC (x),
                   simplify_gen_binary (IOR, mode, dest, or_mask));
          else
            {
              rtx negmask = gen_int_mode (~(mask << pos), mode);
              SUBST (SET_SRC (x),
                     simplify_gen_binary (IOR, mode,
                                          simplify_gen_binary (AND, mode,
                                                               dest, negmask),
                                          or_mask));
            }

          SUBST (SET_DEST (x), dest);

          split = find_split_point (&SET_SRC (x), insn);
          if (split && split != &SET_SRC (x))
            return split;
        }
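      /* As a worked example of the rewrite above: storing the constant 5
         into a 4-bit field at bit 8 of DEST becomes, in effect,
         (set DEST (ior (and DEST (const_int ~(15 << 8)))
                        (const_int (5 << 8)))),
         with the AND omitted when the constant fills the entire field.
         (Illustrative; shown before any BITS_BIG_ENDIAN correction.)  */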
      /* Otherwise, see if this is an operation that we can split into two.
         If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
        {
        case AND:
          /* If we are AND'ing with a large constant that is only a single
             bit and the result is only being used in a context where we
             need to know if it is zero or nonzero, replace it with a bit
             extraction.  This will avoid the large constant, which might
             have taken more than one insn to make.  If the constant were
             not a valid argument to the AND but took only one insn to make,
             this is no worse, but if it took more than one insn, it will
             be better.  */

          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && REG_P (XEXP (SET_SRC (x), 0))
              && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
              && REG_P (SET_DEST (x))
              && (split = find_single_use (SET_DEST (x), insn, (rtx *) 0)) != 0
              && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
              && XEXP (*split, 0) == SET_DEST (x)
              && XEXP (*split, 1) == const0_rtx)
            {
              rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
                                                XEXP (SET_SRC (x), 0),
                                                pos, NULL_RTX, 1, 1, 0, 0);
              if (extraction != 0)
                {
                  SUBST (SET_SRC (x), extraction);
                  return find_split_point (loc, insn);
                }
            }
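          /* For example, if (set D (and X (const_int 0x1000))) is used
             only in (ne D (const_int 0)), the AND is replaced here by
             (zero_extract X (const_int 1) (const_int 12)), avoiding the
             large constant.  Illustrative only; whether this wins is
             target-dependent.  */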
          break;

        case NE:
          /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
             is known to be on, this can be converted into a NEG of a shift.  */
          if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
              && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
              && 1 <= (pos = exact_log2
                       (nonzero_bits (XEXP (SET_SRC (x), 0),
                                      GET_MODE (XEXP (SET_SRC (x), 0))))))
            {
              enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

              SUBST (SET_SRC (x),
                     gen_rtx_NEG (mode,
                                  gen_rtx_LSHIFTRT (mode,
                                                    XEXP (SET_SRC (x), 0),
                                                    GEN_INT (pos))));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
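          /* E.g. with STORE_FLAG_VALUE == -1, if only bit 3 of X can be
             nonzero, (ne X (const_int 0)) becomes
             (neg (lshiftrt X (const_int 3))), which is 0 or -1 as
             required.  Illustrative only.  */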
          break;

        case SIGN_EXTEND:
          inner = XEXP (SET_SRC (x), 0);

          /* We can't optimize if either mode is a partial integer
             mode as we don't know how many bits are significant
             in those modes.  */
          if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
              || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
            break;

          pos = 0;
          len = GET_MODE_BITSIZE (GET_MODE (inner));
          unsignedp = 0;
          break;

        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
            {
              inner = XEXP (SET_SRC (x), 0);
              len = INTVAL (XEXP (SET_SRC (x), 1));
              pos = INTVAL (XEXP (SET_SRC (x), 2));

              if (BITS_BIG_ENDIAN)
                pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
              unsignedp = (code == ZERO_EXTRACT);
            }
          break;

        default:
          break;
        }

      if (len && pos >= 0
          && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
        {
          enum machine_mode mode = GET_MODE (SET_SRC (x));

          /* For unsigned, we have a choice of a shift followed by an
             AND or two shifts.  Use two shifts for field sizes where the
             constant might be too large.  We assume here that we can
             always at least get 8-bit constants in an AND insn, which is
             true for every current RISC.  */

          if (unsignedp && len <= 8)
            {
              SUBST (SET_SRC (x),
                     gen_rtx_AND (mode,
                                  gen_rtx_LSHIFTRT
                                  (mode, gen_lowpart (mode, inner),
                                   GEN_INT (pos)),
                                  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          else
            {
              SUBST (SET_SRC (x),
                     gen_rtx_fmt_ee
                     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
                      gen_rtx_ASHIFT (mode,
                                      gen_lowpart (mode, inner),
                                      GEN_INT (GET_MODE_BITSIZE (mode)
                                               - len - pos)),
                      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
        }
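      /* Illustration of the two choices above, assuming a 32-bit mode:
         an unsigned 5-bit field at bit 3 becomes
         (and (lshiftrt INNER (const_int 3)) (const_int 31)), while a
         wider or signed field instead becomes
         (ashiftrt (ashift INNER (const_int 32 - len - pos))
                   (const_int 32 - len))
         (LSHIFTRT in place of ASHIFTRT for the unsigned case).  */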
      /* See if this is a simple operation with a constant as the second
         operand.  It might be that this constant is out of range and hence
         could be used as a split point.  */
      if (BINARY_P (SET_SRC (x))
          && CONSTANT_P (XEXP (SET_SRC (x), 1))
          && (OBJECT_P (XEXP (SET_SRC (x), 0))
              || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
        return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
         not in a register.  The operation might require this operand in a
         register, so return it as a split point.  We can always do this
         because if the first operand were another operation, we would have
         already found it as a split point.  */
      if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
          && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
        return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
         it is better to write this as (not (ior A B)) so we can split it.
         Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
        {
          SUBST (*loc,
                 gen_rtx_NOT (GET_MODE (x),
                              gen_rtx_fmt_ee (code == IOR ? AND : IOR,
                                              GET_MODE (x),
                                              XEXP (XEXP (x, 0), 0),
                                              XEXP (XEXP (x, 1), 0))));
          return find_split_point (loc, insn);
        }
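      /* The rewrite above is just De Morgan's law in RTL: for instance
         (and (not A) (not B)) becomes (not (ior A B)), whose IOR operand
         can then be split on its own.  */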
      /* Many RISC machines have a large set of logical insns.  If the
         second operand is a NOT, put it first so we will try to split the
         other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
        {
          rtx tem = XEXP (x, 0);
          SUBST (XEXP (x, 0), XEXP (x, 1));
          SUBST (XEXP (x, 1), tem);
        }
      break;

    default:
      break;
    }
  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_BITFIELD_OPS:	/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case RTX_TERNARY:
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_UNARY:
      /* Some machines have (and (shift ...) ...) insns.  If X is not
         an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
        return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
        return split;
      return loc;

    default:
      /* Otherwise, we don't have a split point.  */
      return 0;
    }
}
/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is nonzero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is nonzero.  */

static rtx
subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  const char *fmt;
  int len, i;
  rtx new;
  /* Two expressions are equal if they are identical copies of a shared
     RTX or if they are both registers with the same register number
     and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)			\
  ((X) == (Y)						\
   || (REG_P (X) && REG_P (Y)				\
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they
     will not have been seen as equal above.  However, the log links code
     will make a LOG_LINKS entry for that case.  If we do nothing, we
     will try to rerecognize our original insn and, when it succeeds,
     we will delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && REG_P (from)
      && reg_overlap_mentioned_p (x, from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && OBJECT_P (x))
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;
  /* Parallel asm_operands need special attention because all of the
     inputs are shared across the arms.  Furthermore, unsharing the
     rtl results in recognition failures.  Failure to handle this case
     specially can result in circular rtl.

     Solve this by doing a normal pass across the first entry of the
     parallel, and only processing the SET_DESTs of the subsequent
     entries.  Ultimately, the UNIQUE_COPY operand will be in the
     first entry of the parallel.  */

  if (code == PARALLEL
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
    {
      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

      /* If this substitution failed, this whole thing fails.  */
      if (GET_CODE (new) == CLOBBER
          && XEXP (new, 0) == const0_rtx)
        return new;

      SUBST (XVECEXP (x, 0, 0), new);

      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
        {
          rtx dest = SET_DEST (XVECEXP (x, 0, i));

          if (!REG_P (dest)
              && GET_CODE (dest) != CC0
              && GET_CODE (dest) != PC)
            {
              new = subst (dest, from, to, 0, unique_copy);

              /* If this substitution failed, this whole thing fails.  */
              if (GET_CODE (new) == CLOBBER
                  && XEXP (new, 0) == const0_rtx)
                return new;

              SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
            }
        }
    }
  else
    {
      len = GET_RTX_LENGTH (code);
      fmt = GET_RTX_FORMAT (code);

      /* We don't need to process a SET_DEST that is a register, CC0,
         or PC, so set up to skip this common case.  All other cases
         where we want to suppress replacing something inside a
         SET_SRC are handled via the IN_DEST operand.  */
      if (code == SET
          && (REG_P (SET_DEST (x))
              || GET_CODE (SET_DEST (x)) == CC0
              || GET_CODE (SET_DEST (x)) == PC))
        fmt = "ie";

      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
         constant.  */
      if (fmt[0] == 'e')
        op0_mode = GET_MODE (XEXP (x, 0));

      for (i = 0; i < len; i++)
        {
          if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
                {
                  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
                    {
                      new = (unique_copy && n_occurrences
                             ? copy_rtx (to) : to);
                      n_occurrences++;
                    }
                  else
                    {
                      new = subst (XVECEXP (x, i, j), from, to, 0,
                                   unique_copy);

                      /* If this substitution failed, this whole thing
                         fails.  */
                      if (GET_CODE (new) == CLOBBER
                          && XEXP (new, 0) == const0_rtx)
                        return new;
                    }

                  SUBST (XVECEXP (x, i, j), new);
                }
            }
          else if (fmt[i] == 'e')
            {
              /* If this is a register being set, ignore it.  */
              new = XEXP (x, i);
              if (in_dest
                  && i == 0
                  && (((code == SUBREG || code == ZERO_EXTRACT)
                       && REG_P (new))
                      || code == STRICT_LOW_PART))
                ;

              else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
                {
                  /* In general, don't install a subreg involving two
                     modes not tieable.  It can worsen register
                     allocation, and can even make invalid reload
                     insns, since the reg inside may need to be copied
                     from in the outside mode, and that may be invalid
                     if it is an fp reg copied in integer mode.

                     We allow two exceptions to this: It is valid if
                     it is inside another SUBREG and the mode of that
                     SUBREG and the mode of the inside of TO is
                     tieable and it is valid if X is a SET that copies
                     FROM to CC0.  */

                  if (GET_CODE (to) == SUBREG
                      && ! MODES_TIEABLE_P (GET_MODE (to),
                                            GET_MODE (SUBREG_REG (to)))
                      && ! (code == SUBREG
                            && MODES_TIEABLE_P (GET_MODE (x),
                                                GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
                      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
                      )
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);

#ifdef CANNOT_CHANGE_MODE_CLASS
                  if (code == SUBREG
                      && REG_P (to)
                      && REGNO (to) < FIRST_PSEUDO_REGISTER
                      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
                                                   GET_MODE (to),
                                                   GET_MODE (x)))
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
#endif

                  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
                  n_occurrences++;
                }
              else
                /* If we are in a SET_DEST, suppress most cases unless we
                   have gone inside a MEM, in which case we want to
                   simplify the address.  We assume here that things that
                   are actually part of the destination have their inner
                   parts in the first expression.  This is true for SUBREG,
                   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
                   things aside from REG and MEM that should appear in a
                   SET_DEST.  */
                new = subst (XEXP (x, i), from, to,
                             (((in_dest
                                && (code == SUBREG || code == STRICT_LOW_PART
                                    || code == ZERO_EXTRACT))
                               || code == SET)
                              && i == 0), unique_copy);

              /* If we found that we will have to reject this combination,
                 indicate that by returning the CLOBBER ourselves, rather than
                 an expression containing it.  This will speed things up as
                 well as prevent accidents where two CLOBBERs are considered
                 to be equal, thus producing an incorrect simplification.  */

              if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
                return new;

              if (GET_CODE (x) == SUBREG
                  && (GET_CODE (new) == CONST_INT
                      || GET_CODE (new) == CONST_DOUBLE))
                {
                  enum machine_mode mode = GET_MODE (x);

                  x = simplify_subreg (GET_MODE (x), new,
                                       GET_MODE (SUBREG_REG (x)),
                                       SUBREG_BYTE (x));
                  if (! x)
                    x = gen_rtx_CLOBBER (mode, const0_rtx);
                }
              else if (GET_CODE (new) == CONST_INT
                       && GET_CODE (x) == ZERO_EXTEND)
                {
                  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                                new, GET_MODE (XEXP (x, 0)));
                  gcc_assert (x);
                }
              else
                SUBST (XEXP (x, i), new);
            }
        }
    }
  /* Check if we are loading something from the constant pool via float
     extension; in this case we would undo compress_float_constant
     optimization and degenerate constant load to an immediate value.  */
  if (GET_CODE (x) == FLOAT_EXTEND
      && MEM_P (XEXP (x, 0))
      && MEM_READONLY_P (XEXP (x, 0)))
    {
      rtx tmp = avoid_constant_pool_reference (x);
      if (x != tmp)
        return x;
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
         with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
        x = combine_simplify_rtx (x, op0_mode, in_dest);

      if (GET_CODE (x) == code)
        break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
         have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}
/* Simplify X, a piece of RTL.  We just operate on the expression at the
   outer level; call `subst' to simplify recursively.  Return the new
   expression.

   OP0_MODE is the original mode of XEXP (x, 0).  IN_DEST is nonzero
   if we are inside a SET_DEST.  */

static rtx
combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx temp;
  int i;
  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }
  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Check for cases where both arms are testing the same
     condition.

     Don't do anything if all operands are very simple.  */

  if ((BINARY_P (x)
       && ((!OBJECT_P (XEXP (x, 0))
            && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
           || (!OBJECT_P (XEXP (x, 1))
               && ! (GET_CODE (XEXP (x, 1)) == SUBREG
                     && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
      || (UNARY_P (x)
          && (!OBJECT_P (XEXP (x, 0))
              && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                    && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
    {
      rtx cond, true_rtx, false_rtx;

      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
      if (cond != 0
          /* If everything is a comparison, what we have is highly unlikely
             to be simpler, so don't use it.  */
          && ! (COMPARISON_P (x)
                && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
        {
          rtx cop1 = const0_rtx;
          enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);

          if (cond_code == NE && COMPARISON_P (cond))
            return x;

          /* Simplify the alternative arms; this may collapse the true and
             false arms to store-flag values.  Be careful to use copy_rtx
             here since true_rtx or false_rtx might share RTL with x as a
             result of the if_then_else_cond call above.  */
          true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
          false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);

          /* If true_rtx and false_rtx are not general_operands, an
             if_then_else is unlikely to be simpler.  */
          if (general_operand (true_rtx, VOIDmode)
              && general_operand (false_rtx, VOIDmode))
            {
              enum rtx_code reversed;

              /* Restarting if we generate a store-flag expression will cause
                 us to loop.  Just drop through in this case.  */

              /* If the result values are STORE_FLAG_VALUE and zero, we can
                 just make the comparison operation.  */
              if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
                x = simplify_gen_relational (cond_code, mode, VOIDmode,
                                             cond, cop1);
              else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
                       && ((reversed = reversed_comparison_code_parts
                                         (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_relational (reversed, mode, VOIDmode,
                                             cond, cop1);

              /* Likewise, we can make the negate of a comparison operation
                 if the result values are - STORE_FLAG_VALUE and zero.  */
              else if (GET_CODE (true_rtx) == CONST_INT
                       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
                       && false_rtx == const0_rtx)
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (cond_code,
                                                                 mode,
                                                                 VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else if (GET_CODE (false_rtx) == CONST_INT
                       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
                       && true_rtx == const0_rtx
                       && ((reversed = reversed_comparison_code_parts
                                         (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (reversed,
                                                                 mode,
                                                                 VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else
                return gen_rtx_IF_THEN_ELSE (mode,
                                             simplify_gen_relational
                                               (cond_code, mode, VOIDmode,
                                                cond, cop1),
                                             true_rtx, false_rtx);

              code = GET_CODE (x);
              op0_mode = VOIDmode;
            }
        }
    }
  /* Try to fold this expression in case we have constants that weren't
     present before.  */
  temp = 0;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (XEXP (x, 0));
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
        enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
        if (cmp_mode == VOIDmode)
          {
            cmp_mode = GET_MODE (XEXP (x, 1));
            if (cmp_mode == VOIDmode)
              cmp_mode = op0_mode;
          }
        temp = simplify_relational_operation (code, mode, cmp_mode,
                                              XEXP (x, 0), XEXP (x, 1));
      }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
      break;
    case RTX_BITFIELD_OPS:
    case RTX_TERNARY:
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
                                         XEXP (x, 1), XEXP (x, 2));
      break;
    default:
      break;
    }

  if (temp)
    {
      x = temp;
      code = GET_CODE (temp);
      op0_mode = VOIDmode;
      mode = GET_MODE (temp);
    }
  /* First see if we can apply the inverse distributive law.  */
  if (code == PLUS || code == MINUS
      || code == AND || code == IOR || code == XOR)
    {
      x = apply_distributive_law (x);
      code = GET_CODE (x);
      op0_mode = VOIDmode;
    }
  /* If CODE is an associative operation not otherwise handled, see if we
     can associate some operands.  This can win if they are constants or
     if they are logically related (i.e. (a & b) & a).  */
  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
       || code == AND || code == IOR || code == XOR
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
      && ((INTEGRAL_MODE_P (mode) && code != DIV)
          || (flag_associative_math && FLOAT_MODE_P (mode))))
    {
      if (GET_CODE (XEXP (x, 0)) == code)
        {
          rtx other = XEXP (XEXP (x, 0), 0);
          rtx inner_op0 = XEXP (XEXP (x, 0), 1);
          rtx inner_op1 = XEXP (x, 1);
          rtx inner;

          /* Make sure we pass the constant operand if any as the second
             one if this is a commutative operation.  */
          if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
            {
              rtx tem = inner_op0;
              inner_op0 = inner_op1;
              inner_op1 = tem;
            }
          inner = simplify_binary_operation (code == MINUS ? PLUS
                                             : code == DIV ? MULT
                                             : code,
                                             mode, inner_op0, inner_op1);

          /* For commutative operations, try the other pair if that one
             didn't simplify.  */
          if (inner == 0 && COMMUTATIVE_ARITH_P (x))
            {
              other = XEXP (XEXP (x, 0), 1);
              inner = simplify_binary_operation (code, mode,
                                                 XEXP (XEXP (x, 0), 0),
                                                 XEXP (x, 1));
            }

          if (inner)
            return simplify_gen_binary (code, mode, other, inner);
        }
    }
  /* A little bit of algebraic simplification here.  */
  switch (code)
    {
    case MEM:
      /* Ensure that our address has any ASHIFTs converted to MULT in case
         address-recognizing predicates are called later.  */
      temp = make_compound_operation (XEXP (x, 0), MEM);
      SUBST (XEXP (x, 0), temp);
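      /* For instance, an address containing (ashift R (const_int 2))
         is rewritten as (mult R (const_int 4)), the canonical form
         inside addresses.  Illustrative only.  */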
      break;

    case SUBREG:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (SUBREG_REG (x));

      /* See if this can be moved to simplify_subreg.  */
      if (CONSTANT_P (SUBREG_REG (x))
          && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
             /* Don't call gen_lowpart if the inner mode
                is VOIDmode and we cannot simplify it, as SUBREG without
                inner mode is invalid.  */
          && (GET_MODE (SUBREG_REG (x)) != VOIDmode
              || gen_lowpart_common (mode, SUBREG_REG (x))))
        return gen_lowpart (mode, SUBREG_REG (x));

      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
        break;

      temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
                              SUBREG_BYTE (x));
      if (temp)
        return temp;

      /* Don't change the mode of the MEM if that would change the meaning
         of the address.  */
      if (MEM_P (SUBREG_REG (x))
          && (MEM_VOLATILE_P (SUBREG_REG (x))
              || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
        return gen_rtx_CLOBBER (mode, const0_rtx);

      /* Note that we cannot do any narrowing for non-constants since
         we might have been counting on using the fact that some bits were
         zero.  We now do this in the SET.  */

      break;
    case NEG:
      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
         replaced by (lshiftrt X C).  This will convert
         (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */

      if (GET_CODE (temp) == ASHIFTRT
          && GET_CODE (XEXP (temp, 1)) == CONST_INT
          && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
                                     INTVAL (XEXP (temp, 1)));

      /* If X has only a single bit that might be nonzero, say, bit I, convert
         (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
         MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
         (sign_extract X 1 Y).  But only do this if TEMP isn't a register
         or a SUBREG of one since we'd be making the expression more
         complex if it was just a register.  */

      if (!REG_P (temp)
          && ! (GET_CODE (temp) == SUBREG
                && REG_P (SUBREG_REG (temp)))
          && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
        {
          rtx temp1 = simplify_shift_const
            (NULL_RTX, ASHIFTRT, mode,
             simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
                                   GET_MODE_BITSIZE (mode) - 1 - i),
             GET_MODE_BITSIZE (mode) - 1 - i);

          /* If all we did was surround TEMP with the two shifts, we
             haven't improved anything, so don't use it.  Otherwise,
             we are better off with TEMP1.  */
          if (GET_CODE (temp1) != ASHIFTRT
              || GET_CODE (XEXP (temp1, 0)) != ASHIFT
              || XEXP (XEXP (temp1, 0), 0) != temp)
            return temp1;
        }
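      /* Concretely, in SImode the first rewrite above turns
         (neg (ashiftrt X (const_int 31))) into
         (lshiftrt X (const_int 31)): both yield 1 exactly when the sign
         bit of X is set.  */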
      break;

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
         because we don't know the real bitsize of the partial
         integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
        break;

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
        SUBST (XEXP (x, 0),
               force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                              GET_MODE_MASK (mode), 0));

      /* Similarly to what we do in simplify-rtx.c, a truncate of a register
         whose value is a comparison can be replaced with a subreg if
         STORE_FLAG_VALUE permits.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
          && (temp = get_last_value (XEXP (x, 0)))
          && COMPARISON_P (temp))
        return gen_lowpart (mode, XEXP (x, 0));
      break;
    case COMPARE:
#ifdef HAVE_cc0
      /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
         using cc0, in which case we want to leave it as a COMPARE
         so we can distinguish it from a register-register-copy.  */
      if (XEXP (x, 1) == const0_rtx)
        return XEXP (x, 0);

      /* x - 0 is the same as x unless x's mode has signed zeros and
         allows rounding towards -infinity.  Under those conditions,
         0 - 0 is -0.  */
      if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
            && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
          && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
        return XEXP (x, 0);
#endif
      break;
    case CONST:
      /* (const (const X)) can become (const X).  Do it this way rather than
         returning the inner CONST since CONST can be shared with a
         REG_EQUAL note.  */
      if (GET_CODE (XEXP (x, 0)) == CONST)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;
    case LO_SUM:
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
         can add in an offset.  find_split_point will split this address up
         again if it doesn't match.  */
      if (GET_CODE (XEXP (x, 0)) == HIGH
          && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
        return XEXP (x, 1);
      break;
    case PLUS:
      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
         when c is (const_int (pow2 + 1) / 2) is a sign extension of a
         bit-field and can be replaced by either a sign_extend or a
         sign_extract.  The `and' may be a zero_extend and the two
         <c>, -<c> constants may be reversed.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
          && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
              || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
               && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
               && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
                   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
                  && (GET_MODE_BITSIZE (GET_MODE
                                        (XEXP (XEXP (XEXP (x, 0), 0), 0)))
                      == (unsigned int) i + 1))))
        return simplify_shift_const
          (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 XEXP (XEXP (XEXP (x, 0), 0), 0),
                                 GET_MODE_BITSIZE (mode) - (i + 1)),
           GET_MODE_BITSIZE (mode) - (i + 1));

      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
         can become (ashiftrt (ashift (xor x 1) C) C) where C is
         the bitsize of the mode - 1.  This allows simplification of
         "a = (b & 8) == 0;"  */
      if (XEXP (x, 1) == constm1_rtx
          && !REG_P (XEXP (x, 0))
          && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                && REG_P (SUBREG_REG (XEXP (x, 0))))
          && nonzero_bits (XEXP (x, 0), mode) == 1)
        return simplify_shift_const
          (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
                                 GET_MODE_BITSIZE (mode) - 1),
           GET_MODE_BITSIZE (mode) - 1);

      /* If we are adding two things that have no bits in common, convert
         the addition into an IOR.  This will often be further simplified,
         for example in cases like ((a & 1) + (a & 2)), which can
         become a & 3.  */

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode)
              & nonzero_bits (XEXP (x, 1), mode)) == 0)
        {
          /* Try to simplify the expression further.  */
          rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
          temp = combine_simplify_rtx (tor, mode, in_dest);

          /* If we could, great.  If not, do not go ahead with the IOR
             replacement, since PLUS appears in many special purpose
             address arithmetic instructions.  */
          if (GET_CODE (temp) != CLOBBER && temp != tor)
            return temp;
        }
      break;

    case MINUS:
      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
         (and <foo> (const_int pow2-1))  */
      if (GET_CODE (XEXP (x, 1)) == AND
          && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
          && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
          && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
        return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
                                       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
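      /* E.g. (minus X (and X (const_int -8))) subtracts X with its low
         three bits cleared from X itself, leaving exactly those bits:
         (and X (const_int 7)).  */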
      break;

    case MULT:
      /* If we have (mult (plus A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  This
         occurs mostly in addresses, often when unrolling loops.  */

      if (GET_CODE (XEXP (x, 0)) == PLUS)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }

      /* Try simplify a*(b/c) as (a*b)/c.  */
      if (FLOAT_MODE_P (mode) && flag_associative_math
          && GET_CODE (XEXP (x, 0)) == DIV)
        {
          rtx tem = simplify_binary_operation (MULT, mode,
                                               XEXP (XEXP (x, 0), 0),
                                               XEXP (x, 1));
          if (tem)
            return simplify_gen_binary (DIV, mode, tem,
                                        XEXP (XEXP (x, 0), 1));
        }
      break;

    case UDIV:
      /* If this is a divide by a power of two, treat it as a shift if
         its first operand is a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
          && (GET_CODE (XEXP (x, 0)) == ASHIFT
              || GET_CODE (XEXP (x, 0)) == LSHIFTRT
              || GET_CODE (XEXP (x, 0)) == ASHIFTRT
              || GET_CODE (XEXP (x, 0)) == ROTATE
              || GET_CODE (XEXP (x, 0)) == ROTATERT))
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
                                     XEXP (x, 0), i);
      break;

    case EQ:  case NE:
    case GT:  case GTU:  case GE:  case GEU:
    case LT:  case LTU:  case LE:  case LEU:
    case UNEQ:  case LTGT:
    case UNGT:  case UNGE:
    case UNLT:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If the first operand is a condition code, we can't do anything
         with it.  */
      if (GET_CODE (XEXP (x, 0)) == COMPARE
          || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
              && ! CC0_P (XEXP (x, 0))))
        {
          rtx op0 = XEXP (x, 0);
          rtx op1 = XEXP (x, 1);
          enum rtx_code new_code;

          if (GET_CODE (op0) == COMPARE)
            op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

          /* Simplify our comparison, if possible.  */
          new_code = simplify_comparison (code, &op0, &op1);

          /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
             if only the low-order bit is possibly nonzero in X (such as when
             X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
             (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
             known to be either 0 or -1, NE becomes a NEG and EQ becomes
             NOT.

             Remove any ZERO_EXTRACT we made when thinking this was a
             comparison.  It may now be simpler to use, e.g., an AND.  If a
             ZERO_EXTRACT is indeed appropriate, it will be placed back by
             the call to make_compound_operation in the SET case.  */

          if (STORE_FLAG_VALUE == 1
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && nonzero_bits (op0, mode) == 1)
            return gen_lowpart (mode,
                                expand_compound_operation (op0));

          else if (STORE_FLAG_VALUE == 1
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NEG, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          else if (STORE_FLAG_VALUE == 1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_binary (XOR, mode,
                                          gen_lowpart (mode, op0),
                                          const1_rtx);
            }

          else if (STORE_FLAG_VALUE == 1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return plus_constant (gen_lowpart (mode, op0), 1);
            }

          /* If STORE_FLAG_VALUE is -1, we have cases similar to
             those above.  */
          if (STORE_FLAG_VALUE == -1
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && op1 == const0_rtx
              && (num_sign_bit_copies (op0, mode)
                  == GET_MODE_BITSIZE (mode)))
            return gen_lowpart (mode,
                                expand_compound_operation (op0));

          else if (STORE_FLAG_VALUE == -1
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NEG, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          else if (STORE_FLAG_VALUE == -1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NOT, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          /* If X is 0/1, (eq X 0) is X-1.  */
          else if (STORE_FLAG_VALUE == -1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return plus_constant (gen_lowpart (mode, op0), -1);
            }

          /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
             one bit that might be nonzero, we can convert (ne x 0) to
             (ashift x c) where C puts the bit in the sign bit.  Remove any
             AND with STORE_FLAG_VALUE when we are done, since we are only
             going to test the sign bit.  */
          if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
                  == (unsigned HOST_WIDE_INT) 1
                     << (GET_MODE_BITSIZE (mode) - 1))
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
            {
              x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                        expand_compound_operation (op0),
                                        GET_MODE_BITSIZE (mode) - 1 - i);
              if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
                return XEXP (x, 0);
              else
                return x;
            }

          /* If the code changed, return a whole new comparison.  */
          if (new_code != code)
            return gen_rtx_fmt_ee (new_code, mode, op0, op1);

          /* Otherwise, keep this operation, but maybe change its operands.
             This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
          SUBST (XEXP (x, 0), op0);
          SUBST (XEXP (x, 1), op1);
        }
      break;
    case IF_THEN_ELSE:
      return simplify_if_then_else (x);

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* If we are processing SET_DEST, we are done.  */
      if (in_dest)
        return x;

      return expand_compound_operation (x);

    case SET:
      return simplify_set (x);

    case AND:
    case IOR:
      return simplify_logical (x);

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* If this is a shift by a constant amount, simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return simplify_shift_const (x, code, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)));

      else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
        SUBST (XEXP (x, 1),
               force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
                              ((HOST_WIDE_INT) 1
                               << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
                              - 1,
                              0));
      break;

    default:
      break;
    }

  return x;
}
/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */

static rtx
simplify_if_then_else (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  rtx true_rtx = XEXP (x, 1);
  rtx false_rtx = XEXP (x, 2);
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = COMPARISON_P (cond);
  rtx temp;
  int i;
  enum rtx_code false_code;
  rtx reversed;
  /* Simplify storing of the truth value.  */
  if (comparison_p
      && true_rtx == const_true_rtx && false_rtx == const0_rtx)
    return simplify_gen_relational (true_code, mode, VOIDmode,
                                    XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p
      && true_rtx == const0_rtx && false_rtx == const_true_rtx
      && (reversed = reversed_comparison (cond, mode)))
    return reversed;
  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p
      && ((false_code = reversed_comparison_code (cond, NULL))
          != UNKNOWN)
      && REG_P (XEXP (cond, 0)))
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms.  */

      if (false_code == EQ)
        {
          swapped = 1, true_code = EQ, false_code = NE;
          temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
        }

      /* If we are comparing against zero and the expression being tested has
         only a single bit that might be nonzero, that is its value when it is
         not equal to zero.  Similarly if it is known to be -1 or 0.  */

      if (true_code == EQ && true_val == const0_rtx
          && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
        {
          false_code = EQ;
          false_val = GEN_INT (trunc_int_for_mode (nzb, GET_MODE (from)));
        }
      else if (true_code == EQ && true_val == const0_rtx
               && (num_sign_bit_copies (from, GET_MODE (from))
                   == GET_MODE_BITSIZE (GET_MODE (from))))
        {
          false_code = EQ;
          false_val = constm1_rtx;
        }

      /* Now simplify an arm if we know the value of the register in the
         branch and it is used in the arm.  Be careful due to the potential
         of locally-shared RTL.  */

      if (reg_mentioned_p (from, true_rtx))
        true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
                                      from, true_val),
                          pc_rtx, pc_rtx, 0, 0);
      if (reg_mentioned_p (from, false_rtx))
        false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
                                       from, false_val),
                           pc_rtx, pc_rtx, 0, 0);

      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);

      true_rtx = XEXP (x, 1);
      false_rtx = XEXP (x, 2);
      true_code = GET_CODE (cond);
    }
  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p
      && reversed_comparison_code (cond, NULL) != UNKNOWN
      && (true_rtx == pc_rtx
          || (CONSTANT_P (true_rtx)
              && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
          || true_rtx == const0_rtx
          || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
          || (GET_CODE (true_rtx) == SUBREG
              && OBJECT_P (SUBREG_REG (true_rtx))
              && !OBJECT_P (false_rtx))
          || reg_mentioned_p (true_rtx, false_rtx)
          || rtx_equal_p (false_rtx, XEXP (cond, 0))))
    {
      true_code = reversed_comparison_code (cond, NULL);
      SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
      SUBST (XEXP (x, 1), false_rtx);
      SUBST (XEXP (x, 2), true_rtx);

      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
      cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out.  */
      true_code = GET_CODE (cond);
      comparison_p = COMPARISON_P (cond);
    }
  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
    return true_rtx;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && !HONOR_NANS (mode)
      && rtx_equal_p (XEXP (cond, 0), false_rtx)
      && rtx_equal_p (XEXP (cond, 1), true_rtx))
    return false_rtx;
  else if (true_code == NE && ! side_effects_p (cond)
           && !HONOR_NANS (mode)
           && rtx_equal_p (XEXP (cond, 0), true_rtx)
           && rtx_equal_p (XEXP (cond, 1), false_rtx))
    return true_rtx;
  /* Look for cases where we have (abs x) or (neg (abs X)).  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_CODE (false_rtx) == NEG
      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
      && comparison_p
      && rtx_equal_p (true_rtx, XEXP (cond, 0))
      && ! side_effects_p (true_rtx))
    switch (true_code)
      {
      case GT:
      case GE:
        return simplify_gen_unary (ABS, mode, true_rtx, mode);
      case LT:
      case LE:
        return
          simplify_gen_unary (NEG, mode,
                              simplify_gen_unary (ABS, mode, true_rtx, mode),
                              mode);
      default:
        break;
      }
  /* Look for MIN or MAX.  */

  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true_rtx)
      && rtx_equal_p (XEXP (cond, 1), false_rtx)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
        return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
      case LE:
      case LT:
        return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
      case GEU:
      case GTU:
        return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
      case LEU:
      case LTU:
        return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
      default:
        break;
      }
  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 or -1, but it isn't worth checking for.  */

  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p
      && GET_MODE_CLASS (mode) == MODE_INT
      && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true_rtx, SET);
      rtx f = make_compound_operation (false_rtx, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
      enum machine_mode m = mode;
      rtx z = 0, c1 = NULL_RTX;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
           || GET_CODE (t) == IOR || GET_CODE (t) == XOR
           || GET_CODE (t) == ASHIFT
           || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
          && rtx_equal_p (XEXP (t, 0), f))
        c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
         would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
                || GET_CODE (t) == XOR)
               && rtx_equal_p (XEXP (t, 1), f))
        c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0),
                                                          0))))))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0),
                                                          1))))))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }

      if (z)
        {
          temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
                                                 cond_op0, cond_op1),
                        pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (MULT, m, temp,
                                      simplify_gen_binary (MULT, m, c1,
                                                           const_true_rtx));
          temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);

          if (extend_op != UNKNOWN)
            temp = simplify_gen_unary (extend_op, mode, temp, m);

          return temp;
        }
    }
5565 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5566 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5567 negation of a single bit, we can convert this operation to a shift. We
5568 can actually do this more generally, but it doesn't seem worth it. */
  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
	   && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
	       == GET_MODE_BITSIZE (mode))
	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
			    gen_lowpart (mode, XEXP (cond, 0)), i);
  /* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8.  */
  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
      && GET_MODE (XEXP (cond, 0)) == mode
      && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
	 == nonzero_bits (XEXP (cond, 0), mode)
      && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
    return XEXP (cond, 0);

  return x;
}
/* Simplify X, a SET expression.  Return the new expression.  */

static rtx
simplify_set (rtx x)
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;
  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0);
      SUBST (SET_SRC (x), src);
    }
  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_MODE_CLASS (mode) == MODE_CC
       || GET_CODE (src) == COMPARE
       || CC0_P (dest))
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && COMPARISON_P (*cc_use)
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1, tmp;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
	op0 = src, op1 = CONST0_RTX (GET_MODE (src));

      tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
					   op0, op1);
      if (!tmp)
	new_code = old_code;
      else if (!CONSTANT_P (tmp))
	{
	  new_code = GET_CODE (tmp);
	  op0 = XEXP (tmp, 0);
	  op1 = XEXP (tmp, 1);
	}
      else
	{
	  rtx pat = PATTERN (other_insn);
	  undobuf.other_insn = other_insn;
	  SUBST (*cc_use, tmp);

	  /* Attempt to simplify CC user.  */
	  if (GET_CODE (pat) == SET)
	    {
	      rtx new = simplify_rtx (SET_SRC (pat));
	      if (new != NULL_RTX)
		SUBST (SET_SRC (pat), new);
	    }

	  /* Convert X into a no-op move.  */
	  SUBST (SET_DEST (x), pc_rtx);
	  SUBST (SET_SRC (x), pc_rtx);
	  return x;
	}
      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (new_code, &op0, &op1);

#ifdef SELECT_CC_MODE
      /* If this machine has CC modes other than CCmode, check to see if we
	 need to use a different CC mode here.  */
      if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
	compare_mode = GET_MODE (op0);
      else
	compare_mode = SELECT_CC_MODE (new_code, op0, op1);

      /* If the mode changed, we have to change SET_DEST, the mode in the
	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
	 a hard register, just build new versions with the proper mode.  If it
	 is a pseudo, we lose unless this is the only time we set the pseudo,
	 in which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
	{
	  if (can_change_dest_mode (dest, 0, compare_mode))
	    {
	      unsigned int regno = REGNO (dest);
	      rtx new_dest;

	      if (regno < FIRST_PSEUDO_REGISTER)
		new_dest = gen_rtx_REG (compare_mode, regno);
	      else
		{
		  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
		  new_dest = regno_reg_rtx[regno];
		}

	      SUBST (SET_DEST (x), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      other_changed = 1;

	      dest = new_dest;
	    }
	}
#endif /* SELECT_CC_MODE */
      /* If the code changed, we have to build a new comparison in
	 undobuf.other_insn.  */
      if (new_code != old_code)
	{
	  int other_changed_previously = other_changed;
	  unsigned HOST_WIDE_INT mask;

	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
					  dest, const0_rtx));
	  other_changed = 1;

	  /* If the only change we made was to change an EQ into an NE or
	     vice versa, OP0 has only one bit that might be nonzero, and OP1
	     is zero, check if changing the user of the condition code will
	     produce a valid insn.  If it won't, we can keep the original code
	     in that insn by surrounding our operation with an XOR.  */

	  if (((old_code == NE && new_code == EQ)
	       || (old_code == EQ && new_code == NE))
	      && ! other_changed_previously && op1 == const0_rtx
	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
	    {
	      rtx pat = PATTERN (other_insn), note = 0;

	      if ((recog_for_combine (&pat, other_insn, &note) < 0
		   && ! check_asm_operands (pat)))
		{
		  PUT_CODE (*cc_use, old_code);
		  other_changed = 0;

		  op0 = simplify_gen_binary (XOR, GET_MODE (op0),
					     op0, GEN_INT (mask));
		}
	    }
	}

      if (other_changed)
	undobuf.other_insn = other_insn;
      /* If we are now comparing against zero, change our source if
	 needed.  If we do not use cc0, we always have a COMPARE.  */
      if (op1 == const0_rtx && dest == cc0_rtx)
	{
	  SUBST (SET_SRC (x), op0);
	  src = op0;
	}
      else
	{
	  /* Otherwise, if we didn't previously have a COMPARE in the
	     correct mode, we need one.  */
	  if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
	    {
	      SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
	      src = SET_SRC (x);
	    }
	  else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
	    {
	      SUBST (SET_SRC (x), op0);
	      src = SET_SRC (x);
	    }
	  /* Otherwise, update the COMPARE if needed.  */
	  else if (XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
	    {
	      SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
	      src = SET_SRC (x);
	    }
	}
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
	 compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }
  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machine where it is defined, this transformation is safe
     as long as M1 and M2 have the same number of words.  */
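  /* A sketch of the transformation (the modes here are only illustrative):
     starting from (set x:SI (subreg:SI (plus:DI a b) 0)) with X a REG, we
     rewrite it as (set (subreg:DI x) (plus:DI a b)), performing the
     operation in the wider mode and keeping only the low part.  */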
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && !OBJECT_P (SUBREG_REG (src))
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
	   / UNITS_PER_WORD)
	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CANNOT_CHANGE_MODE_CLASS
      && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
	    && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
					 GET_MODE (SUBREG_REG (src)),
					 GET_MODE (src)))
#endif
      && (REG_P (dest)
	  || (GET_CODE (dest) == SUBREG
	      && REG_P (SUBREG_REG (dest)))))
    {
      SUBST (SET_DEST (x),
	     gen_lowpart (GET_MODE (SUBREG_REG (src)),
			  dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }
  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
     in SRC.  */
  if (dest == cc0_rtx
      && GET_CODE (src) == SUBREG
      && subreg_lowpart_p (src)
      && (GET_MODE_BITSIZE (GET_MODE (src))
	  < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
    {
      rtx inner = SUBREG_REG (src);
      enum machine_mode inner_mode = GET_MODE (inner);

      /* Here we make sure that we don't have a sign bit on.  */
      if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (inner, inner_mode)
	      < ((unsigned HOST_WIDE_INT) 1
		 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
	{
	  SUBST (SET_SRC (x), inner);
	  src = SET_SRC (x);
	}
    }
#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */
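  /* For example, on a target whose LOAD_EXTEND_OP is ZERO_EXTEND for QImode,
     (set foo:SI (subreg:SI (mem:QI addr) 0)) is rewritten below as
     (set foo:SI (zero_extend:SI (mem:QI addr))).  */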
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
      && SUBREG_BYTE (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && MEM_P (SUBREG_REG (src)))
    {
      SUBST (SET_SRC (x),
	     gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
			    GET_MODE (src), SUBREG_REG (src)));

      src = SET_SRC (x);
    }
#endif
  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead.  Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */
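  /* Concretely, when A is known to be 0 or -1 the arms of
     (if_then_else (ne A 0) B C) can be selected with masks:
     (ior (and A B) (and (not A) C)).  TERM2 and TERM3 below are those two
     AND terms, and TERM1 absorbs an arm that is an IOR of the other arm
     with some value.  */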
  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
			       GET_MODE (XEXP (XEXP (src, 0), 0)))
	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
		      ? XEXP (src, 1) : XEXP (src, 2));
      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
		       ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      if (GET_CODE (true_rtx) == IOR
	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
	term1 = false_rtx, true_rtx = XEXP (true_rtx, 1),
	  false_rtx = const0_rtx;
      else if (GET_CODE (true_rtx) == IOR
	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
	term1 = false_rtx, true_rtx = XEXP (true_rtx, 0),
	  false_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
	term1 = true_rtx, false_rtx = XEXP (false_rtx, 1),
	  true_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
	term1 = true_rtx, false_rtx = XEXP (false_rtx, 0),
	  true_rtx = const0_rtx;

      term2 = simplify_gen_binary (AND, GET_MODE (src),
				   XEXP (XEXP (src, 0), 0), true_rtx);
      term3 = simplify_gen_binary (AND, GET_MODE (src),
				   simplify_gen_unary (NOT, GET_MODE (src),
						       XEXP (XEXP (src, 0), 0),
						       GET_MODE (src)),
				   false_rtx);

      SUBST (SET_SRC (x),
	     simplify_gen_binary (IOR, GET_MODE (src),
				  simplify_gen_binary (IOR, GET_MODE (src),
						       term1, term2),
				  term3));

      src = SET_SRC (x);
    }
  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}
/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  */

static rtx
simplify_logical (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  switch (GET_CODE (x))
    {
    case AND:
      /* We can call simplify_and_const_int only if we don't lose
	 any (sign) bits when converting INTVAL (op1) to
	 "unsigned HOST_WIDE_INT".  */
      if (GET_CODE (op1) == CONST_INT
	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      || INTVAL (op1) > 0))
	{
	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
	  if (GET_CODE (x) != AND)
	    return x;

	  op0 = XEXP (x, 0);
	  op1 = XEXP (x, 1);
	}

      /* If we have any of (and (ior A B) C) or (and (xor A B) C),
	 apply the distributive law and then the inverse distributive
	 law to see if things simplify.  */
      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
	{
	  rtx result = distribute_and_simplify_rtx (x, 0);
	  if (result)
	    return result;
	}
      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
	{
	  rtx result = distribute_and_simplify_rtx (x, 1);
	  if (result)
	    return result;
	}
      break;

    case IOR:
      /* If we have (ior (and A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
	{
	  rtx result = distribute_and_simplify_rtx (x, 0);
	  if (result)
	    return result;
	}

      if (GET_CODE (op1) == AND)
	{
	  rtx result = distribute_and_simplify_rtx (x, 1);
	  if (result)
	    return result;
	}
      break;

    default:
      break;
    }

  return x;
}
/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */
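/* As an illustration, on a 32-bit target (sign_extend:SI (reg:QI r))
   expands to the pair of shifts
   (ashiftrt:SI (ashift:SI (reg:SI r) (const_int 24)) (const_int 24)),
   while (zero_extend:SI (reg:QI r)) becomes the single operation
   (and:SI (reg:SI r) (const_int 255)).  */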
static rtx
expand_compound_operation (rtx x)
{
  unsigned HOST_WIDE_INT pos = 0, len;
  int unsignedp = 0;
  unsigned int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
	 it depends on implicitly extending the value.
	 Since we don't know the right way to extend it,
	 we can't tell whether the implicit way is right.

	 Even for a mode that is no wider than a const_int,
	 we can't win, because we need to sign extend one of its bits through
	 the rest of it, and we don't know which bit.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
	 reloaded.  If not for that, MEM's would very rarely be safe.

	 Reject MODEs bigger than a word, because we might not be able
	 to reference a two-register group starting with an arbitrary register
	 (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
	return x;

      /* Reject MODEs that aren't scalar integers because turning vector
	 or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
	return x;

      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
	 is if it is an ASM_OPERANDS), we can't do anything since we don't
	 know how much masking to do.  */
      if (len == 0)
	return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;

      /* ... fall through ...  */

    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	return XEXP (x, 0);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || GET_CODE (XEXP (x, 2)) != CONST_INT
	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
	return x;

      /* Reject MODEs that aren't scalar integers because turning vector
	 or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
	return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* This should stay within the object being extracted, fail otherwise.  */
      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
	return x;

      if (BITS_BIG_ENDIAN)
	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;

      break;

    default:
      return x;
    }
  /* Convert sign extension to zero extension, if we know that the high
     bit is not set, as this is easier to optimize.  It will be converted
     back to the cheaper alternative in make_extraction.  */
  if (GET_CODE (x) == SIGN_EXTEND
      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	       & ~(((unsigned HOST_WIDE_INT)
		    GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
		   >> 1))
	      == 0)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
      rtx temp2 = expand_compound_operation (temp);

      /* Make sure this is a profitable operation.  */
      if (rtx_cost (x, SET) > rtx_cost (temp2, SET))
	return temp2;
      else if (rtx_cost (x, SET) > rtx_cost (temp, SET))
	return temp;
      else
	return x;
    }
  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
	 know that the last value didn't have any inappropriate bits
	 set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
	  && subreg_lowpart_p (XEXP (x, 0))
	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
	 is a comparison and STORE_FLAG_VALUE permits.  This is like
	 the first case, but it works even when GET_MODE (x) is larger
	 than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
	  && COMPARISON_P (XEXP (XEXP (x, 0), 0))
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
	  && subreg_lowpart_p (XEXP (x, 0))
	  && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return SUBREG_REG (XEXP (x, 0));
    }
  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in
     to produce such a position.  */
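  /* Worked example: extracting LEN == 8 bits at POS == 3 from a 32-bit
     value gives an inner left shift of 32 - 3 - 8 == 21 bits and an
     outer right shift of 32 - 8 == 24 bits.  */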
  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
  if (modewidth + len >= pos)
    {
      enum machine_mode mode = GET_MODE (x);
      tem = gen_lowpart (mode, XEXP (x, 0));
      if (!tem || GET_CODE (tem) == CLOBBER)
	return x;
      tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
				  tem, modewidth - pos - len);
      tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
				  mode, tem, modewidth - len);
    }
  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
				  simplify_shift_const (NULL_RTX, LSHIFTRT,
							GET_MODE (x),
							XEXP (x, 0), pos),
				  ((HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}
/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */
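/* Sketch of the result for a constant field: an assignment such as
   (set (zero_extract:SI r (const_int 8) (const_int 4)) src)
   becomes, in effect,
   r = (r & ~(0xff << 4)) | ((src & 0xff) << 4)
   built from the MASK, CLEARED and MASKED values computed below.  */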
static const_rtx
expand_field_assignment (const_rtx x)
{
  rtx inner;
  rtx pos;			/* Always counts from low bit.  */
  int len;
  rtx mask, cleared, masked;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
	}
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* A constant position should stay within the width of INNER.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    break;

	  if (BITS_BIG_ENDIAN)
	    {
	      if (GET_CODE (pos) == CONST_INT)
		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			       - INTVAL (pos));
	      else if (GET_CODE (pos) == MINUS
		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
		       && (INTVAL (XEXP (pos, 1))
			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
		/* If position is ADJUST - X, new position is X.  */
		pos = XEXP (pos, 0);
	      else
		pos = simplify_gen_binary (MINUS, GET_MODE (pos),
					   GEN_INT (GET_MODE_BITSIZE (
						    GET_MODE (inner))
						    - len),
					   pos);
	    }
	}

      /* A SUBREG between two modes that occupy the same numbers of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       /* We need SUBREGs to compute nonzero_bits properly.  */
	       && nonzero_sign_valid
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
			   gen_lowpart
			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
			    SET_SRC (x)));
	  continue;
	}
      else
	break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non scalar integer modes.  */
      if (! SCALAR_INT_MODE_P (compute_mode))
	{
	  enum machine_mode imode;

	  /* Don't do anything for vector or complex integral types.  */
	  if (! FLOAT_MODE_P (compute_mode))
	    break;

	  /* Try to find an integral mode to pun with.  */
	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
	  if (imode == BLKmode)
	    break;

	  compute_mode = imode;
	  inner = gen_lowpart (imode, inner);
	}

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len >= HOST_BITS_PER_WIDE_INT)
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.  */
      mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      cleared = simplify_gen_binary (AND, compute_mode,
				     simplify_gen_unary (NOT, compute_mode,
				       simplify_gen_binary (ASHIFT,
							    compute_mode,
							    mask, pos),
				       compute_mode),
				     inner);
      masked = simplify_gen_binary (ASHIFT, compute_mode,
				    simplify_gen_binary (
				      AND, compute_mode,
				      gen_lowpart (compute_mode, SET_SRC (x)),
				      mask),
				    pos);

      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
		       simplify_gen_binary (IOR, compute_mode,
					    cleared, masked));
    }

  return x;
}
/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   UNSIGNEDP is nonzero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is nonzero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */
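/* For example (illustrative only), a call such as
   make_extraction (SImode, r, 8, NULL_RTX, 8, 1, 0, 0)
   asks for the 8 bits of R starting at bit 8, zero-extended to SImode,
   and typically yields (zero_extract:SI r (const_int 8) (const_int 8))
   or a cheaper equivalent such as a shift and AND.  */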
static rtx
make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
		 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
		 int in_dest, int in_compare)
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  HOST_WIDE_INT orig_pos;

  if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
	 consider just the QI as the memory to extract from.
	 The subreg adds or removes high bits; its mode is
	 irrelevant to the meaning of this extraction,
	 since POS and LEN count from the lsb.  */
      if (MEM_P (SUBREG_REG (inner)))
	is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }
  else if (GET_CODE (inner) == ASHIFT
	   && GET_CODE (XEXP (inner, 1)) == CONST_INT
	   && pos_rtx == 0 && pos == 0
	   && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
    {
      /* We're extracting the least significant bits of an rtx
	 (ashift X (const_int C)), where LEN > C.  Extract the
	 least significant (LEN - C) bits of X, giving an rtx
	 whose mode is MODE, then shift it left C times.  */
      new = make_extraction (mode, XEXP (inner, 0),
			     0, 0, len - INTVAL (XEXP (inner, 1)),
			     unsignedp, in_dest, in_compare);
      if (new != 0)
	return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode. For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  */

  if (tmode != BLKmode
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
	   && !MEM_P (inner)
	   && (inner_mode == tmode
	       || !REG_P (inner)
	       || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
					 GET_MODE_BITSIZE (inner_mode))
	       || reg_truncated_to_mode (tmode, inner))
	   && (! in_dest
	       || (REG_P (inner)
		   && have_insn_for (STRICT_LOW_PART, tmode))))
	  || (MEM_P (inner) && pos_rtx == 0
	      && (pos
		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
		     : BITS_PER_UNIT)) == 0
	      /* We can't do this if we are widening INNER_MODE (it
		 may not be aligned, for one thing).  */
	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
	      && (inner_mode == tmode
		  || (! mode_dependent_address_p (XEXP (inner, 0))
		      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current mode are the same, we need not
	 adjust the offset.  Otherwise, we do if bytes big endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (MEM_P (inner))
	{
	  HOST_WIDE_INT offset;

	  /* POS counts from lsb, but make OFFSET count in memory order.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
	  else
	    offset = pos / BITS_PER_UNIT;

	  new = adjust_address_nv (inner, tmode, offset);
	}
      else if (REG_P (inner))
	{
	  if (tmode != inner_mode)
	    {
	      /* We can't call gen_lowpart in a DEST since we
		 always want a SUBREG (see below) and it would sometimes
		 return a new hard register.  */
	      if (pos || in_dest)
		{
		  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;

		  if (WORDS_BIG_ENDIAN
		      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
		    final_word = ((GET_MODE_SIZE (inner_mode)
				   - GET_MODE_SIZE (tmode))
				  / UNITS_PER_WORD) - final_word;

		  final_word *= UNITS_PER_WORD;
		  if (BYTES_BIG_ENDIAN &&
		      GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
		    final_word += (GET_MODE_SIZE (inner_mode)
				   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;

		  /* Avoid creating invalid subregs, for example when
		     simplifying (x>>32)&255.  */
		  if (!validate_subreg (tmode, inner_mode, inner, final_word))
		    return NULL_RTX;

		  new = gen_rtx_SUBREG (tmode, inner, final_word);
		}
	      else
		new = gen_lowpart (tmode, inner);
	    }
	  else
	    new = inner;
	}
      else
	new = force_to_mode (inner, tmode,
			     len >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
			     0);

      /* If this extraction is going into the destination of a SET,
	 make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
	return (MEM_P (new) ? new
		: (GET_CODE (new) != SUBREG
		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));

      if (mode == tmode)
	return new;

      if (GET_CODE (new) == CONST_INT)
	return gen_int_mode (INTVAL (new), mode);

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of
	 sign and zero extension, that are equivalent in these cases.  */
      if (flag_expensive_optimizations
	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
	      && ((nonzero_bits (new, tmode)
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (tmode))
		       >> 1))
		  == 0)))
	{
	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);

	  /* Prefer ZERO_EXTENSION, since it gives more information to
	     backends.  */
	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
	    return temp;
	  return temp1;
	}

      /* Otherwise, sign- or zero-extend unless we already are in the
	 proper mode.  */

      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
			     mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && unsignedp)
    return 0;

  /* Unless INNER is not MEM, reject this if we would be spanning bytes or
     if the position is not a constant and the length is not 1.  In all
     other cases, we would only be going outside our object in cases when
     an original shift would have been undefined.  */
  if (MEM_P (inner)
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
	  || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  */
  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
      pos_mode = mode_for_extraction (EP_insv, 2);
      extraction_mode = mode_for_extraction (EP_insv, 3);
    }

  if (! in_dest && unsignedp
      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
      pos_mode = mode_for_extraction (EP_extzv, 3);
      extraction_mode = mode_for_extraction (EP_extzv, 0);
    }

  if (! in_dest && ! unsignedp
      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
      pos_mode = mode_for_extraction (EP_extv, 3);
      extraction_mode = mode_for_extraction (EP_extv, 0);
    }

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is the preferred mode
     for an extraction pattern's first input operand, or word_mode if there
     is none.  */
  if (!MEM_P (inner))
    wanted_inner_mode = wanted_inner_reg_mode;
  else
    {
      /* Be careful not to go beyond the extracted object and maintain the
	 natural alignment of the memory.  */
      wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
      while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
	     > GET_MODE_BITSIZE (wanted_inner_mode))
	{
	  wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode);
	  gcc_assert (wanted_inner_mode != VOIDmode);
	}

      /* If we have to change the mode of memory and cannot, the desired mode
	 is EXTRACTION_MODE.  */
      if (inner_mode != wanted_inner_mode
	  && (mode_dependent_address_p (XEXP (inner, 0))
	      || MEM_VOLATILE_P (inner)))
	wanted_inner_mode = extraction_mode;
    }

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
	 BITS_BIG_ENDIAN style.  If position is constant, compute new
	 position.  Otherwise, build subtraction.
	 Note that POS is relative to the mode of the original argument.
	 If it's a MEM we need to recompute POS relative to that.
	 However, if we're extracting from (or inserting into) a register,
	 we want to recompute POS relative to wanted_inner_mode.  */
      int width = (MEM_P (inner)
		   ? GET_MODE_BITSIZE (is_mode)
		   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
	pos = width - len - pos;
      else
	pos_rtx
	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
	 Note that it can only be less than 0 if !MEM_P (inner).  */
    }

  /* If INNER has a wider mode, and this is a constant extraction, try to
     make it smaller and adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && inner_mode != wanted_inner_mode
      && ! pos_rtx
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && MEM_P (inner)
      && ! mode_dependent_address_p (XEXP (inner, 0))
      && ! MEM_VOLATILE_P (inner))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
	 endian in both bits and bytes or little endian in bits and bytes.
	 If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
	 adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* We can now move to the desired byte.  */
      offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
		* GET_MODE_SIZE (wanted_inner_mode);
      pos %= GET_MODE_BITSIZE (wanted_inner_mode);

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
	  && is_mode != wanted_inner_mode)
	offset = (GET_MODE_SIZE (is_mode)
		  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      inner = adjust_address_nv (inner, wanted_inner_mode, offset);
    }

  /* If INNER is not memory, we can always get it into the proper mode.  If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (!MEM_P (inner))
    {
      if (GET_MODE (inner) != wanted_inner_mode
	  && (pos_rtx != 0
	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
	return 0;

      if (orig_pos < 0)
	return 0;

      inner = force_to_mode (inner, wanted_inner_mode,
			     pos_rtx
			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
				<< orig_pos),
			     0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert extraction to cheaper one - either
	 SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
	 cases.  */
      if (flag_expensive_optimizations
	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (GET_MODE (pos_rtx)))
		       >> 1))
		  == 0)))
	{
	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

	  /* Prefer ZERO_EXTENSION, since it gives more information to
	     backends.  */
	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
	    temp = temp1;
	}
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart (mode, new);

  return new;
}
/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */
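/* For instance, with COUNT == 3,
   (plus (ashift y (const_int 3)) (const_int 8))
   yields (plus y (const_int 1)): the shift is stripped and the constant
   is shifted right to compensate.  */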
static rtx
extract_left_shift (rtx x, int count)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
	 either the value being shifted if the shift count is equal to
	 COUNT or a shift for the difference.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= count)
	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
				     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return simplify_gen_unary (code, mode, tem, mode);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
	 make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return simplify_gen_binary (code, mode, tem,
				    GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}
/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the VAX that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */
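/* A typical conversion (illustrative): (and (lshiftrt x (const_int 8))
   (const_int 255)) is recognized below as the compound operation
   (zero_extract:SI x (const_int 8) (const_int 8)).  */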
static rtx
make_compound_operation (rtx x, enum rtx_code in_code)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
	       : ((code == COMPARE || COMPARISON_P (x))
		  && XEXP (x, 1) == const0_rtx) ? COMPARE
	       : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     nonzero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (x, 0), next_code);
	  new = gen_rtx_MULT (mode, new,
			      GEN_INT ((HOST_WIDE_INT) 1
				       << INTVAL (XEXP (x, 1))));
	}
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
				 0, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
					 next_code);
	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
				 0, in_code == COMPARE);
	}
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
		|| GET_CODE (XEXP (x, 0)) == IOR)
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  /* Apply the distributive law, and then try to make extractions.  */
	  new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
					     XEXP (x, 1)),
				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
					     XEXP (x, 1)));
	  new = make_compound_operation (new, in_code);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new,
				 (GET_MODE_BITSIZE (mode)
				  - INTVAL (XEXP (XEXP (x, 0), 1))),
				 NULL_RTX, i, 1, 0, in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && !have_insn_for (LSHIFTRT, mode)
	       && have_insn_for (ASHIFTRT, mode)
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    SUBST (XEXP (x, 0),
		   gen_rtx_ASHIFTRT (mode,
				     make_compound_operation
				     (XEXP (XEXP (x, 0), 0), next_code),
				     XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (have_insn_for (ASHIFTRT, mode)
	  && ! have_insn_for (LSHIFTRT, mode)
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new = gen_rtx_ASHIFTRT (mode,
				  make_compound_operation (XEXP (x, 0),
							   next_code),
				  XEXP (x, 1));
	  break;
	}

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
	  && GET_CODE (lhs) == ASHIFT
	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
	{
	  new = make_compound_operation (XEXP (lhs, 0), next_code);
	  new = make_extraction (mode, new,
				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
				 NULL_RTX, mode_width - INTVAL (rhs),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	  break;
	}

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
	 also do this for some cases of SIGN_EXTRACT, but it doesn't
	 seem worth the effort; the case checked for occurs on Alpha.  */

      if (!OBJECT_P (lhs)
	  && ! (GET_CODE (lhs) == SUBREG
		&& (OBJECT_P (SUBREG_REG (lhs))))
	  && GET_CODE (rhs) == CONST_INT
	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
	new = make_extraction (mode, make_compound_operation (new, next_code),
			       0, NULL_RTX, mode_width - INTVAL (rhs),
			       code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);

      {
	rtx simplified;
	simplified = simplify_subreg (GET_MODE (x), tem, GET_MODE (tem),
				      SUBREG_BYTE (x));

	if (simplified)
	  tem = simplified;

	if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
	    && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
	    && subreg_lowpart_p (x))
	  {
	    rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
				       0);

	    /* If we have something other than a SUBREG, we might have
	       done an expansion, so rerun ourselves.  */
	    if (GET_CODE (newer) != SUBREG)
	      newer = make_compound_operation (newer, in_code);

	    return newer;
	  }

	if (simplified)
	  return tem;
      }
      break;

    default:
      break;
    }

  if (new)
    {
      x = gen_lowpart (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = make_compound_operation (XEXP (x, i), next_code);
	SUBST (XEXP (x, i), new);
      }

  /* If this is a commutative operation, the changes to the operands
     may have made it noncanonical.  */
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      tem = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), tem);
    }

  return x;
}
/* Given M see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */
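/* Example: for M == 0x78 (binary 1111000), the first 1 bit from the
   right is at position 3 and the field is 4 bits wide, so the return
   value is 3 and *PLEN is set to 4.  */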
static int
get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & -m);
  int len = 0;

  if (pos >= 0)
    /* Now shift off the low-order zero bits and see if we have a
       power of two minus 1.  */
    len = exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    pos = -1;

  *plen = len;
  return pos;
}
/* If X refers to a register that equals REG in value, replace these
   references with REG.  */
static rtx
canon_reg_for_combine (rtx x, rtx reg)
{
  rtx op0, op1, op2;
  const char *fmt;
  int i;
  bool copied;

  enum rtx_code code = GET_CODE (x);
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      if (op0 != XEXP (x, 0))
	return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
				   GET_MODE (reg));
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
					GET_MODE (op0), op0, op1);
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      op2 = canon_reg_for_combine (XEXP (x, 2), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
	return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
				     GET_MODE (op0), op0, op1, op2);
      break;

    case RTX_OBJ:
      if (REG_P (x))
	{
	  if (rtx_equal_p (get_last_value (reg), x)
	      || rtx_equal_p (reg, get_last_value (x)))
	    return reg;
	  else
	    break;
	}

      /* fall through */

    default:
      fmt = GET_RTX_FORMAT (code);
      copied = false;
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (fmt[i] == 'e')
	  {
	    rtx op = canon_reg_for_combine (XEXP (x, i), reg);
	    if (op != XEXP (x, i))
	      {
		if (!copied)
		  {
		    copied = true;
		    x = copy_rtx (x);
		  }
		XEXP (x, i) = op;
	      }
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      {
		rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
		if (op != XVECEXP (x, i, j))
		  {
		    if (!copied)
		      {
			copied = true;
			x = copy_rtx (x);
		      }
		    XVECEXP (x, i, j) = op;
		  }
	      }
	  }

      break;
    }

  return x;
}
/* Return X converted to MODE.  If the value is already truncated to
   MODE we can just return a subreg even though in the general case we
   would need an explicit truncation.  */
static rtx
gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
{
  if (GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (mode)
      || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
				GET_MODE_BITSIZE (GET_MODE (x)))
      || (REG_P (x) && reg_truncated_to_mode (mode, x)))
    return gen_lowpart (mode, x);
  else
    return simplify_gen_unary (TRUNCATE, mode, x, GET_MODE (x));
}
/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */
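/* For instance, forcing (plus X (const_int 0x100)) to QImode with
   MASK == 0xff can discard the addition entirely, since adding 0x100
   cannot change the low 8 bits; the CONST_INT case below similarly
   reduces constants by ANDing them with MASK.  */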
7259 force_to_mode (rtx x
, enum machine_mode mode
, unsigned HOST_WIDE_INT mask
,
7262 enum rtx_code code
= GET_CODE (x
);
7263 int next_select
= just_select
|| code
== XOR
|| code
== NOT
|| code
== NEG
;
7264 enum machine_mode op_mode
;
7265 unsigned HOST_WIDE_INT fuller_mask
, nonzero
;
7268 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
7269 code below will do the wrong thing since the mode of such an
7270 expression is VOIDmode.
7272 Also do nothing if X is a CLOBBER; this can happen if X was
7273 the return value from a call to gen_lowpart. */
7274 if (code
== CALL
|| code
== ASM_OPERANDS
|| code
== CLOBBER
)
7277 /* We want to perform the operation is its present mode unless we know
7278 that the operation is valid in MODE, in which case we do the operation
7280 op_mode
= ((GET_MODE_CLASS (mode
) == GET_MODE_CLASS (GET_MODE (x
))
7281 && have_insn_for (code
, mode
))
7282 ? mode
: GET_MODE (x
));
7284 /* It is not valid to do a right-shift in a narrower mode
7285 than the one it came in with. */
7286 if ((code
== LSHIFTRT
|| code
== ASHIFTRT
)
7287 && GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (GET_MODE (x
)))
7288 op_mode
= GET_MODE (x
);
7290 /* Truncate MASK to fit OP_MODE. */
7292 mask
&= GET_MODE_MASK (op_mode
);
7294 /* When we have an arithmetic operation, or a shift whose count we
7295 do not know, we need to assume that all bits up to the highest-order
7296 bit in MASK will be needed. This is how we form such a mask. */
7297 if (mask
& ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1)))
7298 fuller_mask
= ~(unsigned HOST_WIDE_INT
) 0;
7300 fuller_mask
= (((unsigned HOST_WIDE_INT
) 1 << (floor_log2 (mask
) + 1))
7303 /* Determine what bits of X are guaranteed to be (non)zero. */
7304 nonzero
= nonzero_bits (x
, mode
);
7306 /* If none of the bits in X are needed, return a zero. */
7307 if (!just_select
&& (nonzero
& mask
) == 0 && !side_effects_p (x
))
7310 /* If X is a CONST_INT, return a new one. Do this here since the
7311 test below will fail. */
7312 if (GET_CODE (x
) == CONST_INT
)
7314 if (SCALAR_INT_MODE_P (mode
))
7315 return gen_int_mode (INTVAL (x
) & mask
, mode
);
7318 x
= GEN_INT (INTVAL (x
) & mask
);
7319 return gen_lowpart_common (mode
, x
);
7323 /* If X is narrower than MODE and we want all the bits in X's mode, just
7324 get X in the proper mode. */
7325 if (GET_MODE_SIZE (GET_MODE (x
)) < GET_MODE_SIZE (mode
)
7326 && (GET_MODE_MASK (GET_MODE (x
)) & ~mask
) == 0)
7327 return gen_lowpart (mode
, x
);
7332 /* If X is a (clobber (const_int)), return it since we know we are
7333 generating something that won't match. */
7340 x
= expand_compound_operation (x
);
7341 if (GET_CODE (x
) != code
)
7342 return force_to_mode (x
, mode
, mask
, next_select
);
7346 if (subreg_lowpart_p (x
)
7347 /* We can ignore the effect of this SUBREG if it narrows the mode or
7348 if the constant masks to zero all the bits the mode doesn't
7350 && ((GET_MODE_SIZE (GET_MODE (x
))
7351 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
7353 & GET_MODE_MASK (GET_MODE (x
))
7354 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x
)))))))
7355 return force_to_mode (SUBREG_REG (x
), mode
, mask
, next_select
);
7359 /* If this is an AND with a constant, convert it into an AND
7360 whose constant is the AND of that constant with MASK. If it
7361 remains an AND of MASK, delete it since it is redundant. */
7363 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
7365 x
= simplify_and_const_int (x
, op_mode
, XEXP (x
, 0),
7366 mask
& INTVAL (XEXP (x
, 1)));
7368 /* If X is still an AND, see if it is an AND with a mask that
7369 is just some low-order bits. If so, and it is MASK, we don't
7372 if (GET_CODE (x
) == AND
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
7373 && ((INTVAL (XEXP (x
, 1)) & GET_MODE_MASK (GET_MODE (x
)))
7377 /* If it remains an AND, try making another AND with the bits
7378 in the mode mask that aren't in MASK turned on. If the
7379 constant in the AND is wide enough, this might make a
7380 cheaper constant. */
7382 if (GET_CODE (x
) == AND
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
7383 && GET_MODE_MASK (GET_MODE (x
)) != mask
7384 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
)
7386 HOST_WIDE_INT cval
= (INTVAL (XEXP (x
, 1))
7387 | (GET_MODE_MASK (GET_MODE (x
)) & ~mask
));
7388 int width
= GET_MODE_BITSIZE (GET_MODE (x
));
7391 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
7392 number, sign extend it. */
7393 if (width
> 0 && width
< HOST_BITS_PER_WIDE_INT
7394 && (cval
& ((HOST_WIDE_INT
) 1 << (width
- 1))) != 0)
7395 cval
|= (HOST_WIDE_INT
) -1 << width
;
7397 y
= simplify_gen_binary (AND
, GET_MODE (x
),
7398 XEXP (x
, 0), GEN_INT (cval
));
7399 if (rtx_cost (y
, SET
) < rtx_cost (x
, SET
))
7409 /* In (and (plus FOO C1) M), if M is a mask that just turns off
7410 low-order bits (as in an alignment operation) and FOO is already
7411 aligned to that boundary, mask C1 to that boundary as well.
7412 This may eliminate that PLUS and, later, the AND. */
7415 unsigned int width
= GET_MODE_BITSIZE (mode
);
7416 unsigned HOST_WIDE_INT smask
= mask
;
7418 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
7419 number, sign extend it. */
7421 if (width
< HOST_BITS_PER_WIDE_INT
7422 && (smask
& ((HOST_WIDE_INT
) 1 << (width
- 1))) != 0)
7423 smask
|= (HOST_WIDE_INT
) -1 << width
;
7425 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
7426 && exact_log2 (- smask
) >= 0
7427 && (nonzero_bits (XEXP (x
, 0), mode
) & ~smask
) == 0
7428 && (INTVAL (XEXP (x
, 1)) & ~smask
) != 0)
7429 return force_to_mode (plus_constant (XEXP (x
, 0),
7430 (INTVAL (XEXP (x
, 1)) & smask
)),
7431 mode
, smask
, next_select
);
      /* ... fall through ...  */

    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
	 most significant bit in MASK since carries from those bits will
	 affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case MINUS:
      /* If X is (minus C Y) where C's least set bit is larger than any bit
	 in the mask, then we may replace with (neg Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
					 & -INTVAL (XEXP (x, 0))))
	      > mask))
	{
	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
				  GET_MODE (x));
	  return force_to_mode (x, mode, mask, next_select);
	}
      /* Similarly, if C contains every bit in the fuller_mask, then we may
	 replace with (not Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
	      == INTVAL (XEXP (x, 0))))
	{
	  x = simplify_gen_unary (NOT, GET_MODE (x),
				  XEXP (x, 1), GET_MODE (x));
	  return force_to_mode (x, mode, mask, next_select);
	}

      mask = fuller_mask;
      goto binop;
    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
	 operation which may be a bitfield extraction.  Ensure that the
	 constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
	       + floor_log2 (INTVAL (XEXP (x, 1))))
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && (INTVAL (XEXP (x, 1))
	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
	{
	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
			  << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
				      XEXP (XEXP (x, 0), 0), temp);
	  x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
				   XEXP (XEXP (x, 0), 1));
	  return force_to_mode (x, mode, mask, next_select);
	}
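      /* For instance (values hypothetical), with MASK covering the low bits:
	 (ior:SI (lshiftrt:SI FOO (const_int 8)) (const_int 3)) becomes
	 (lshiftrt:SI (ior:SI FOO (const_int 0x300)) (const_int 8)),
	 provided C2's bits lie within the nonzero bits of the shifted
	 operand (tested above); the shift now encloses the IOR, exposing a
	 possible bitfield extraction.  */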
    binop:
      /* For most binary operations, just propagate into the operation and
	 change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_or_truncate (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    next_select));
      op1 = gen_lowpart_or_truncate (op_mode,
				     force_to_mode (XEXP (x, 1), mode, mask,
						    next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = simplify_gen_binary (code, op_mode, op0, op1);
      break;
    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
	 However, we cannot do anything with shifts where we cannot
	 guarantee that the counts are smaller than the size of the mode
	 because such a count will have a different meaning in a
	 wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
	     && INTVAL (XEXP (x, 1)) >= 0
	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
	break;

      /* If the shift count is a constant and we can do arithmetic in
	 the mode of the shift, refine which bits we need.  Otherwise, use the
	 conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	mask >>= INTVAL (XEXP (x, 1));
      else
	mask = fuller_mask;

      op0 = gen_lowpart_or_truncate (op_mode,
				     force_to_mode (XEXP (x, 0), op_mode,
						    mask, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;
    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
	 this shift constant is valid for the host, and we can do arithmetic
	 in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  rtx inner = XEXP (x, 0);
	  unsigned HOST_WIDE_INT inner_mask;

	  /* Select the mask of the bits we need for the shift operand.  */
	  inner_mask = mask << INTVAL (XEXP (x, 1));

	  /* We can only change the mode of the shift if we can do arithmetic
	     in the mode of the shift and INNER_MASK is no wider than the
	     width of X's mode.  */
	  if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
	    op_mode = GET_MODE (x);

	  inner = force_to_mode (inner, op_mode, inner_mask, next_select);

	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
	    x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
	}
      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
	 shift and AND produces only copies of the sign bit (C2 is one less
	 than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  /* The shift puts one of the sign bit copies in the least significant
	     bit.  */
	  && ((INTVAL (XEXP (x, 1))
	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
	      >= GET_MODE_BITSIZE (GET_MODE (x)))
	  && exact_log2 (mask + 1) >= 0
	  /* Number of bits left after the shift must be more than the mask
	     needs.  */
	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)))
	  /* Must be more sign bit copies than the mask needs.  */
	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	      >= exact_log2 (mask + 1)))
	x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
				 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
					  - exact_log2 (mask + 1)));

      goto shiftrt;
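      /* Hypothetical example: if FOO has 28 sign-bit copies in SImode, then
	 (and:SI (lshiftrt:SI FOO (const_int 8)) (const_int 15)) selects four
	 bits that are all copies of the sign bit, so it is equivalent to
	 (lshiftrt:SI FOO (const_int 28)): 8 + 28 >= 32,
	 exact_log2 (15 + 1) == 4, and 8 + 4 <= 32.  */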
    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
	 all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (mask == ((unsigned HOST_WIDE_INT) 1
		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	return force_to_mode (XEXP (x, 0), mode, mask, next_select);
      /* If this is a shift by a constant, get a mask that contains those bits
	 that are not copies of the sign bit.  We then have two cases:  If
	 MASK only includes those bits, this can be a logical shift, which may
	 allow simplifications.  If MASK is a single-bit field not within
	 those bits, we are requesting a copy of the sign bit and hence can
	 shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int i;

	  /* If the considered data is wider than HOST_WIDE_INT, we can't
	     represent a mask for all its bits in a single scalar.
	     But we only care about the lower bits, so calculate these.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
	    {
	      nonzero = ~(HOST_WIDE_INT) 0;

	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		 is the number of bits a full-width mask would have set.
		 We need only shift if these are fewer than nonzero can
		 hold.  If not, we must keep all bits set in nonzero.  */

	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		  < HOST_BITS_PER_WIDE_INT)
		nonzero >>= INTVAL (XEXP (x, 1))
			    + HOST_BITS_PER_WIDE_INT
			    - GET_MODE_BITSIZE (GET_MODE (x));
	    }
	  else
	    {
	      nonzero = GET_MODE_MASK (GET_MODE (x));
	      nonzero >>= INTVAL (XEXP (x, 1));
	    }

	  if ((mask & ~nonzero) == 0)
	    {
	      x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x),
					XEXP (x, 0), INTVAL (XEXP (x, 1)));
	      if (GET_CODE (x) != ASHIFTRT)
		return force_to_mode (x, mode, mask, next_select);
	    }
	  else if ((i = exact_log2 (mask)) >= 0)
	    {
	      x = simplify_shift_const
		(NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
		 GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

	      if (GET_CODE (x) != ASHIFTRT)
		return force_to_mode (x, mode, mask, next_select);
	    }
	}
      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
	 even if the shift count isn't a constant.  */
      if (mask == 1)
	x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
				 XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
	 we don't care about, remove it.  Be sure the call above returned
	 something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && (INTVAL (XEXP (x, 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
	  && GET_CODE (XEXP (x, 0)) == ASHIFT
	  && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
			      next_select);

      break;
    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
	 in the mode of X, compute where the bits we care about are.
	 Otherwise, we can't do anything.  Don't change the mode of
	 the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
					    GET_MODE (x), GEN_INT (mask),
					    XEXP (x, 1));
	  if (temp && GET_CODE (temp) == CONST_INT)
	    SUBST (XEXP (x, 0),
		   force_to_mode (XEXP (x, 0), GET_MODE (x),
				  INTVAL (temp), next_select));
	}
      break;
    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
	 won't change the low-order bit.  */
      if (mask == 1)
	return force_to_mode (XEXP (x, 0), mode, mask, just_select);
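      /* E.g. (and:SI (neg:SI X) (const_int 1)) has the same low-order bit
	 as (and:SI X (const_int 1)), since -X and X agree modulo 2, so the
	 recursion simply drops the NEG.  */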
      /* We need any bits less significant than the most significant bit in
	 MASK since carries from those bits will affect the bits we are
	 interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
	 same as the XOR case above.  Ensure that the constant we form is not
	 wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
			       GET_MODE (x));
	  temp = simplify_gen_binary (XOR, GET_MODE (x),
				      XEXP (XEXP (x, 0), 0), temp);
	  x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
				   temp, XEXP (XEXP (x, 0), 1));

	  return force_to_mode (x, mode, mask, next_select);
	}
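      /* Hypothetical instance: with MASK == 0xff,
	 (not:SI (lshiftrt:SI FOO (const_int 4))) is rewritten as
	 (lshiftrt:SI (xor:SI FOO (const_int 0xff0)) (const_int 4));
	 complementing the eight interesting bits before the shift is the
	 same as complementing them afterwards.  */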
      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
	 use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_or_truncate (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = simplify_gen_unary (code, op_mode, op0, op_mode);
      break;
    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
	 which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
	  && GET_MODE (XEXP (x, 0)) == mode
	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
	  && (nonzero_bits (XEXP (x, 0), mode)
	      == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
	return force_to_mode (XEXP (x, 0), mode, mask, next_select);

      break;
    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
	 written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
	     gen_lowpart_or_truncate (GET_MODE (x),
				      force_to_mode (XEXP (x, 1), mode,
						     mask, next_select)));
      SUBST (XEXP (x, 2),
	     gen_lowpart_or_truncate (GET_MODE (x),
				      force_to_mode (XEXP (x, 2), mode,
						     mask, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_or_truncate (mode, x);
}
/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */

static rtx
if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;
  /* If we are comparing a value against zero, we are done.  */
  if ((code == NE || code == EQ)
      && XEXP (x, 1) == const0_rtx)
    {
      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
      return XEXP (x, 0);
    }
  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  else if (UNARY_P (x)
	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
      *pfalse = simplify_gen_unary (code, mode, false0,
				    GET_MODE (XEXP (x, 0)));
      return cond0;
    }
  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (BINARY_P (x))
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
	{
	  /* If if_then_else_cond returned zero, then true/false are the
	     same rtl.  We must copy one of them to prevent invalid rtl
	     sharing.  */
	  if (cond0 == 0)
	    true0 = copy_rtx (true0);
	  else if (cond1 == 0)
	    true1 = copy_rtx (true1);

	  if (COMPARISON_P (x))
	    {
	      *ptrue = simplify_gen_relational (code, mode, VOIDmode,
						true0, true1);
	      *pfalse = simplify_gen_relational (code, mode, VOIDmode,
						 false0, false1);
	    }
	  else
	    {
	      *ptrue = simplify_gen_binary (code, mode, true0, true1);
	      *pfalse = simplify_gen_binary (code, mode, false0, false1);
	    }

	  return cond0 ? cond0 : cond1;
	}
      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
	 operands is zero when the other is nonzero, and vice-versa,
	 and STORE_FLAG_VALUE is 1 or -1.  */

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
	      || code == UMAX)
	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx op0 = XEXP (XEXP (x, 0), 1);
	  rtx op1 = XEXP (XEXP (x, 1), 1);

	  cond0 = XEXP (XEXP (x, 0), 0);
	  cond1 = XEXP (XEXP (x, 1), 0);

	  if (COMPARISON_P (cond0)
	      && COMPARISON_P (cond1)
	      && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
		  || ((swap_condition (GET_CODE (cond0))
		       == reversed_comparison_code (cond1, NULL))
		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
	      && ! side_effects_p (x))
	    {
	      *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
	      *pfalse = simplify_gen_binary (MULT, mode,
					     (code == MINUS
					      ? simplify_gen_unary (NEG, mode,
								    op1, mode)
					      : op1),
					     const_true_rtx);

	      return cond0;
	    }
	}
      /* Similarly for MULT, AND and UMIN, except that for these the result
	 is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	  && (code == MULT || code == AND || code == UMIN)
	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
	{
	  cond0 = XEXP (XEXP (x, 0), 0);
	  cond1 = XEXP (XEXP (x, 1), 0);

	  if (COMPARISON_P (cond0)
	      && COMPARISON_P (cond1)
	      && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
		  || ((swap_condition (GET_CODE (cond0))
		       == reversed_comparison_code (cond1, NULL))
		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
	      && ! side_effects_p (x))
	    {
	      *ptrue = *pfalse = const0_rtx;
	      return cond0;
	    }
	}
    }
  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
	 canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
	return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
	{
	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
	  return XEXP (cond0, 0);
	}
    }
  /* If X is a SUBREG, we can narrow both the true and false values
     of the inner expression, if there is a condition.  */
  else if (code == SUBREG
	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
					       &true0, &false0)))
    {
      true0 = simplify_gen_subreg (mode, true0,
				   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      false0 = simplify_gen_subreg (mode, false0,
				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      if (true0 && false0)
	{
	  *ptrue = true0;
	  *pfalse = false0;
	  return cond0;
	}
    }
  /* If X is a constant, this isn't special and will cause confusions
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
     will be least confusing to the rest of the compiler.  */
  else if (mode == BImode)
    {
      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
      return x;
    }

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (x == constm1_rtx || x == const0_rtx
	   || (mode != VOIDmode
	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (SCALAR_INT_MODE_P (mode)
	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}
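/* An illustrative (hypothetical) use of if_then_else_cond: for
   X = (plus:SI (mult:SI (eq:SI (reg 65) (const_int 0)) (const_int 10))
		(mult:SI (ne:SI (reg 65) (const_int 0)) (const_int 20)))
   with STORE_FLAG_VALUE == 1, the PLUS-of-MULTs case above fires,
   *PTRUE becomes (const_int 10), *PFALSE becomes (const_int 20), and the
   returned condition is (eq:SI (reg 65) (const_int 0)).  */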
/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions.  */

static rtx
known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
{
  enum rtx_code code = GET_CODE (x);
  rtx temp;
  const char *fmt;
  int i, j;
  if (side_effects_p (x))
    return x;

  /* If either operand of the condition is a floating point value,
     then we have to avoid collapsing an EQ comparison.  */
  if (cond == EQ
      && rtx_equal_p (x, reg)
      && ! FLOAT_MODE_P (GET_MODE (x))
      && ! FLOAT_MODE_P (GET_MODE (val)))
    return val;

  if (cond == UNEQ && rtx_equal_p (x, reg))
    return val;

  /* If X is (abs REG) and we know something about REG's relationship
     with zero, we may be able to simplify this.  */

  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
    switch (cond)
      {
      case GE:  case GT:  case EQ:
	return XEXP (x, 0);
      case LT:  case LE:
	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
				   XEXP (x, 0),
				   GET_MODE (XEXP (x, 0)));
      default:
	break;
      }
  /* The only other cases we handle are MIN, MAX, and comparisons if the
     operands are the same as REG and VAL.  */

  else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
    {
      if (rtx_equal_p (XEXP (x, 0), val))
	cond = swap_condition (cond), temp = val, val = reg, reg = temp;

      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
	{
	  if (COMPARISON_P (x))
	    {
	      if (comparison_dominates_p (cond, code))
		return const_true_rtx;

	      code = reversed_comparison_code (x, NULL);
	      if (code != UNKNOWN
		  && comparison_dominates_p (cond, code))
		return const0_rtx;
	      else
		return x;
	    }
	  else if (code == SMAX || code == SMIN
		   || code == UMIN || code == UMAX)
	    {
	      int unsignedp = (code == UMIN || code == UMAX);

	      /* Do not reverse the condition when it is NE or EQ.
		 This is because we cannot conclude anything about
		 the value of 'SMAX (x, y)' when x is not equal to y,
		 but we can when x equals y.  */
	      if ((code == SMAX || code == UMAX)
		  && ! (cond == EQ || cond == NE))
		cond = reverse_condition (cond);

	      switch (cond)
		{
		case GE:   case GT:
		  return unsignedp ? x : XEXP (x, 1);
		case LE:   case LT:
		  return unsignedp ? x : XEXP (x, 0);
		case GEU:  case GTU:
		  return unsignedp ? XEXP (x, 1) : x;
		case LEU:  case LTU:
		  return unsignedp ? XEXP (x, 0) : x;
		default:
		  break;
		}
	    }
	}
    }
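  /* For example (hypothetical), if COND is LT with REG and VAL as its
     operands -- i.e. we know REG < VAL -- then for X = (smin:SI REG VAL)
     the LE/LT arm above returns REG, since the signed minimum of REG and
     VAL is REG in that case.  */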
  else if (code == SUBREG)
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
      rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);

      if (SUBREG_REG (x) != r)
	{
	  /* We must simplify subreg here, before we lose track of the
	     original inner_mode.  */
	  new = simplify_subreg (GET_MODE (x), r,
				 inner_mode, SUBREG_BYTE (x));
	  if (new)
	    return new;
	  else
	    SUBST (SUBREG_REG (x), r);
	}

      return x;
    }
  /* We don't have to handle SIGN_EXTEND here, because even in the
     case of replacing something with a modeless CONST_INT, a
     CONST_INT is already (supposed to be) a valid sign extension for
     its narrower mode, which implies it's already properly
     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
     story is different.  */
  else if (code == ZERO_EXTEND)
    {
      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
      rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);

      if (XEXP (x, 0) != r)
	{
	  /* We must simplify the zero_extend here, before we lose
	     track of the original inner_mode.  */
	  new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
					  r, inner_mode);
	  if (new)
	    return new;
	  else
	    SUBST (XEXP (x, 0), r);
	}

      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
						cond, reg, val));
    }

  return x;
}
/* See if X and Y are equal for the purposes of seeing if we can rewrite an
   assignment as a field assignment.  */

static int
rtx_equal_for_field_assignment_p (rtx x, rtx y)
{
  if (x == y || rtx_equal_p (x, y))
    return 1;

  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
     Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten.  */
  if (MEM_P (x) && GET_CODE (y) == SUBREG
      && MEM_P (SUBREG_REG (y))
      && rtx_equal_p (SUBREG_REG (y),
		      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
    return 1;

  if (MEM_P (y) && GET_CODE (x) == SUBREG
      && MEM_P (SUBREG_REG (x))
      && rtx_equal_p (SUBREG_REG (x),
		      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
    return 1;

  /* We used to see if get_last_value of X and Y were the same but that's
     not correct.  In one direction, we'll cause the assignment to have
     the wrong destination and in the other case, we'll import a register
     into this insn that might already have been dead.  So fail if none
     of the above cases are true.  */

  return 0;
}
/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */

static rtx
make_field_assignment (rtx x)
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx assign;
  rtx rhs, lhs;
  HOST_WIDE_INT c1;
  HOST_WIDE_INT pos;
  unsigned HOST_WIDE_INT len;
  rtx other;
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG.  */

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
				1, 1, 1, 0);
      if (assign != 0)
	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }
  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
      && subreg_lowpart_p (XEXP (src, 0))
      && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
      && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
      && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
      && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0,
				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
				1, 1, 1, 0);
      if (assign != 0)
	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }
  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field.  */
  if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
      && XEXP (XEXP (src, 0), 0) == const1_rtx
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
				1, 1, 1, 0);
      if (assign != 0)
	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
      return x;
    }
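  /* E.g. (registers hypothetical)
     (set (reg:SI 70) (ior:SI (ashift:SI (const_int 1) (reg:SI 71))
			      (reg:SI 70)))
     matches here and becomes
     (set (zero_extract:SI (reg:SI 70) (const_int 1) (reg:SI 71))
	  (const_int 1)).  */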
  /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
     SRC is an AND with all bits of that field set, then we can discard
     the AND.  */
  if (GET_CODE (dest) == ZERO_EXTRACT
      && GET_CODE (XEXP (dest, 1)) == CONST_INT
      && GET_CODE (src) == AND
      && GET_CODE (XEXP (src, 1)) == CONST_INT)
    {
      HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
      unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
      unsigned HOST_WIDE_INT ze_mask;

      if (width >= HOST_BITS_PER_WIDE_INT)
	ze_mask = -1;
      else
	ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;

      /* Complete overlap.  We can remove the source AND.  */
      if ((and_mask & ze_mask) == ze_mask)
	return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));

      /* Partial overlap.  We can reduce the source AND.  */
      if ((and_mask & ze_mask) != and_mask)
	{
	  mode = GET_MODE (src);
	  src = gen_rtx_AND (mode, XEXP (src, 0),
			     gen_int_mode (and_mask & ze_mask, mode));
	  return gen_rtx_SET (VOIDmode, dest, src);
	}
    }
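  /* Hypothetical example of the complete-overlap test: for
     (set (zero_extract:SI (reg 60) (const_int 8) (const_int 4))
	  (and:SI (reg 61) (const_int 255)))
     ZE_MASK is 0xff and AND_MASK is 0xff, so the AND is redundant and the
     stored value is simply (reg 61): only the low eight bits land in the
     field anyway.  */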
  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands.  */

  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
    return x;

  rhs = expand_compound_operation (XEXP (src, 0));
  lhs = expand_compound_operation (XEXP (src, 1));

  if (GET_CODE (rhs) == AND
      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
  else if (GET_CODE (lhs) == AND
	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
  else
    return x;

  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
    return x;

  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
  if (assign == 0)
    return x;

  /* The mode to use for the source is the mode of the assignment, or of
     what is inside a possible STRICT_LOW_PART.  */
  mode = (GET_CODE (assign) == STRICT_LOW_PART
	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));

  /* Shift OTHER right POS places and make it the source, restricting it
     to the proper length and mode.  */

  src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
						     GET_MODE (src),
						     other, pos),
			       dest);
  src = force_to_mode (src, mode,
		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
		       ? ~(unsigned HOST_WIDE_INT) 0
		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
		       0);

  /* If SRC is masked by an AND that does not make a difference in
     the value being stored, strip it.  */
  if (GET_CODE (assign) == ZERO_EXTRACT
      && GET_CODE (XEXP (assign, 1)) == CONST_INT
      && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
      && GET_CODE (src) == AND
      && GET_CODE (XEXP (src, 1)) == CONST_INT
      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
	  == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
    src = XEXP (src, 0);

  return gen_rtx_SET (VOIDmode, assign, src);
}
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so.  */

static rtx
apply_distributive_law (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  enum rtx_code inner_code;
  rtx lhs, rhs, other;
  rtx tem;

  /* Distributivity is not true for floating point as it can change the
     value.  So we don't do it unless -funsafe-math-optimizations.  */
  if (FLOAT_MODE_P (GET_MODE (x))
      && ! flag_unsafe_math_optimizations)
    return x;

  /* The outer operation can only be one of the following:  */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0);
  rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out
     fast.  */
  if (OBJECT_P (lhs) || OBJECT_P (rhs))
    return x;

  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute.  */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS.  */
      if (code == PLUS || code == MINUS)
	return x;
      break;

    case MULT:
      if (code != PLUS && code != MINUS)
	return x;
      break;

    case ASHIFT:
      /* This is also a multiply, so it distributes over everything.  */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations,
	 provided the inner modes and byte offsets are the same, this
	 is an extraction of a low-order part, we don't convert an fp
	 operation to int or vice versa, this is not a vector mode,
	 and we would not be converting a single-word operation into a
	 multi-word operation.  The latter test is not required, but
	 it prevents generating unneeded multi-word operations.  Some
	 of the previous tests are redundant given the latter test,
	 but are retained because they are required for correctness.

	 We produce the result slightly differently in this case.  */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
	  || ! subreg_lowpart_p (lhs)
	  || (GET_MODE_CLASS (GET_MODE (lhs))
	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
	  || (GET_MODE_SIZE (GET_MODE (lhs))
	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
	  || VECTOR_MODE_P (GET_MODE (lhs))
	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
	  /* Result might need to be truncated.  Don't change mode if
	     explicit truncation is needed.  */
	  || !TRULY_NOOP_TRUNCATION
	       (GET_MODE_BITSIZE (GET_MODE (x)),
		GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (lhs)))))
	return x;

      tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
				 SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative.  */
  if (COMMUTATIVE_ARITH_P (lhs)
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (COMMUTATIVE_ARITH_P (lhs)
	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (COMMUTATIVE_ARITH_P (lhs)
	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first.  */
  tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
    }

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return.  */
  return simplify_gen_binary (inner_code, GET_MODE (x),
			      apply_distributive_law (tem), other);
}
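/* The exception above matters because plain distribution would be wrong:
   (xor (ior A C) (ior B C)) is not (ior (xor A B) C).  When a bit of C is
   set, both IORs yield 1 and the XOR yields 0, which is exactly what
   ANDing with (not C) produces: (and (xor A B) (not C)).  */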
/* See if X is of the form (* (+ A B) C), and if so convert to
   (+ (* A C) (* B C)) and try to simplify.

   Most of the time, this results in no change.  However, if some of
   the operands are the same or inverses of each other, simplifications
   will result.

   For example, (and (ior A B) (not B)) can occur as the result of
   expanding a bit field assignment.  When we apply the distributive
   law to this, we get (ior (and (A (not B))) (and (B (not B)))),
   which then simplifies to (and (A (not B))).

   Note that no checks happen on the validity of applying the inverse
   distributive law.  This is pointless since we can do it in the
   few places where this routine is called.

   N is the index of the term that is decomposed (the arithmetic operation,
   i.e. (+ A B) in the first example above).  !N is the index of the term that
   is distributed, i.e. of C in the first example above.  */

static rtx
distribute_and_simplify_rtx (rtx x, int n)
{
  enum machine_mode mode;
  enum rtx_code outer_code, inner_code;
  rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;

  decomposed = XEXP (x, n);
  if (!ARITHMETIC_P (decomposed))
    return NULL_RTX;

  mode = GET_MODE (x);
  outer_code = GET_CODE (x);
  distributed = XEXP (x, !n);

  inner_code = GET_CODE (decomposed);
  inner_op0 = XEXP (decomposed, 0);
  inner_op1 = XEXP (decomposed, 1);

  /* Special case (and (xor B C) (not A)), which is equivalent to
     (xor (ior A B) (ior A C))  */
  if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
    {
      distributed = XEXP (distributed, 0);
      outer_code = IOR;

      /* Distribute the second term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
      new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
    }
  else
    {
      /* Distribute the first term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
      new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
    }

  tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
						     new_op0, new_op1));
  if (GET_CODE (tmp) != outer_code
      && rtx_cost (tmp, SET) < rtx_cost (x, SET))
    return tmp;

  return NULL_RTX;
}
/* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.  Return an equivalent form, if different from (and VAROP
   (const_int CONSTOP)).  Otherwise, return NULL_RTX.  */

static rtx
simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
			  unsigned HOST_WIDE_INT constop)
{
  unsigned HOST_WIDE_INT nonzero;
  unsigned HOST_WIDE_INT orig_constop;
  rtx orig_varop;
  int i;

  orig_varop = varop;
  orig_constop = constop;
  if (GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* Simplify VAROP knowing that we will be only looking at some of the
     bits in it.

     Note by passing in CONSTOP, we guarantee that the bits not set in
     CONSTOP are not significant and will never be examined.  We must
     ensure that is the case by explicitly masking out those bits
     before returning.  */
  varop = force_to_mode (varop, mode, constop, 0);

  /* If VAROP is a CLOBBER, we will fail so return it.  */
  if (GET_CODE (varop) == CLOBBER)
    return varop;

  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
     to VAROP and return the new constant.  */
  if (GET_CODE (varop) == CONST_INT)
    return gen_int_mode (INTVAL (varop) & constop, mode);

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE.  */

  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below.  */

  constop &= nonzero;

  /* If we don't have any bits left, return zero.  */
  if (constop == 0)
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with an ASHIFT.  */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
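  /* E.g. if nonzero_bits shows X is known to be 0 or 1 (hypothetically),
     (and:SI (neg:SI X) (const_int 4)) becomes (ashift:SI X (const_int 2)):
     -0 & 4 == 0 == 0 << 2 and -1 & 4 == 4 == 1 << 2.  */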
  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this.  */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    return
      gen_lowpart
	(mode,
	 apply_distributive_law
	 (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
			       simplify_and_const_int (NULL_RTX,
						       GET_MODE (varop),
						       XEXP (varop, 0),
						       constop),
			       simplify_and_const_int (NULL_RTX,
						       GET_MODE (varop),
						       XEXP (varop, 1),
						       constop))));

  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
     the AND and see if one of the operands simplifies to zero.  If so, we
     may eliminate it.  */

  if (GET_CODE (varop) == PLUS
      && exact_log2 (constop + 1) >= 0)
    {
      rtx o0, o1;

      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
      if (o0 == const0_rtx)
	return o1;
      if (o1 == const0_rtx)
	return o0;
    }

  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
  varop = gen_lowpart (mode, varop);
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* If we are only masking insignificant bits, return VAROP.  */
  if (constop == nonzero)
    return varop;

  if (varop == orig_varop && constop == orig_constop)
    return NULL_RTX;

  /* Otherwise, return an AND.  */
  return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
}
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.  */

static rtx
simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
			unsigned HOST_WIDE_INT constop)
{
  rtx tem = simplify_and_const_int_1 (mode, varop, constop);
  if (tem)
    return tem;

  if (!x)
    x = simplify_gen_binary (AND, GET_MODE (varop), varop,
			     gen_int_mode (constop, mode));
  if (GET_MODE (x) != mode)
    x = gen_lowpart (mode, x);
  return x;
}
/* Given a REG, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
   a shift, AND, or zero_extract, we can do better.  */

static rtx
reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
			      const_rtx known_x ATTRIBUTE_UNUSED,
			      enum machine_mode known_mode ATTRIBUTE_UNUSED,
			      unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
			      unsigned HOST_WIDE_INT *nonzero)
{
  rtx tem;
  reg_stat_type *rsp;

  /* If X is a register whose nonzero bits value is current, use it.
     Otherwise, if X is a register whose value we can find, use that
     value.  Otherwise, use the previously-computed global nonzero bits
     for this register.  */

  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  if (rsp->last_set_value != 0
      && (rsp->last_set_mode == mode
	  || (GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
	      && GET_MODE_CLASS (mode) == MODE_INT))
      && ((rsp->last_set_label >= label_tick_ebb_start
	   && rsp->last_set_label < label_tick)
	  || (rsp->last_set_label == label_tick
	      && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
	  || (REGNO (x) >= FIRST_PSEUDO_REGISTER
	      && REG_N_SETS (REGNO (x)) == 1
	      && !REGNO_REG_SET_P
		  (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
    {
      *nonzero &= rsp->last_set_nonzero_bits;
      return NULL;
    }

  tem = get_last_value (x);

  if (tem)
    {
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is narrower than MODE and TEM is a non-negative
	 constant that would appear negative in the mode of X,
	 sign-extend it for use in reg_nonzero_bits because some
	 machines (maybe most) will actually do the sign-extension
	 and this is the conservative approach.

	 ??? For 2.5, try to tighten up the MD files in this regard
	 instead of this kludge.  */

      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
	  && GET_CODE (tem) == CONST_INT
	  && INTVAL (tem) > 0
	  && 0 != (INTVAL (tem)
		   & ((HOST_WIDE_INT) 1
		      << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	tem = GEN_INT (INTVAL (tem)
		       | ((HOST_WIDE_INT) (-1)
			  << GET_MODE_BITSIZE (GET_MODE (x))));
#endif
      return tem;
    }
  else if (nonzero_sign_valid && rsp->nonzero_bits)
    {
      unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;

      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
	/* We don't know anything about the upper bits.  */
	mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
      *nonzero &= mask;
    }

  return NULL;
}
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static rtx
reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
				     const_rtx known_x ATTRIBUTE_UNUSED,
				     enum machine_mode known_mode
				     ATTRIBUTE_UNUSED,
				     unsigned int known_ret ATTRIBUTE_UNUSED,
				     unsigned int *result)
{
  rtx tem;
  reg_stat_type *rsp;

  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  if (rsp->last_set_value != 0
      && rsp->last_set_mode == mode
      && ((rsp->last_set_label >= label_tick_ebb_start
	   && rsp->last_set_label < label_tick)
	  || (rsp->last_set_label == label_tick
	      && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
	  || (REGNO (x) >= FIRST_PSEUDO_REGISTER
	      && REG_N_SETS (REGNO (x)) == 1
	      && !REGNO_REG_SET_P
		  (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
    {
      *result = rsp->last_set_sign_bit_copies;
      return NULL;
    }

  tem = get_last_value (x);
  if (tem != 0)
    return tem;

  if (nonzero_sign_valid && rsp->sign_bit_copies != 0
      && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
    *result = rsp->sign_bit_copies;

  return NULL;
}
/* Return the number of "extended" bits there are in X, when interpreted
   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
   unsigned quantities, this is the number of high-order zero bits.
   For signed quantities, this is the number of copies of the sign bit
   minus 1.  In both cases, this function returns the number of "spare"
   bits.  For example, if two quantities for which this function returns
   at least 1 are added, the addition is known not to overflow.

   This function will always return 0 unless called during combine, which
   implies that it must be called from a define_split.  */

static unsigned int
extended_count (const_rtx x, enum machine_mode mode, int unsignedp)
{
  if (nonzero_sign_valid == 0)
    return 0;

  return (unsignedp
	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	     ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
			       - floor_log2 (nonzero_bits (x, mode)))
	     : 0)
	  : num_sign_bit_copies (x, mode) - 1);
}
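/* For instance (hypothetical), if nonzero_bits (X, SImode) == 0xff, then
   for an unsigned use extended_count returns 31 - floor_log2 (0xff) == 24:
   there are 24 known-zero high-order bits to spare before an addition
   could overflow.  */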
/* This function is called from `simplify_shift_const' to merge two
   outer operations.  Specifically, we have already found that we need
   to perform operation *POP0 with constant *PCONST0 at the outermost
   position.  We would now like to also perform OP1 with constant CONST1
   (with *POP0 being done last).

   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
   the resulting operation.  *PCOMP_P is set to 1 if we would need to
   complement the innermost operand, otherwise it is unchanged.

   MODE is the mode in which the operation will be done.  No bits outside
   the width of this mode matter.  It is assumed that the width of this mode
   is smaller than or equal to HOST_BITS_PER_WIDE_INT.

   If *POP0 or OP1 is UNKNOWN, it means no operation is required.  Only NEG,
   PLUS, IOR, XOR, and AND are supported.  We may set *POP0 to SET if the
   proper result is simply *PCONST0.

   If the resulting operation cannot be expressed as one operation, we
   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */

static int
merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0,
		 enum rtx_code op1, HOST_WIDE_INT const1,
		 enum machine_mode mode, int *pcomp_p)
{
  enum rtx_code op0 = *pop0;
  HOST_WIDE_INT const0 = *pconst0;

  const0 &= GET_MODE_MASK (mode);
  const1 &= GET_MODE_MASK (mode);

  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
  if (op0 == AND)
    const1 &= const0;

  /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
     if OP0 is SET.  */

  if (op1 == UNKNOWN || op0 == SET)
    return 1;

  else if (op0 == UNKNOWN)
    op0 = op1, const0 = const1;

  else if (op0 == op1)
    {
      switch (op0)
	{
	case AND:
	  const0 &= const1;
	  break;
	case IOR:
	  const0 |= const1;
	  break;
	case XOR:
	  const0 ^= const1;
	  break;
	case PLUS:
	  const0 += const1;
	  break;
	case NEG:
	  op0 = UNKNOWN;
	  break;
	default:
	  break;
	}
    }

  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
    return 0;

  /* If the two constants aren't the same, we can't do anything.  The
     remaining six cases can all be done.  */
  else if (const0 != const1)
    return 0;

  else
    switch (op0)
      {
      case IOR:
	if (op1 == AND)
	  /* (a & b) | b == b */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) | b == a | b */
	  ;
	break;

      case XOR:
	if (op1 == AND)
	  /* (a & b) ^ b == (~a) & b */
	  op0 = AND, *pcomp_p = 1;
	else /* op1 == IOR */
	  /* (a | b) ^ b == a & ~b */
	  op0 = AND, const0 = ~const0;
	break;

      case AND:
	if (op1 == IOR)
	  /* (a | b) & b == b */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) & b == (~a) & b */
	  *pcomp_p = 1;
	break;

      default:
	break;
      }

  /* Check for NO-OP cases.  */
  const0 &= GET_MODE_MASK (mode);
  if (const0 == 0
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
    op0 = UNKNOWN;
  else if (const0 == 0 && op0 == AND)
    op0 = SET;
  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
	   && op0 == AND)
    op0 = UNKNOWN;

  /* ??? Slightly redundant with the above mask, but not entirely.
     Moving this above means we'd have to sign-extend the mode mask
     for the final test.  */
  const0 = trunc_int_for_mode (const0, mode);

  *pop0 = op0;
  *pconst0 = const0;

  return 1;
}
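/* A small worked example (values hypothetical): if *POP0 == XOR with
   *PCONST0 == 3 and we merge OP1 == XOR with CONST1 == 5, the op0 == op1
   case folds the constants to 3 ^ 5 == 6, so (xor (xor X 5) 3) can be
   performed as a single (xor X 6).  */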
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
   The result of the shift is RESULT_MODE.  Return NULL_RTX if we cannot
   simplify it.  Otherwise, return a simplified value.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */

static rtx
simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
			rtx varop, int orig_count)
{
  enum rtx_code orig_code = code;
  rtx orig_varop = varop;
  int count;
  enum machine_mode mode = result_mode;
  enum machine_mode shift_mode, tmode;
  unsigned int mode_words
    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
  /* We form (outer_op (code varop count) (outer_const)).  */
  enum rtx_code outer_op = UNKNOWN;
  HOST_WIDE_INT outer_const = 0;
  int complement_p = 0;
  rtx new, x;

  /* Make sure to truncate the "natural" shift on the way in.  We don't
     want to do this inside the loop as it makes it more difficult to
     combine shifts.  */
  if (SHIFT_COUNT_TRUNCATED)
    orig_count &= GET_MODE_BITSIZE (mode) - 1;

  /* If we were given an invalid count, don't do anything except exactly
     what was requested.  */

  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
    return NULL_RTX;

  count = orig_count;

  /* Unless one of the branches of the `if' in this loop does a `continue',
     we will `break' the loop after the `if'.  */

  while (count != 0)
    {
      /* If we have an operand of (clobber (const_int 0)), fail.  */
      if (GET_CODE (varop) == CLOBBER)
	return NULL_RTX;

      /* If we discovered we had to complement VAROP, leave.  Making a NOT
	 here would cause an infinite loop.  */
      if (complement_p)
	break;

      /* Convert ROTATERT to ROTATE.  */
      if (code == ROTATERT)
	{
	  unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
	  code = ROTATE;
	  if (VECTOR_MODE_P (result_mode))
	    count = bitsize / GET_MODE_NUNITS (result_mode) - count;
	  else
	    count = bitsize - count;
	}

      /* We need to determine what mode we will do the shift in.  If the
	 shift is a right shift or a ROTATE, we must always do it in the mode
	 it was originally done in.  Otherwise, we can do it in MODE, the
	 widest mode encountered.  */
      shift_mode
	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
	   ? result_mode : mode);

      /* Handle cases where the count is greater than the size of the mode
	 minus 1.  For ASHIFT, use the size minus one as the count (this can
	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
	 take the count modulo the size.  For other shifts, the result is
	 zero.

	 Since these shifts are being produced by the compiler by combining
	 multiple operations, each of which are defined, we know what the
	 result is supposed to be.  */

      if (count > (GET_MODE_BITSIZE (shift_mode) - 1))
	{
	  if (code == ASHIFTRT)
	    count = GET_MODE_BITSIZE (shift_mode) - 1;
	  else if (code == ROTATE || code == ROTATERT)
	    count %= GET_MODE_BITSIZE (shift_mode);
	  else
	    {
	      /* We can't simply return zero because there may be an
		 outer op.  */
	      varop = const0_rtx;
	      count = 0;
	      break;
	    }
	}

      /* An arithmetic right shift of a quantity known to be -1 or 0
	 is a no-op.  */
      if (code == ASHIFTRT
	  && (num_sign_bit_copies (varop, shift_mode)
	      == GET_MODE_BITSIZE (shift_mode)))
	{
	  count = 0;
	  break;
	}

      /* If we are doing an arithmetic right shift and discarding all but
	 the sign bit copies, this is equivalent to doing a shift by the
	 bitsize minus one.  Convert it into that shift because it will often
	 allow other simplifications.  */

      if (code == ASHIFTRT
	  && (count + num_sign_bit_copies (varop, shift_mode)
	      >= GET_MODE_BITSIZE (shift_mode)))
	count = GET_MODE_BITSIZE (shift_mode) - 1;
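      /* Hypothetical instance: if VAROP has 24 sign-bit copies in SImode,
	 (ashiftrt:SI VAROP (const_int 10)) discards everything except
	 sign-bit copies (10 + 24 >= 32), so COUNT is rewritten to 31 and
	 the shift computes the same all-zeros or all-ones result.  */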
      /* We simplify the tests below and elsewhere by converting
	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
	 `make_compound_operation' will convert it to an ASHIFTRT for
	 those machines (such as VAX) that don't have an LSHIFTRT.  */
      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
	  && code == ASHIFTRT
	  && ((nonzero_bits (varop, shift_mode)
	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
	      == 0))
	code = LSHIFTRT;

      if (((code == LSHIFTRT
	    && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
	    && !(nonzero_bits (varop, shift_mode) >> count))
	   || (code == ASHIFT
	       && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
	       && !((nonzero_bits (varop, shift_mode) << count)
		    & GET_MODE_MASK (shift_mode))))
	  && !side_effects_p (varop))
	varop = const0_rtx;

      switch (GET_CODE (varop))
	{
	case SIGN_EXTEND:
	case ZERO_EXTEND:
	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  new = expand_compound_operation (varop);
	  if (new != varop)
	    {
	      varop = new;
	      continue;
	    }
	  break;

	case MEM:
	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
	     minus the width of a smaller mode, we can do this with a
	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
	  if ((code == ASHIFTRT || code == LSHIFTRT)
	      && ! mode_dependent_address_p (XEXP (varop, 0))
	      && ! MEM_VOLATILE_P (varop)
	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
					 MODE_INT, 1)) != BLKmode)
	    {
	      new = adjust_address_nv (varop, tmode,
				       BYTES_BIG_ENDIAN ? 0
				       : count / BITS_PER_UNIT);

	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
				     : ZERO_EXTEND, mode, new);
	      count = 0;
	      continue;
	    }
	  break;

	case SUBREG:
	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
	     the same number of words as what we've seen so far.  Then store
	     the widest mode in MODE.  */
	  if (subreg_lowpart_p (varop)
	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
		  > GET_MODE_SIZE (GET_MODE (varop)))
	      && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
				  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		 == mode_words)
	    {
	      varop = SUBREG_REG (varop);
	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
		mode = GET_MODE (varop);
	      continue;
	    }
	  break;

	case MULT:
	  /* Some machines use MULT instead of ASHIFT because MULT
	     is cheaper.  But it is still better on those machines to
	     merge two shifts into one.  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
	    {
	      varop
		= simplify_gen_binary (ASHIFT, GET_MODE (varop),
				       XEXP (varop, 0),
				       GEN_INT (exact_log2 (
						INTVAL (XEXP (varop, 1)))));
	      continue;
	    }
	  break;

	case UDIV:
	  /* Similar, for when divides are cheaper.  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
	    {
	      varop
		= simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
				       XEXP (varop, 0),
				       GEN_INT (exact_log2 (
						INTVAL (XEXP (varop, 1)))));
	      continue;
	    }
	  break;

	case ASHIFTRT:
	  /* If we are extracting just the sign bit of an arithmetic
	     right shift, that shift is not needed.  However, the sign
	     bit of a wider mode may be different from what would be
	     interpreted as the sign bit in a narrower mode, so, if
	     the result is narrower, don't discard the shift.  */
	  if (code == LSHIFTRT
	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
	      && (GET_MODE_BITSIZE (result_mode)
		  >= GET_MODE_BITSIZE (GET_MODE (varop))))
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }

	  /* ... fall through ...  */

	case LSHIFTRT:
	case ASHIFT:
	case ROTATE:
	  /* Here we have two nested shifts.  The result is usually the
	     AND of a new shift with a mask.  We compute the result below.  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && INTVAL (XEXP (varop, 1)) >= 0
	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && !VECTOR_MODE_P (result_mode))
	    {
	      enum rtx_code first_code = GET_CODE (varop);
	      unsigned int first_count = INTVAL (XEXP (varop, 1));
	      unsigned HOST_WIDE_INT mask;
	      rtx mask_rtx;

	      /* We have one common special case.  We can't do any merging if
		 the inner code is an ASHIFTRT of a smaller mode.  However, if
		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
		 we can convert it to
		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
		 This simplifies certain SIGN_EXTEND operations.  */
	      if (code == ASHIFT && first_code == ASHIFTRT
		  && count == (GET_MODE_BITSIZE (result_mode)
			       - GET_MODE_BITSIZE (GET_MODE (varop))))
		{
		  /* C3 has the low-order C1 bits zero.  */

		  mask = (GET_MODE_MASK (mode)
			  & ~(((HOST_WIDE_INT) 1 << first_count) - 1));

		  varop = simplify_and_const_int (NULL_RTX, result_mode,
						  XEXP (varop, 0), mask);
		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
						varop, count);
		  count = first_count;
		  code = ASHIFTRT;
		  continue;
		}
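	      /* Concretely (modes hypothetical): with M1 == SImode and
		 M2 == HImode, C2 == 32 - 16 == 16, so
		 (ashift:SI (subreg:SI (ashiftrt:HI FOO C1) 0) (const_int 16))
		 becomes an SImode ASHIFTRT by C1 of an ASHIFT of the masked
		 low part, letting later passes recognize a plain sign
		 extension.  */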
9242 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9243 than C1 high-order bits equal to the sign bit, we can convert
9244 this to either an ASHIFT or an ASHIFTRT depending on the
9247 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9249 if (code
== ASHIFTRT
&& first_code
== ASHIFT
9250 && GET_MODE (varop
) == shift_mode
9251 && (num_sign_bit_copies (XEXP (varop
, 0), shift_mode
)
9254 varop
= XEXP (varop
, 0);
9255 count
-= first_count
;
9265 /* There are some cases we can't do. If CODE is ASHIFTRT,
9266 we can only do this if FIRST_CODE is also ASHIFTRT.
9268 We can't do the case when CODE is ROTATE and FIRST_CODE is
9271 If the mode of this shift is not the mode of the outer shift,
9272 we can't do this if either shift is a right shift or ROTATE.
9274 Finally, we can't do any of these if the mode is too wide
9275 unless the codes are the same.
9277 Handle the case where the shift codes are the same
9280 if (code
== first_code
)
9282 if (GET_MODE (varop
) != result_mode
9283 && (code
== ASHIFTRT
|| code
== LSHIFTRT
9287 count
+= first_count
;
9288 varop
= XEXP (varop
, 0);
9292 if (code
== ASHIFTRT
9293 || (code
== ROTATE
&& first_code
== ASHIFTRT
)
9294 || GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
9295 || (GET_MODE (varop
) != result_mode
9296 && (first_code
== ASHIFTRT
|| first_code
== LSHIFTRT
9297 || first_code
== ROTATE
9298 || code
== ROTATE
)))
	      /* To compute the mask to apply after the shift, shift the
		 nonzero bits of the inner shift the same way the
		 outer shift will.  */

	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));

	      mask_rtx
		= simplify_const_binary_operation (code, result_mode, mask_rtx,
						   GEN_INT (count));

	      /* Give up if we can't compute an outer operation to use.  */
	      if (mask_rtx == 0
		  || GET_CODE (mask_rtx) != CONST_INT
		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
					INTVAL (mask_rtx),
					result_mode, &complement_p))
		break;
	      /* If the shifts are in the same direction, we add the
		 counts.  Otherwise, we subtract them.  */
	      if ((code == ASHIFTRT || code == LSHIFTRT)
		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
		count += first_count;
	      else
		count -= first_count;

	      /* If COUNT is positive, the new shift is usually CODE,
		 except for the two exceptions below, in which case it is
		 FIRST_CODE.  If the count is negative, FIRST_CODE should
		 always be used.  */
	      if (count > 0
		  && ((first_code == ROTATE && code == ASHIFT)
		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
		code = first_code;
	      else if (count < 0)
		code = first_code, count = -count;

	      varop = XEXP (varop, 0);
	      continue;
	    }
	  /* If we have (A << B << C) for any shift, we can convert this to
	     (A << C << B).  This wins if A is a constant.  Only try this if
	     B is not a constant.  */

	  else if (GET_CODE (varop) == code
		   && GET_CODE (XEXP (varop, 0)) == CONST_INT
		   && GET_CODE (XEXP (varop, 1)) != CONST_INT)
	    {
	      rtx new = simplify_const_binary_operation (code, mode,
							 XEXP (varop, 0),
							 GEN_INT (count));
	      varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
	      count = 0;
	      continue;
	    }
	  break;
	case NOT:
	  if (VECTOR_MODE_P (mode))
	    break;

	  /* Make this fit the case below.  */
	  varop = gen_rtx_XOR (mode, XEXP (varop, 0),
			       GEN_INT (GET_MODE_MASK (mode)));
	  continue;
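	  /* For example, (not:QI X) becomes (xor:QI X (const_int 255)),
	     since GET_MODE_MASK (QImode) == 0xff; the AND/IOR/XOR handling
	     below can then move the constant outside the shift.  */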
	case IOR:
	case AND:
	case XOR:
	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
	     with C the size of VAROP - 1 and the shift is logical if
	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
	     we have an (le X 0) operation.  If we have an arithmetic shift
	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */

	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	      && (code == LSHIFTRT || code == ASHIFTRT)
	      && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
	    {
	      count = 0;
	      varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
				  const0_rtx);

	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
		varop = gen_rtx_NEG (GET_MODE (varop), varop);

	      continue;
	    }
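	  /* To see why: the sign bit of (ior (plus X -1) X) is set exactly
	     when X <= 0 -- (plus X -1) is negative for X <= 0, and X itself
	     is negative for X < 0 -- so shifting that sign bit down to
	     bit 0 yields precisely the (le X 0) flag value.  */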
	  /* If we have (shift (logical)), move the logical to the outside
	     to allow it to possibly combine with another logical and the
	     shift to combine with another shift.  This also canonicalizes to
	     what a ZERO_EXTRACT looks like.  Also, some machines have
	     (and (shift)) insns.  */

	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      /* We can't do this if we have (ashiftrt (xor)) and the
		 constant has its sign bit set in shift_mode.  */
	      && !(code == ASHIFTRT && GET_CODE (varop) == XOR
		   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
					      shift_mode))
	      && (new = simplify_const_binary_operation (code, result_mode,
							 XEXP (varop, 1),
							 GEN_INT (count))) != 0
	      && GET_CODE (new) == CONST_INT
	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
				  INTVAL (new), result_mode, &complement_p))
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }
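	  /* For example, (lshiftrt:SI (and:SI X (const_int 0xff00)) 8)
	     becomes (and:SI (lshiftrt:SI X 8) (const_int 0xff)): the mask
	     constant is simply shifted the same way as X.  */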
	  /* If we can't do that, try to simplify the shift in each arm of the
	     logical expression, make a new logical expression, and apply
	     the inverse distributive law.  This also can't be done
	     for some (ashiftrt (xor)).  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && !(code == ASHIFTRT && GET_CODE (varop) == XOR
		   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
					      shift_mode)))
	    {
	      rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
					      XEXP (varop, 0), count);
	      rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
					      XEXP (varop, 1), count);

	      varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
					   lhs, rhs);
	      varop = apply_distributive_law (varop);

	      count = 0;
	      continue;
	    }
	  break;
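	  /* For example, (ashift:SI (ior:SI A B) 2) is rebuilt as
	     (ior:SI (ashift:SI A 2) (ashift:SI B 2)); if either arm then
	     simplifies, apply_distributive_law can refold the result.  */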
	case EQ:
	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
	     says that the sign bit can be tested, FOO has mode MODE, C is
	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
	     that may be nonzero.  */
	  if (code == LSHIFTRT
	      && XEXP (varop, 1) == const0_rtx
	      && GET_MODE (XEXP (varop, 0)) == result_mode
	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
	      && STORE_FLAG_VALUE == -1
	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
	      && merge_outer_ops (&outer_op, &outer_const, XOR,
				  (HOST_WIDE_INT) 1, result_mode,
				  &complement_p))
	    {
	      varop = XEXP (varop, 0);
	      count = 0;
	      continue;
	    }
	  break;
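	  /* To see why: with FOO known to be 0 or 1 and STORE_FLAG_VALUE
	     of -1, (eq FOO 0) is -1 for FOO == 0 and 0 otherwise, so
	     (lshiftrt:SI (eq FOO 0) 31) is 1 exactly when FOO == 0,
	     which is (xor FOO 1).  */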
	case NEG:
	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
	     than the number of bits in the mode is equivalent to A.  */
	  if (code == LSHIFTRT
	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
	    {
	      varop = XEXP (varop, 0);
	      count = 0;
	      continue;
	    }

	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
	     NEG outside to allow shifts to combine.  */
	  if (code == ASHIFT
	      && merge_outer_ops (&outer_op, &outer_const, NEG,
				  (HOST_WIDE_INT) 0, result_mode,
				  &complement_p))
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }
	  break;
	case PLUS:
	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
	     is one less than the number of bits in the mode is
	     equivalent to (xor A 1).  */
	  if (code == LSHIFTRT
	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
	      && XEXP (varop, 1) == constm1_rtx
	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
	      && merge_outer_ops (&outer_op, &outer_const, XOR,
				  (HOST_WIDE_INT) 1, result_mode,
				  &complement_p))
	    {
	      count = 0;
	      varop = XEXP (varop, 0);
	      continue;
	    }
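	  /* To see why: with A known to be 0 or 1, (plus A -1) is either -1
	     (all ones) or 0, so its top bit, extracted by the LSHIFTRT, is
	     1 - A, i.e. (xor A 1).  */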
	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
	     that might be nonzero in BAR are those being shifted out and those
	     bits are known zero in FOO, we can replace the PLUS with FOO.
	     Similarly in the other operand order.  This code occurs when
	     we are computing the size of a variable-size array.  */

	  if ((code == ASHIFTRT || code == LSHIFTRT)
	      && count < HOST_BITS_PER_WIDE_INT
	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
	      && (nonzero_bits (XEXP (varop, 1), result_mode)
		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }
	  else if ((code == ASHIFTRT || code == LSHIFTRT)
		   && count < HOST_BITS_PER_WIDE_INT
		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
			    >> count)
		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
			    & nonzero_bits (XEXP (varop, 1),
					    result_mode)))
	    {
	      varop = XEXP (varop, 1);
	      continue;
	    }
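	  /* For example, (lshiftrt:SI (plus:SI FOO BAR) 3) where BAR has
	     only its low three bits possibly nonzero and FOO's low three
	     bits are known zero: the addition cannot carry into bit 3 and
	     BAR's bits are all shifted out, so the result equals
	     (lshiftrt:SI FOO 3).  */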
	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
	  if (code == ASHIFT
	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && (new = simplify_const_binary_operation (ASHIFT, result_mode,
							 XEXP (varop, 1),
							 GEN_INT (count))) != 0
	      && GET_CODE (new) == CONST_INT
	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
				  INTVAL (new), result_mode, &complement_p))
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }
	  /* Check for 'PLUS signbit', which is the canonical form of 'XOR
	     signbit', and attempt to change the PLUS to an XOR and move it to
	     the outer operation as is done above in the AND/IOR/XOR case
	     leg for shift(logical).  See details in logical handling above
	     for reasoning in doing so.  */
	  if (code == LSHIFTRT
	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && mode_signbit_p (result_mode, XEXP (varop, 1))
	      && (new = simplify_const_binary_operation (code, result_mode,
							 XEXP (varop, 1),
							 GEN_INT (count))) != 0
	      && GET_CODE (new) == CONST_INT
	      && merge_outer_ops (&outer_op, &outer_const, XOR,
				  INTVAL (new), result_mode, &complement_p))
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }

	  break;
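	  /* Why a PLUS of the sign bit is an XOR: adding 0x80000000 in
	     SImode flips only the sign bit, since any carry out of that
	     bit propagates out of the word entirely.  */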
	case MINUS:
	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
	     with C the size of VAROP - 1 and the shift is logical if
	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
	     we have a (gt X 0) operation.  If the shift is arithmetic with
	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
	     we have a (neg (gt X 0)) operation.  */

	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
	      && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
	      && (code == LSHIFTRT || code == ASHIFTRT)
	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
	    {
	      count = 0;
	      varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
				  const0_rtx);

	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
		varop = gen_rtx_NEG (GET_MODE (varop), varop);

	      continue;
	    }
	  break;
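	  /* To see why: with C == 31 in SImode, (ashiftrt X 31) is 0 for
	     X >= 0 and -1 for X < 0, so (minus (ashiftrt X 31) X) is -X
	     for X >= 0 and -1 - X (nonnegative) for X < 0; its sign bit
	     is therefore set exactly when X > 0.  */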
	case TRUNCATE:
	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
	     if the truncate does not affect the value.  */
	  if (code == LSHIFTRT
	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
	    {
	      rtx varop_inner = XEXP (varop, 0);

	      varop_inner
		= gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
				    XEXP (varop_inner, 0),
				    GEN_INT
				    (count + INTVAL (XEXP (varop_inner, 1))));
	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
	      count = 0;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }
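	  /* For example, (lshiftrt:QI (truncate:QI (lshiftrt:SI X 28)) 2)
	     becomes (truncate:QI (lshiftrt:SI X 30)): the inner count 28
	     is at least 32 - 8, so the truncation discards nothing.  */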
  /* We need to determine what mode to do the shift in.  If the shift is
     a right shift or ROTATE, we must always do it in the mode it was
     originally done in.  Otherwise, we can do it in MODE, the widest mode
     encountered.  The code we care about is that of the shift that will
     actually be done, not the shift that was originally requested.  */
  shift_mode
    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
       ? result_mode : mode);
  /* We have now finished analyzing the shift.  The result should be
     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
     OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
     to the result of the shift.  OUTER_CONST is the relevant constant,
     but we must turn off all bits turned off in the shift.  */

  if (outer_op == UNKNOWN
      && orig_code == code && orig_count == count
      && varop == orig_varop
      && shift_mode == GET_MODE (varop))
    return NULL_RTX;

  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
  varop = gen_lowpart (shift_mode, varop);
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
    return NULL_RTX;
  /* If we have an outer operation and we just made a shift, it is
     possible that we could have simplified the shift were it not
     for the outer operation.  So try to do the simplification
     recursively.  */

  if (outer_op != UNKNOWN)
    x = simplify_shift_const_1 (code, shift_mode, varop, count);
  else
    x = NULL_RTX;

  if (x == NULL_RTX)
    x = simplify_gen_binary (code, shift_mode, varop, GEN_INT (count));
  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
     turn off all the bits that the shift would have turned off.  */
  if (orig_code == LSHIFTRT && result_mode != shift_mode)
    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
				GET_MODE_MASK (result_mode) >> orig_count);

  /* Do the remainder of the processing in RESULT_MODE.  */
  x = gen_lowpart_or_truncate (result_mode, x);
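  /* For example, if a QImode LSHIFTRT by 2 was widened to SImode, the
     result is masked with GET_MODE_MASK (QImode) >> 2 == 0x3f, clearing
     exactly the bits a real QImode shift would have cleared.  */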
  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
     operation.  */
  if (complement_p)
    x = simplify_gen_unary (NOT, result_mode, x, result_mode);

  if (outer_op != UNKNOWN)
    {
      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
	outer_const = trunc_int_for_mode (outer_const, result_mode);

      if (outer_op == AND)
	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
      else if (outer_op == SET)
	{
	  /* This means that we have determined that the result is
	     equivalent to a constant.  This should be rare.  */
	  if (!side_effects_p (x))
	    x = GEN_INT (outer_const);
	}
      else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
	x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
      else
	x = simplify_gen_binary (outer_op, result_mode, x,
				 GEN_INT (outer_const));
    }

  return x;
}
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
   The result of the shift is RESULT_MODE.  If we cannot simplify it,
   return X or, if it is NULL, synthesize the expression with
   simplify_gen_binary.  Otherwise, return a simplified value.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */

static rtx
simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
		      rtx varop, int count)
{
  rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
  if (tem)
    return tem;

  if (!x)
    x = simplify_gen_binary (code, GET_MODE (varop), varop, GEN_INT (count));
  if (GET_MODE (x) != result_mode)
    x = gen_lowpart (result_mode, x);
  return x;
}
/* Like recog, but we receive the address of a pointer to a new pattern.
   We try to match the rtx that the pointer points to.
   If that fails, we may try to modify or replace the pattern,
   storing the replacement into the same pointer object.

   Modifications include deletion or addition of CLOBBERs.

   PNOTES is a pointer to a location where any REG_UNUSED notes added for
   the CLOBBERs are placed.

   The value is the final insn code from the pattern ultimately matched,
   or -1.  */

static int
recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
{
  rtx pat = *pnewpat;
  int insn_code_number;
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;
  rtx old_notes, old_pat;
  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
	return -1;

  old_pat = PATTERN (insn);
  old_notes = REG_NOTES (insn);
  PATTERN (insn) = pat;
  REG_NOTES (insn) = 0;

  insn_code_number = recog (pat, insn, &num_clobbers_to_add);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (insn_code_number < 0)
	fputs ("Failed to match this instruction:\n", dump_file);
      else
	fputs ("Successfully matched this instruction:\n", dump_file);
      print_rtl_single (dump_file, pat);
    }
  /* If it isn't, there is the possibility that we previously had an insn
     that clobbered some register as a side effect, but the combined
     insn doesn't need to do that.  So try once more without the clobbers
     unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
	  {
	    if (i != pos)
	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
	    pos++;
	  }

      SUBST_INT (XVECLEN (pat, 0), pos);

      if (pos == 1)
	pat = XVECEXP (pat, 0, 0);

      PATTERN (insn) = pat;
      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (insn_code_number < 0)
	    fputs ("Failed to match this instruction:\n", dump_file);
	  else
	    fputs ("Successfully matched this instruction:\n", dump_file);
	  print_rtl_single (dump_file, pat);
	}
    }

  PATTERN (insn) = old_pat;
  REG_NOTES (insn) = old_notes;
  /* Recognize all noop sets, these will be killed by followup pass.  */
  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
				     rtvec_alloc (GET_CODE (pat) == PARALLEL
						  ? (XVECLEN (pat, 0)
						     + num_clobbers_to_add)
						  : num_clobbers_to_add + 1));

      if (GET_CODE (pat) == PARALLEL)
	for (i = 0; i < XVECLEN (pat, 0); i++)
	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
	XVECEXP (newpat, 0, 0) = pat;

      add_clobbers (newpat, insn_code_number);

      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
	   i < XVECLEN (newpat, 0); i++)
	{
	  if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
	    return -1;
	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
	    {
	      gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
	      notes = gen_rtx_EXPR_LIST (REG_UNUSED,
					 XEXP (XVECEXP (newpat, 0, i), 0),
					 notes);
	    }
	}
      pat = newpat;
    }

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}
/* Like gen_lowpart_general but for use by combine.  In combine it
   is not possible to create any new pseudoregs.  However, it is
   safe to create invalid memory addresses, because combine will
   try to recognize them and all they will do is make the combine
   attempt fail.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

static rtx
gen_lowpart_for_combine (enum machine_mode omode, rtx x)
{
  enum machine_mode imode = GET_MODE (x);
  unsigned int osize = GET_MODE_SIZE (omode);
  unsigned int isize = GET_MODE_SIZE (imode);
  rtx result;

  if (omode == imode)
    return x;
  /* Return identity if this is a CONST or symbolic reference.  */
  if (omode == Pmode
      && (GET_CODE (x) == CONST
	  || GET_CODE (x) == SYMBOL_REF
	  || GET_CODE (x) == LABEL_REF))
    return x;

  /* We can only support MODE being wider than a word if X is a
     constant integer or has a mode the same size.  */
  if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
      && ! ((imode == VOIDmode
	     && (GET_CODE (x) == CONST_INT
		 || GET_CODE (x) == CONST_DOUBLE))
	    || isize == osize))
    goto fail;

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
    {
      x = SUBREG_REG (x);

      /* For use in case we fall down into the address adjustments
	 further below, we need to adjust the known mode and size of
	 x; imode and isize, since we just adjusted x.  */
      imode = GET_MODE (x);

      if (imode == omode)
	return x;

      isize = GET_MODE_SIZE (imode);
    }

  result = gen_lowpart_common (omode, x);

  if (result)
    return result;

  if (MEM_P (x))
    {
      int offset = 0;
      /* Refuse to work on a volatile memory ref or one with a mode-dependent
	 address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
	goto fail;

      /* If we want to refer to something bigger than the original memref,
	 generate a paradoxical subreg instead.  That will force a reload
	 of the original memref X.  */
      if (isize < osize)
	return gen_rtx_SUBREG (omode, x, 0);

      if (WORDS_BIG_ENDIAN)
	offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);

      /* Adjust the address so that the address-after-the-data is
	 unchanged.  */
      if (BYTES_BIG_ENDIAN)
	offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);

      return adjust_address_nv (x, omode, offset);
    }
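      /* For example, taking the SImode lowpart of a DImode memref with
	 UNITS_PER_WORD == 4 on a WORDS_BIG_ENDIAN target gives
	 offset == MAX (8, 4) - MAX (4, 4) == 4: the low word of a
	 big-endian double word is the one at the higher address.  */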
  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (COMPARISON_P (x))
    return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int offset = 0;
      rtx res;

      offset = subreg_lowpart_offset (omode, imode);
      if (imode == VOIDmode)
	{
	  imode = int_mode_for_mode (omode);
	  x = gen_lowpart_common (imode, x);
	  if (x == NULL)
	    goto fail;
	}
      res = simplify_gen_subreg (omode, x, imode, offset);
      if (res)
	return res;
    }

 fail:
  return gen_rtx_CLOBBER (imode, const0_rtx);
}
/* Simplify a comparison between *POP0 and *POP1 where CODE is the
   comparison code that will be tested.

   The result is a possibly different comparison code to use.  *POP0 and
   *POP1 may be updated.

   It is possible that we might detect that a comparison is either always
   true or always false.  However, we do not perform general constant
   folding in combine, so this knowledge isn't useful.  Such tautologies
   should have been detected earlier.  Hence we ignore all such cases.  */

static enum rtx_code
simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
{
  rtx op0 = *pop0;
  rtx op1 = *pop1;
  rtx tem, tem1;
  int i;
  enum machine_mode mode, tmode;
  /* Try a few ways of applying the same transformation to both operands.  */
  while (1)
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* The test below this one won't handle SIGN_EXTENDs on these machines,
	 so check specially.  */
      if (code != GTU && code != GEU && code != LTU && code != LEU
	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && XEXP (op0, 1) == XEXP (op1, 1)
	  && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
	  && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
	  && (INTVAL (XEXP (op0, 1))
	      == (GET_MODE_BITSIZE (GET_MODE (op0))
		  - (GET_MODE_BITSIZE
		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
	{
	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
	}
#endif
      /* If both operands are the same constant shift, see if we can ignore the
	 shift.  We can if the shift is a rotate or if the bits shifted out of
	 this shift are known to be zero for both inputs and if the type of
	 comparison is compatible with the shift.  */
      if (GET_CODE (op0) == GET_CODE (op1)
	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
		  && (code != GT && code != LT && code != GE && code != LE))
	      || (GET_CODE (op0) == ASHIFTRT
		  && (code != GTU && code != LTU
		      && code != GEU && code != LEU)))
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) >= 0
	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
	  && XEXP (op0, 1) == XEXP (op1, 1))
	{
	  enum machine_mode mode = GET_MODE (op0);
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
	  int shift_count = INTVAL (XEXP (op0, 1));

	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
	    mask &= (mask >> shift_count) << shift_count;
	  else if (GET_CODE (op0) == ASHIFT)
	    mask = (mask & (mask << shift_count)) >> shift_count;

	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
	  else
	    break;
	}
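      /* For example, comparing (ashift:QI X 2) with (ashift:QI Y 2) for
	 equality: mask == (0xff & (0xff << 2)) >> 2 == 0x3f, so if the
	 nonzero bits of both X and Y fit in 0x3f, no information is
	 shifted out and we can compare X with Y directly.  */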
      /* If both operands are AND's of a paradoxical SUBREG by constant, the
	 SUBREGs are of the same mode, and, in both cases, the AND would
	 be redundant if the comparison was done in the narrower mode,
	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
	 and the operand's possibly nonzero bits are 0xffffff01; in that case
	 if we only care about QImode, we don't need the AND).  This case
	 occurs if the output mode of an scc insn is not SImode and
	 STORE_FLAG_VALUE == 1 (e.g., the 386).

	 Similarly, check for a case where the AND's are ZERO_EXTEND
	 operations from some narrower mode even though a SUBREG is not
	 present.  */

      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
	{
	  rtx inner_op0 = XEXP (op0, 0);
	  rtx inner_op1 = XEXP (op1, 0);
	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
	  int changed = 0;

	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
	      && (GET_MODE (SUBREG_REG (inner_op0))
		  == GET_MODE (SUBREG_REG (inner_op1)))
	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
		  <= HOST_BITS_PER_WIDE_INT)
	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
					      GET_MODE (SUBREG_REG (inner_op0)))))
	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
					      GET_MODE (SUBREG_REG (inner_op1))))))
	    {
	      op0 = SUBREG_REG (inner_op0);
	      op1 = SUBREG_REG (inner_op1);

	      /* The resulting comparison is always unsigned since we masked
		 off the original sign bit.  */
	      code = unsigned_condition (code);

	      changed = 1;
	    }

	  else if (c0 == c1)
	    for (tmode = GET_CLASS_NARROWEST_MODE
		 (GET_MODE_CLASS (GET_MODE (op0)));
		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
		{
		  op0 = gen_lowpart (tmode, inner_op0);
		  op1 = gen_lowpart (tmode, inner_op1);
		  code = unsigned_condition (code);
		  changed = 1;
		  break;
		}

	  if (! changed)
	    break;
	}
      /* If both operands are NOT, we can strip off the outer operation
	 and adjust the comparison code for swapped operands; similarly for
	 NEG, except that this must be an equality comparison.  */
      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
		   && (code == EQ || code == NE)))
	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);

      else
	break;
    }

  /* If the first operand is a constant, swap the operands and adjust the
     comparison code appropriately, but don't do this if the second operand
     is already a constant integer.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  /* We now enter a loop during which we will try to simplify the comparison.
     For the most part, we only are concerned with comparisons with zero,
     but some things may really be comparisons with zero but not start
     out looking that way.  */
  while (GET_CODE (op1) == CONST_INT)
    {
      enum machine_mode mode = GET_MODE (op0);
      unsigned int mode_width = GET_MODE_BITSIZE (mode);
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
      int equality_comparison_p;
      int sign_bit_comparison_p;
      int unsigned_comparison_p;
      HOST_WIDE_INT const_op;
      /* We only want to handle integral modes.  This catches VOIDmode,
	 CCmode, and the floating-point modes.  An exception is that we
	 can handle VOIDmode if OP0 is a COMPARE or a comparison
	 operation.  */

      if (GET_MODE_CLASS (mode) != MODE_INT
	  && ! (mode == VOIDmode
		&& (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
	break;

      /* Get the constant we are comparing against and turn off all bits
	 not on in our mode.  */
      const_op = INTVAL (op1);
      if (mode != VOIDmode)
	const_op = trunc_int_for_mode (const_op, mode);
      op1 = GEN_INT (const_op);
      /* If we are comparing against a constant power of two and the value
	 being compared can only have that single bit nonzero (e.g., it was
	 `and'ed with that bit), we can replace this with a comparison
	 with zero.  */
      if (const_op
	  && (code == EQ || code == NE || code == GE || code == GEU
	      || code == LT || code == LTU)
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && exact_log2 (const_op) >= 0
	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
	{
	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
	  op1 = const0_rtx, const_op = 0;
	}

      /* Similarly, if we are comparing a value known to be either -1 or
	 0 with -1, change it to the opposite comparison against zero.  */

      if (const_op == -1
	  && (code == EQ || code == NE || code == GT || code == LE
	      || code == GEU || code == LTU)
	  && num_sign_bit_copies (op0, mode) == mode_width)
	{
	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
	  op1 = const0_rtx, const_op = 0;
	}
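      /* For example, if OP0 is (and X 8), its only possible nonzero bit
	 is bit 3, so (eq (and X 8) 8) becomes (ne (and X 8) 0).  */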
      /* Do some canonicalizations based on the comparison code.  We prefer
	 comparisons against zero and then prefer equality comparisons.
	 If we can reduce the size of a constant, we will do that too.  */

      switch (code)
	{
	case LT:
	  /* < C is equivalent to <= (C - 1) */
	  if (const_op > 0)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = LE;
	      /* ... fall through to LE case below.  */
	    }
	  else
	    break;

	case LE:
	  /* <= C is equivalent to < (C + 1); we do this for C < 0  */
	  if (const_op < 0)
	    {
	      const_op += 1;
	      op1 = GEN_INT (const_op);
	      code = LT;
	    }

	  /* If we are doing a <= 0 comparison on a value known to have
	     a zero sign bit, we can replace this with == 0.  */
	  else if (const_op == 0
		   && mode_width <= HOST_BITS_PER_WIDE_INT
		   && (nonzero_bits (op0, mode)
		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
	    code = EQ;
	  break;

	case GE:
	  /* >= C is equivalent to > (C - 1).  */
	  if (const_op > 0)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = GT;
	      /* ... fall through to GT below.  */
	    }
	  else
	    break;

	case GT:
	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
	  if (const_op < 0)
	    {
	      const_op += 1;
	      op1 = GEN_INT (const_op);
	      code = GE;
	    }

	  /* If we are doing a > 0 comparison on a value known to have
	     a zero sign bit, we can replace this with != 0.  */
	  else if (const_op == 0
		   && mode_width <= HOST_BITS_PER_WIDE_INT
		   && (nonzero_bits (op0, mode)
		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
	    code = NE;
	  break;

	case LTU:
	  /* < C is equivalent to <= (C - 1).  */
	  if (const_op > 0)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = LEU;
	      /* ... fall through ...  */
	    }

	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = GE;
	      break;
	    }
	  else
	    break;

	case LEU:
	  /* unsigned <= 0 is equivalent to == 0 */
	  if (const_op == 0)
	    code = EQ;

	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = GE;
	    }
	  break;

	case GEU:
	  /* >= C is equivalent to > (C - 1).  */
	  if (const_op > 1)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = GTU;
	      /* ... fall through ...  */
	    }

	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = LT;
	      break;
	    }
	  else
	    break;

	case GTU:
	  /* unsigned > 0 is equivalent to != 0 */
	  if (const_op == 0)
	    code = NE;

	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = LT;
	    }
	  break;

	default:
	  break;
	}
      /* Compute some predicates to simplify code below.  */

      equality_comparison_p = (code == EQ || code == NE);
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
			       || code == GEU);

      /* If this is a sign bit comparison and we can do arithmetic in
	 MODE, say that we will only be needing the sign bit of OP0.  */
      if (sign_bit_comparison_p
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	op0 = force_to_mode (op0, mode,
			     ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (mode) - 1)),
			     0);
      /* Now try cases based on the opcode of OP0.  If none of the cases
	 does a "continue", we exit this loop immediately after the
	 switch.  */

      switch (GET_CODE (op0))
	{
	case ZERO_EXTRACT:
	  /* If we are extracting a single bit from a variable position in
	     a constant that has only a single bit set and are comparing it
	     with zero, we can convert this into an equality comparison
	     between the position and the location of the single bit.  */
	  /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
	     have already reduced the shift count modulo the word size.  */
	  if (!SHIFT_COUNT_TRUNCATED
	      && GET_CODE (XEXP (op0, 0)) == CONST_INT
	      && XEXP (op0, 1) == const1_rtx
	      && equality_comparison_p && const_op == 0
	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
	    {
	      if (BITS_BIG_ENDIAN)
		{
		  enum machine_mode new_mode
		    = mode_for_extraction (EP_extzv, 1);
		  if (new_mode == MAX_MACHINE_MODE)
		    i = BITS_PER_WORD - 1 - i;
		  else
		    {
		      mode = new_mode;
		      i = (GET_MODE_BITSIZE (mode) - 1 - i);
		    }
		}

	      op0 = XEXP (op0, 2);
	      op1 = GEN_INT (i);
	      const_op = i;

	      /* Result is nonzero iff shift count is equal to I.  */
	      code = reverse_condition (code);
	      continue;
	    }

	  /* ... fall through ...  */
	case SIGN_EXTRACT:
	  tem = expand_compound_operation (op0);
	  if (tem != op0)
	    {
	      op0 = tem;
	      continue;
	    }
	  break;

	case NOT:
	  /* If testing for equality, we can take the NOT of the constant.  */
	  if (equality_comparison_p
	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }

	  /* If just looking at the sign bit, reverse the sense of the
	     comparison.  */
	  if (sign_bit_comparison_p)
	    {
	      op0 = XEXP (op0, 0);
	      code = (code == GE ? LT : GE);
	      continue;
	    }
	  break;

	case NEG:
	  /* If testing for equality, we can take the NEG of the constant.  */
	  if (equality_comparison_p
	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }
10435 /* When X is ABS or is known positive,
10436 (neg X) is < 0 if and only if X != 0. */
10438 if (sign_bit_comparison_p
10439 && (GET_CODE (XEXP (op0
, 0)) == ABS
10440 || (mode_width
<= HOST_BITS_PER_WIDE_INT
10441 && (nonzero_bits (XEXP (op0
, 0), mode
)
10442 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)))
10444 op0
= XEXP (op0
, 0);
10445 code
= (code
== LT
? NE
: EQ
);
10449 /* If we have NEG of something whose two high-order bits are the
10450 same, we know that "(-a) < 0" is equivalent to "a > 0". */
10451 if (num_sign_bit_copies (op0
, mode
) >= 2)
10453 op0
= XEXP (op0
, 0);
10454 code
= swap_condition (code
);
10460 /* If we are testing equality and our count is a constant, we
10461 can perform the inverse operation on our RHS. */
10462 if (equality_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
10463 && (tem
= simplify_binary_operation (ROTATERT
, mode
,
10464 op1
, XEXP (op0
, 1))) != 0)
10466 op0
= XEXP (op0
, 0);
10471 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10472 a particular bit. Convert it to an AND of a constant of that
10473 bit. This will be converted into a ZERO_EXTRACT. */
10474 if (const_op
== 0 && sign_bit_comparison_p
10475 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10476 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
10478 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
10481 - INTVAL (XEXP (op0
, 1)))));
10482 code
= (code
== LT
? NE
: EQ
);
10486 /* Fall through. */
	case ABS:
	  /* ABS is ignorable inside an equality comparison with zero.  */
	  if (const_op == 0 && equality_comparison_p)
	    {
	      op0 = XEXP (op0, 0);
	      continue;
	    }
	  break;

	case SIGN_EXTEND:
	  /* Can simplify (compare (zero/sign_extend FOO) CONST) to
	     (compare FOO CONST) if CONST fits in FOO's mode and we
	     are either testing inequality or have an unsigned
	     comparison with ZERO_EXTEND or a signed comparison with
	     SIGN_EXTEND.  But don't do it if we don't have a compare
	     insn of the given mode, since we'd have to revert it
	     later on, and then we wouldn't know whether to sign- or
	     zero-extend.  */
	  mode = GET_MODE (XEXP (op0, 0));
	  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
	      && ! unsigned_comparison_p
	      && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	      && ((unsigned HOST_WIDE_INT) const_op
		  < (((unsigned HOST_WIDE_INT) 1
		      << (GET_MODE_BITSIZE (mode) - 1))))
	      && optab_handler (cmp_optab, mode)->insn_code != CODE_FOR_nothing)
	    {
	      op0 = XEXP (op0, 0);
	      continue;
	    }
	  break;
	case SUBREG:
	  /* Check for the case where we are comparing A - C1 with C2, that is

	       (subreg:MODE (plus (A) (-C1))) op (C2)

	     with C1 a constant, and try to lift the SUBREG, i.e. to do the
	     comparison in the wider mode.  One of the following two conditions
	     must be true in order for this to be valid:

	       1. The mode extension results in the same bit pattern being added
		  on both sides and the comparison is equality or unsigned.  As
		  C2 has been truncated to fit in MODE, the pattern can only be
		  all 0s or all 1s.

	       2. The mode extension results in the sign bit being copied on
		  each side.

	     The difficulty here is that we have predicates for A but not for
	     (A - C1) so we need to check that C1 is within proper bounds so
	     as to perturbate A as little as possible.  */

	  if (mode_width <= HOST_BITS_PER_WIDE_INT
	      && subreg_lowpart_p (op0)
	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
	      && GET_CODE (SUBREG_REG (op0)) == PLUS
	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
	    {
	      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
	      rtx a = XEXP (SUBREG_REG (op0), 0);
	      HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));

	      if ((c1 > 0
		   && (unsigned HOST_WIDE_INT) c1
		      < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
		   && (equality_comparison_p || unsigned_comparison_p)
		   /* (A - C1) zero-extends if it is positive and sign-extends
		      if it is negative, C2 both zero- and sign-extends.  */
		   && ((0 == (nonzero_bits (a, inner_mode)
			      & ~GET_MODE_MASK (mode))
			&& const_op >= 0)
		       /* (A - C1) sign-extends if it is positive and 1-extends
			  if it is negative, C2 both sign- and 1-extends.  */
		       || (num_sign_bit_copies (a, inner_mode)
			   > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
					     - mode_width)
			   && const_op < 0)))
		  || ((unsigned HOST_WIDE_INT) c1
		      < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
		      /* (A - C1) always sign-extends, like C2.  */
		      && num_sign_bit_copies (a, inner_mode)
			 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
					   - (mode_width - 1))))
		{
		  op0 = SUBREG_REG (op0);
		  continue;
		}
	    }
	  /* If the inner mode is narrower and we are extracting the low part,
	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
	  if (subreg_lowpart_p (op0)
	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
	    /* Fall through */ ;
	  else
	    break;

	  /* ... fall through ...  */

	case ZERO_EXTEND:
	  mode = GET_MODE (XEXP (op0, 0));
	  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
	      && (unsigned_comparison_p || equality_comparison_p)
	      && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	      && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
	      && optab_handler (cmp_optab, mode)->insn_code != CODE_FOR_nothing)
	    {
	      op0 = XEXP (op0, 0);
	      continue;
	    }
	  break;
	case PLUS:
	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
	     this for equality comparisons due to pathological cases involving
	     overflows.  */
	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
							op1, XEXP (op0, 1))))
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }

	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
	    {
	      op0 = XEXP (XEXP (op0, 0), 0);
	      code = (code == LT ? EQ : NE);
	      continue;
	    }
	  break;

	case MINUS:
	  /* We used to optimize signed comparisons against zero, but that
	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
	     arrive here as equality comparisons, or (GEU, LTU) are
	     optimized away.  No need to special-case them.  */
	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
	     (eq B (minus A C)), whichever simplifies.  We can only do
	     this for equality comparisons due to pathological cases involving
	     overflows.  */
	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
							XEXP (op0, 1), op1)))
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }

	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
							XEXP (op0, 0), op1)))
	    {
	      op0 = XEXP (op0, 1);
	      op1 = tem;
	      continue;
	    }

	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
	     of bits in X minus 1, is one iff X > 0.  */
	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
		 == mode_width - 1
	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
	    {
	      op0 = XEXP (op0, 1);
	      code = (code == GE ? LE : GT);
	      continue;
	    }
	  break;
	case XOR:
	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
	     if C is zero or B is a constant.  */
	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (XOR, mode,
							XEXP (op0, 1), op1)))
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }
	  break;

	case EQ:  case NE:
	case UNEQ:  case LTGT:
	case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
	case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
	case UNORDERED: case ORDERED:
	  /* We can't do anything if OP0 is a condition code value, rather
	     than an actual data value.  */

	  if (const_op != 0
	      || CC0_P (XEXP (op0, 0))
	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
	    break;
	  /* Get the two operands being compared.  */
	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
	  else
	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);

	  /* Check for the cases where we simply want the result of the
	     earlier test or the opposite of that result.  */
	  if (code == NE || code == EQ
	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		  && (STORE_FLAG_VALUE
		      & (((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
		  && (code == LT || code == GE)))
	    {
	      enum rtx_code new_code;
	      if (code == LT || code == NE)
		new_code = GET_CODE (op0);
	      else
		new_code = reversed_comparison_code (op0, NULL);

	      if (new_code != UNKNOWN)
		{
		  code = new_code;
		  op0 = tem;
		  op1 = tem1;
		  continue;
		}
	    }
	  break;
	case IOR:
	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
	     iff X <= 0.  */
	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
	    {
	      op0 = XEXP (op0, 1);
	      code = (code == GE ? GT : LE);
	      continue;
	    }
	  break;
	case AND:
	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
	     will be converted to a ZERO_EXTRACT later.  */
	  if (const_op == 0 && equality_comparison_p
	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
	    {
	      op0 = simplify_and_const_int
		(NULL_RTX, mode, gen_rtx_LSHIFTRT (mode,
						   XEXP (op0, 1),
						   XEXP (XEXP (op0, 0), 1)),
		 (HOST_WIDE_INT) 1);
	      continue;
	    }
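	  /* Both forms test bit X of Y; the rewritten version shifts the
	     variable operand instead of a constant 1, which is the shape a
	     ZERO_EXTRACT extraction expects.  */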
	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
	     zero and X is a comparison and C1 and C2 describe only bits set
	     in STORE_FLAG_VALUE, we can compare with X.  */
	  if (const_op == 0 && equality_comparison_p
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	    {
	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
	      if ((~STORE_FLAG_VALUE & mask) == 0
		  && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
			  && COMPARISON_P (tem))))
		{
		  op0 = XEXP (XEXP (op0, 0), 0);
		  continue;
		}
	    }
	  /* If we are doing an equality comparison of an AND of a bit equal
	     to the sign bit, replace this with a LT or GE comparison of
	     the underlying value.  */
	  if (equality_comparison_p
	      && const_op == 0
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
	    {
	      op0 = XEXP (op0, 0);
	      code = (code == EQ ? GE : LT);
	      continue;
	    }
	  /* If this AND operation is really a ZERO_EXTEND from a narrower
	     mode, the constant fits within that mode, and this is either an
	     equality or unsigned comparison, try to do this comparison in
	     the narrower mode.

	     Note that in:

	     (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
	     -> (ne:DI (reg:SI 4) (const_int 0))

	     unless TRULY_NOOP_TRUNCATION allows it or the register is
	     known to hold a value of the required mode the
	     transformation is invalid.  */
	  if ((equality_comparison_p || unsigned_comparison_p)
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
				   & GET_MODE_MASK (mode))
				  + 1)) >= 0
	      && const_op >> i == 0
	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode
	      && (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
					 GET_MODE_BITSIZE (GET_MODE (op0)))
		  || (REG_P (XEXP (op0, 0))
		      && reg_truncated_to_mode (tmode, XEXP (op0, 0)))))
	    {
	      op0 = gen_lowpart (tmode, XEXP (op0, 0));
	      continue;
	    }
	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
	     fits in both M1 and M2 and the SUBREG is either paradoxical
	     or represents the low part, permute the SUBREG and the AND
	     and try again.  */
	  if (GET_CODE (XEXP (op0, 0)) == SUBREG)
	    {
	      unsigned HOST_WIDE_INT c1;
	      tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
	      /* Require an integral mode, to avoid creating something like
		 (AND:SF ...).  */
	      if (SCALAR_INT_MODE_P (tmode)
		  /* It is unsafe to commute the AND into the SUBREG if the
		     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
		     not defined.  As originally written the upper bits
		     have a defined value due to the AND operation.
		     However, if we commute the AND inside the SUBREG then
		     they no longer have defined values and the meaning of
		     the code has been changed.  */
		  && (0
#ifdef WORD_REGISTER_OPERATIONS
		      || (mode_width > GET_MODE_BITSIZE (tmode)
			  && mode_width <= BITS_PER_WORD)
#endif
		      || (mode_width <= GET_MODE_BITSIZE (tmode)
			  && subreg_lowpart_p (XEXP (op0, 0))))
		  && GET_CODE (XEXP (op0, 1)) == CONST_INT
		  && mode_width <= HOST_BITS_PER_WIDE_INT
		  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
		  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
		  && (c1 & ~GET_MODE_MASK (tmode)) == 0
		  && c1 != mask
		  && c1 != GET_MODE_MASK (tmode))
		{
		  op0 = simplify_gen_binary (AND, tmode,
					     SUBREG_REG (XEXP (op0, 0)),
					     gen_int_mode (c1, tmode));
		  op0 = gen_lowpart (mode, op0);
		  continue;
		}
	    }
	  /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
	  if (const_op == 0 && equality_comparison_p
	      && XEXP (op0, 1) == const1_rtx
	      && GET_CODE (XEXP (op0, 0)) == NOT)
	    {
	      op0 = simplify_and_const_int
		(NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
	      code = (code == NE ? EQ : NE);
	      continue;
	    }
	  /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
	     (eq (and (lshiftrt X) 1) 0).
	     Also handle the case where (not X) is expressed using xor.  */
	  if (const_op == 0 && equality_comparison_p
	      && XEXP (op0, 1) == const1_rtx
	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
	    {
	      rtx shift_op = XEXP (XEXP (op0, 0), 0);
	      rtx shift_count = XEXP (XEXP (op0, 0), 1);

	      if (GET_CODE (shift_op) == NOT
		  || (GET_CODE (shift_op) == XOR
		      && GET_CODE (XEXP (shift_op, 1)) == CONST_INT
		      && GET_CODE (shift_count) == CONST_INT
		      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		      && (INTVAL (XEXP (shift_op, 1))
			  == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
		{
		  op0 = simplify_and_const_int
		    (NULL_RTX, mode,
		     gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
		     (HOST_WIDE_INT) 1);
		  code = (code == NE ? EQ : NE);
		  continue;
		}
	    }
	  break;
	case ASHIFT:
	  /* If we have (compare (ashift FOO N) (const_int C)) and
	     the high order N bits of FOO (N+1 if an inequality comparison)
	     are known to be zero, we can do this by comparing FOO with C
	     shifted right N bits so long as the low-order N bits of C are
	     zero.  */
	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) >= 0
	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
		  < HOST_BITS_PER_WIDE_INT)
	      && ((const_op
		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && (nonzero_bits (XEXP (op0, 0), mode)
		  & ~(mask >> (INTVAL (XEXP (op0, 1))
			       + ! equality_comparison_p))) == 0)
	    {
	      /* We must perform a logical shift, not an arithmetic one,
		 as we want the top N bits of C to be zero.  */
	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);

	      temp >>= INTVAL (XEXP (op0, 1));
	      op1 = gen_int_mode (temp, mode);
	      op0 = XEXP (op0, 0);
	      continue;
	    }
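	  /* For example, (eq (ashift:SI FOO 2) (const_int 12)) becomes
	     (eq FOO (const_int 3)) when the top bits of FOO are known
	     zero: 12's low two bits are zero, and 12 >> 2 == 3.  */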
	  /* If we are doing a sign bit comparison, it means we are testing
	     a particular bit.  Convert it to the appropriate AND.  */
	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT)
	    {
	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
					    ((HOST_WIDE_INT) 1
					     << (mode_width - 1
						 - INTVAL (XEXP (op0, 1)))));
	      code = (code == LT ? NE : EQ);
	      continue;
	    }

	  /* If this is an equality comparison with zero and we are shifting
	     the low bit to the sign bit, we can convert this to an AND of the
	     low-order bit.  */
	  if (const_op == 0 && equality_comparison_p
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
		 == mode_width - 1)
	    {
	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
					    (HOST_WIDE_INT) 1);
	      continue;
	    }
	  break;
	case ASHIFTRT:
	  /* If this is an equality comparison with zero, we can do this
	     as a logical shift, which might be much simpler.  */
	  if (equality_comparison_p && const_op == 0
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    {
	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
					  XEXP (op0, 0),
					  INTVAL (XEXP (op0, 1)));
	      continue;
	    }
	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
	     do the comparison in a narrower mode.  */
	  if (! unsigned_comparison_p
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
					 MODE_INT, 1)) != BLKmode
	      && (((unsigned HOST_WIDE_INT) const_op
		   + (GET_MODE_MASK (tmode) >> 1) + 1)
		  <= GET_MODE_MASK (tmode)))
	    {
	      op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
	      continue;
	    }
	  /* Likewise if OP0 is a PLUS of a sign extension with a
	     constant, which is usually represented with the PLUS
	     between the shifts.  */
	  if (! unsigned_comparison_p
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (XEXP (op0, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
					 MODE_INT, 1)) != BLKmode
	      && (((unsigned HOST_WIDE_INT) const_op
		   + (GET_MODE_MASK (tmode) >> 1) + 1)
		  <= GET_MODE_MASK (tmode)))
	    {
	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
	      rtx add_const = XEXP (XEXP (op0, 0), 1);
	      rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
						   add_const, XEXP (op0, 1));

	      op0 = simplify_gen_binary (PLUS, tmode,
					 gen_lowpart (tmode, inner),
					 new_const);
	      continue;
	    }

	  /* ... fall through ...  */

	case LSHIFTRT:
	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
	     the low order N bits of FOO are known to be zero, we can do this
	     by comparing FOO with C shifted left N bits so long as no
	     overflow occurs.  */
	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) >= 0
	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && (nonzero_bits (XEXP (op0, 0), mode)
		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
	      && (((unsigned HOST_WIDE_INT) const_op
		   + (GET_CODE (op0) != LSHIFTRT
		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
			 + 1)
		      : 0))
		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
	    {
	      /* If the shift was logical, then we must make the condition
		 unsigned.  */
	      if (GET_CODE (op0) == LSHIFTRT)
		code = unsigned_condition (code);

	      const_op <<= INTVAL (XEXP (op0, 1));
	      op1 = GEN_INT (const_op);
	      op0 = XEXP (op0, 0);
	      continue;
	    }
11037 can replace this with an LT or GE comparison. */
11039 && (equality_comparison_p
|| sign_bit_comparison_p
)
11040 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
11041 && (unsigned HOST_WIDE_INT
) INTVAL (XEXP (op0
, 1))
11044 op0
= XEXP (op0
, 0);
11045 code
= (code
== NE
|| code
== GT
? LT
: GE
);
  /* Now make any compound operations involved in this comparison.  Then,
     check for an outermost SUBREG on OP0 that is not doing anything or is
     paradoxical.  The latter transformation must only be performed when
     it is known that the "extra" bits will be the same in op0 and op1 or
     that they don't matter.  There are three cases to consider:

     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
	care bits and we can assume they have any convenient value.  So
	making the transformation is safe.

     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
	In this case the upper bits of op0 are undefined.  We should not make
	the simplification in that case as we do not know the contents of
	those bits.

     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
	UNKNOWN.  In that case we know those bits are zeros or ones.  We must
	also be sure that they are the same as the upper bits of op1.

     We can never remove a SUBREG for a non-equality comparison because
     the sign bit is in a different place in the underlying object.  */

  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
  op1 = make_compound_operation (op1, SET);
  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
      && (code == NE || code == EQ))
    {
      if (GET_MODE_SIZE (GET_MODE (op0))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
	{
	  /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
	     implemented.  */
	  if (REG_P (SUBREG_REG (op0)))
	    {
	      op0 = SUBREG_REG (op0);
	      op1 = gen_lowpart (GET_MODE (op0), op1);
	    }
	}
      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
		<= HOST_BITS_PER_WIDE_INT)
	       && (nonzero_bits (SUBREG_REG (op0),
				 GET_MODE (SUBREG_REG (op0)))
		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
	{
	  tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);

	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
	    op0 = SUBREG_REG (op0), op1 = tem;
	}
    }
  /* We now do the opposite procedure: Some machines don't have compare
     insns in all modes.  If OP0's mode is an integer mode smaller than a
     word and we can't do a compare in that mode, see if there is a larger
     mode for which we can do the compare.  There are a number of cases in
     which we can use the wider mode.  */

  mode = GET_MODE (op0);
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
      && ! have_insn_for (COMPARE, mode))
    for (tmode = GET_MODE_WIDER_MODE (mode);
	 (tmode != VOIDmode
	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
	 tmode = GET_MODE_WIDER_MODE (tmode))
      if (have_insn_for (COMPARE, tmode))
	{
	  int zero_extended;
	  /* If the only nonzero bits in OP0 and OP1 are those in the
	     narrower mode and this is an equality or unsigned comparison,
	     we can use the wider mode.  Similarly for sign-extended
	     values, in which case it is true for all comparisons.  */
	  zero_extended = ((code == EQ || code == NE
			    || code == GEU || code == GTU
			    || code == LEU || code == LTU)
			   && (nonzero_bits (op0, tmode)
			       & ~GET_MODE_MASK (mode)) == 0
			   && ((GET_CODE (op1) == CONST_INT
				|| (nonzero_bits (op1, tmode)
				    & ~GET_MODE_MASK (mode)) == 0)));

	  if (zero_extended
	      || ((num_sign_bit_copies (op0, tmode)
		   > (unsigned int) (GET_MODE_BITSIZE (tmode)
				     - GET_MODE_BITSIZE (mode)))
		  && (num_sign_bit_copies (op1, tmode)
		      > (unsigned int) (GET_MODE_BITSIZE (tmode)
					- GET_MODE_BITSIZE (mode)))))
	    {
	      /* If OP0 is an AND and we don't have an AND in MODE either,
		 make a new AND in the proper mode.  */
	      if (GET_CODE (op0) == AND
		  && !have_insn_for (AND, mode))
		op0 = simplify_gen_binary (AND, tmode,
					   gen_lowpart (tmode,
							XEXP (op0, 0)),
					   gen_lowpart (tmode,
							XEXP (op0, 1)));

	      op0 = gen_lowpart (tmode, op0);
	      if (zero_extended && GET_CODE (op1) == CONST_INT)
		op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
	      op1 = gen_lowpart (tmode, op1);
	      break;
	    }
11168 /* If this is a test for negative, we can make an explicit
11169 test of the sign bit. */
11171 if (op1
== const0_rtx
&& (code
== LT
|| code
== GE
)
11172 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
11174 op0
= simplify_gen_binary (AND
, tmode
,
11175 gen_lowpart (tmode
, op0
),
11176 GEN_INT ((HOST_WIDE_INT
) 1
11177 << (GET_MODE_BITSIZE (mode
)
11179 code
= (code
== LT
) ? NE
: EQ
;
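  /* For instance, widening a QImode test to SImode, the sign-bit
     transformation just above turns

         (lt (reg:QI 100) (const_int 0))

     into

         (ne (and:SI (subreg:SI (reg:QI 100) 0) (const_int 128))
             (const_int 0))

     so that only an SImode AND and compare are required.  (Register
     numbers are illustrative only.)  */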
#ifdef CANONICALIZE_COMPARISON
  /* If this machine only supports a subset of valid comparisons, see if we
     can convert an unsupported one into a supported one.  */
  CANONICALIZE_COMPARISON (code, op0, op1);
#endif

  *pop0 = op0;
  *pop1 = op1;

  return code;
}
/* Utility function for record_value_for_reg.  Count number of
   rtxs in X.  */

static int
count_rtxs (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, ret = 1;

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (x0 == x1)
        return 1 + 2 * count_rtxs (x0);

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return 2 + 2 * count_rtxs (x0)
               + count_rtxs (x0 == XEXP (x1, 0)
                             ? XEXP (x1, 1) : XEXP (x1, 0));

      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return 2 + 2 * count_rtxs (x1)
               + count_rtxs (x1 == XEXP (x0, 0)
                             ? XEXP (x0, 1) : XEXP (x0, 0));
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      ret += count_rtxs (XEXP (x, i));

  return ret;
}
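/* For example (illustrative only): for (plus:SI (reg:SI 100) (reg:SI 100))
   the shared-operand case above returns 1 + 2 * 1 = 3, the size the
   expression would have if the two uses of the shared operand were
   distinct rtxs.  This matters because record_value_for_reg uses this
   count to decide when a substituted value is growing too large.  */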
/* Utility function for following routine.  Called when X is part of a value
   being stored into last_set_value.  Sets last_set_table_tick
   for each register mentioned.  Similar to mention_regs in cse.c  */

static void
update_table_tick (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int r;

      for (r = regno; r < endregno; r++)
        {
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, r);

          rsp->last_set_table_tick = label_tick;
        }

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    /* Note that we can't have an "E" in values stored; see
       get_last_value_validate.  */
    if (fmt[i] == 'e')
      {
        /* Check for identical subexpressions.  If x contains
           identical subexpressions we only have to traverse one of
           them.  */
        if (i == 0 && ARITHMETIC_P (x))
          {
            /* Note that at this point x1 has already been
               processed.  */
            rtx x0 = XEXP (x, 0);
            rtx x1 = XEXP (x, 1);

            /* If x0 and x1 are identical then there is no need to
               process x0.  */
            if (x0 == x1)
              break;

            /* If x0 is identical to a subexpression of x1 then while
               processing x1, x0 has already been processed.  Thus we
               are done with x.  */
            if (ARITHMETIC_P (x1)
                && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
              break;

            /* If x1 is identical to a subexpression of x0 then we
               still have to process the rest of x0.  */
            if (ARITHMETIC_P (x0)
                && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
              {
                update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
                break;
              }
          }

        update_table_tick (XEXP (x, i));
      }
}
/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_stat[].last_set; this is
   only permitted with VALUE also zero and is used to invalidate the
   register.  */

static void
record_value_for_reg (rtx reg, rtx insn, rtx value)
{
  unsigned int regno = REGNO (reg);
  unsigned int endregno = END_REGNO (reg);
  unsigned int i;
  reg_stat_type *rsp;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
         our insn.  */
      subst_low_luid = DF_INSN_LUID (insn);
      tem = get_last_value (reg);

      /* If TEM is simply a binary operation with two CLOBBERs as operands,
         it isn't going to be useful and will take a lot of time to process,
         so just use the CLOBBER.  */

      if (tem)
        {
          if (ARITHMETIC_P (tem)
              && GET_CODE (XEXP (tem, 0)) == CLOBBER
              && GET_CODE (XEXP (tem, 1)) == CLOBBER)
            tem = XEXP (tem, 0);
          else if (count_occurrences (value, reg, 1) >= 2)
            {
              /* If there are two or more occurrences of REG in VALUE,
                 prevent the value from growing too much.  */
              if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
                tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
            }

          value = replace_rtx (copy_rtx (value), reg, tem);
        }
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i++)
    {
      rsp = VEC_index (reg_stat_type, reg_stat, i);

      if (insn)
        rsp->last_set = insn;

      rsp->last_set_value = 0;
      rsp->last_set_mode = 0;
      rsp->last_set_nonzero_bits = 0;
      rsp->last_set_sign_bit_copies = 0;
      rsp->last_death = 0;
      rsp->truncated_to_mode = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      rsp = VEC_index (reg_stat_type, reg_stat, i);
      rsp->last_set_label = label_tick;
      if (!insn
          || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
        rsp->last_set_invalid = 1;
      else
        rsp->last_set_invalid = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  rsp = VEC_index (reg_stat_type, reg_stat, regno);
  if (value && ! get_last_value_validate (&value, insn,
                                          rsp->last_set_label, 0))
    {
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, insn,
                                     rsp->last_set_label, 1))
        value = 0;
    }

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  rsp->last_set_value = value;

  if (value)
    {
      enum machine_mode mode = GET_MODE (reg);
      subst_low_luid = DF_INSN_LUID (insn);
      rsp->last_set_mode = mode;
      if (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        mode = nonzero_bits_mode;
      rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
      rsp->last_set_sign_bit_copies
        = num_sign_bit_copies (value, GET_MODE (reg));
    }
}
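/* As an example of the self-reference handling above: for an increment
   like "x++" expressed as

       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1)))

   the recorded value must not mention register 100 itself.  If the last
   known value of register 100 was, say, (reg:SI 99), we record
   (plus:SI (reg:SI 99) (const_int 1)); if nothing useful is known, the
   validation step above ends up recording a (clobber (const_int 0))
   instead.  (Register numbers are illustrative only.)  */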
/* Called via note_stores from record_dead_and_set_regs to handle one
   SET or CLOBBER in an insn.  DATA is the instruction in which the
   set is occurring.  */

static void
record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
{
  rtx record_dead_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (!record_dead_insn)
    {
      if (REG_P (dest))
        record_value_for_reg (dest, NULL_RTX, NULL_RTX);
      return;
    }

  if (REG_P (dest))
    {
      /* If we are setting the whole register, we know its value.  Otherwise
         show that we don't know the value.  We can handle SUBREG in
         some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
        record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
               && GET_CODE (SET_DEST (setter)) == SUBREG
               && SUBREG_REG (SET_DEST (setter)) == dest
               && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
               && subreg_lowpart_p (SET_DEST (setter)))
        record_value_for_reg (dest, record_dead_insn,
                              gen_lowpart (GET_MODE (dest),
                                           SET_SRC (setter)));
      else
        record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (MEM_P (dest)
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = DF_INSN_LUID (record_dead_insn);
}
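/* For instance, a lowpart store such as

       (set (subreg:QI (reg:SI 100) 0) (reg:QI 101))

   is recorded as if register 100 were set to
   (subreg:SI (reg:QI 101) 0), i.e. gen_lowpart (SImode, source), while
   any other partial store merely invalidates the recorded value.
   (Register numbers are illustrative only.)  */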
/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_stat[], in particular fields last_set, last_set_value,
   last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
   last_death, and also the similar information mem_last_set (which insn
   most recently modified memory) and last_call_luid (which insn was the
   most recent subroutine call).  */

static void
record_dead_and_set_regs (rtx insn)
{
  rtx link;
  unsigned int i;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
          && REG_P (XEXP (link, 0)))
        {
          unsigned int regno = REGNO (XEXP (link, 0));
          unsigned int endregno = END_REGNO (XEXP (link, 0));

          for (i = regno; i < endregno; i++)
            {
              reg_stat_type *rsp;

              rsp = VEC_index (reg_stat_type, reg_stat, i);
              rsp->last_death = insn;
            }
        }
      else if (REG_NOTE_KIND (link) == REG_INC)
        record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          {
            reg_stat_type *rsp;

            rsp = VEC_index (reg_stat_type, reg_stat, i);
            rsp->last_set_invalid = 1;
            rsp->last_set = insn;
            rsp->last_set_value = 0;
            rsp->last_set_mode = 0;
            rsp->last_set_nonzero_bits = 0;
            rsp->last_set_sign_bit_copies = 0;
            rsp->last_death = 0;
            rsp->truncated_to_mode = 0;
          }

      last_call_luid = mem_last_set = DF_INSN_LUID (insn);

      /* We can't combine into a call pattern.  Remember, though, that
         the return value register is set at this LUID.  We could
         still replace a register with the return value from the
         wrong subroutine call!  */
      note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
    }
  else
    note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
}
/* If a SUBREG has the promoted bit set, it is in fact a property of the
   register present in the SUBREG, so for each such SUBREG go back and
   adjust nonzero and sign bit information of the registers that are
   known to have some zero/sign bits set.

   This is needed because when combine blows the SUBREGs away, the
   information on zero/sign bits is lost and further combines can be
   missed because of that.  */

static void
record_promoted_value (rtx insn, rtx subreg)
{
  rtx links, set;
  unsigned int regno = REGNO (SUBREG_REG (subreg));
  enum machine_mode mode = GET_MODE (subreg);

  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
    return;

  for (links = LOG_LINKS (insn); links;)
    {
      reg_stat_type *rsp;

      insn = XEXP (links, 0);
      set = single_set (insn);

      if (! set || !REG_P (SET_DEST (set))
          || REGNO (SET_DEST (set)) != regno
          || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
        {
          links = XEXP (links, 1);
          continue;
        }

      rsp = VEC_index (reg_stat_type, reg_stat, regno);
      if (rsp->last_set == insn)
        {
          if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
            rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
        }

      if (REG_P (SET_SRC (set)))
        {
          regno = REGNO (SET_SRC (set));
          links = LOG_LINKS (insn);
        }
      else
        break;
    }
}
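/* E.g. on a target that promotes SImode variables to DImode registers,
   seeing (subreg:SI (reg:DI 100) 0) with SUBREG_PROMOTED_VAR_P and a
   positive SUBREG_PROMOTED_UNSIGNED_P tells us the high half of
   register 100 is zero, so the nonzero bits recorded for its setter can
   be narrowed to GET_MODE_MASK (SImode).  (Register number and subreg
   offset are illustrative only.)  */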
/* Check if X, a register, is known to contain a value already
   truncated to MODE.  In this case we can use a subreg to refer to
   the truncated value even though in the generic case we would need
   an explicit truncation.  */

static bool
reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
{
  reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  enum machine_mode truncated = rsp->truncated_to_mode;

  if (truncated == 0
      || rsp->truncation_label < label_tick_ebb_start)
    return false;
  if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
    return true;
  if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                             GET_MODE_BITSIZE (truncated)))
    return true;
  return false;
}
/* X is a REG or a SUBREG.  If X is some sort of a truncation record
   it.  For non-TRULY_NOOP_TRUNCATION targets we might be able to turn
   a truncate into a subreg using this information.  */

static void
record_truncated_value (rtx x)
{
  enum machine_mode truncated_mode;
  reg_stat_type *rsp;

  if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
    {
      enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
      truncated_mode = GET_MODE (x);

      if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
        return;

      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (truncated_mode),
                                 GET_MODE_BITSIZE (original_mode)))
        return;

      x = SUBREG_REG (x);
    }
  /* ??? For hard-regs we now record everything.  We might be able to
     optimize this using last_set_mode.  */
  else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    truncated_mode = GET_MODE (x);
  else
    return;

  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  if (rsp->truncated_to_mode == 0
      || rsp->truncation_label < label_tick_ebb_start
      || (GET_MODE_SIZE (truncated_mode)
          < GET_MODE_SIZE (rsp->truncated_to_mode)))
    {
      rsp->truncated_to_mode = truncated_mode;
      rsp->truncation_label = label_tick;
    }
}
/* Scan X for promoted SUBREGs and truncated REGs.  For each one
   found, note what it implies to the registers used in it.  */

static void
check_conversions (rtx insn, rtx x)
{
  if (GET_CODE (x) == SUBREG || REG_P (x))
    {
      if (GET_CODE (x) == SUBREG
          && SUBREG_PROMOTED_VAR_P (x)
          && REG_P (SUBREG_REG (x)))
        record_promoted_value (insn, x);

      record_truncated_value (x);
    }
  else
    {
      const char *format = GET_RTX_FORMAT (GET_CODE (x));
      int i, j;

      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
        switch (format[i])
          {
          case 'e':
            check_conversions (insn, XEXP (x, i));
            break;
          case 'V':
          case 'E':
            if (XVEC (x, i) != 0)
              for (j = 0; j < XVECLEN (x, i); j++)
                check_conversions (insn, XVECEXP (x, i, j));
            break;
          }
    }
}
/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  Return 0 if some are not.

   If REPLACE is nonzero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
{
  rtx x = *loc;
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int j;

      for (j = regno; j < endregno; j++)
        {
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, j);
          if (rsp->last_set_invalid
              /* If this is a pseudo-register that was only set once and not
                 live at the beginning of the function, it is always valid.  */
              || (! (regno >= FIRST_PSEUDO_REGISTER
                     && REG_N_SETS (regno) == 1
                     && (!REGNO_REG_SET_P
                         (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno)))
                  && rsp->last_set_label > tick))
            {
              if (replace)
                *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
              return replace;
            }
        }

      return 1;
    }
  /* If this is a memory reference, make sure that there were
     no stores after it that might have clobbered the value.  We don't
     have alias info, so we assume any store invalidates it.  */
  else if (MEM_P (x) && !MEM_READONLY_P (x)
           && DF_INSN_LUID (insn) <= mem_last_set)
    {
      if (replace)
        *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          /* Check for identical subexpressions.  If x contains
             identical subexpressions we only have to traverse one of
             them.  */
          if (i == 1 && ARITHMETIC_P (x))
            {
              /* Note that at this point x0 has already been checked
                 and found valid.  */
              rtx x0 = XEXP (x, 0);
              rtx x1 = XEXP (x, 1);

              /* If x0 and x1 are identical then x is also valid.  */
              if (x0 == x1)
                return 1;

              /* If x1 is identical to a subexpression of x0 then
                 while checking x0, x1 has already been checked.  Thus
                 it is valid and so is x.  */
              if (ARITHMETIC_P (x0)
                  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
                return 1;

              /* If x0 is identical to a subexpression of x1 then x is
                 valid iff the rest of x1 is valid.  */
              if (ARITHMETIC_P (x1)
                  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
                return
                  get_last_value_validate (&XEXP (x1,
                                                  x0 == XEXP (x1, 0) ? 1 : 0),
                                           insn, tick, replace);
            }

          if (get_last_value_validate (&XEXP (x, i), insn, tick,
                                       replace) == 0)
            return 0;
        }
      /* Don't bother with these.  They shouldn't occur anyway.  */
      else if (fmt[i] == 'E')
        return 0;
    }

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}
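/* As the comment above notes, a partly invalid value can still be
   useful.  If register 100 was last set from

       (ashiftrt:SI (reg:SI 101) (const_int 31))

   and register 101 has since been clobbered, the REPLACE pass yields

       (ashiftrt:SI (clobber (const_int 0)) (const_int 31))

   which still lets num_sign_bit_copies prove the value is -1 or 0.
   (Register numbers are illustrative only.)  */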
/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (const_rtx x)
{
  unsigned int regno;
  rtx value;
  reg_stat_type *rsp;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
          <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart (GET_MODE (x), value);

  if (!REG_P (x))
    return 0;

  regno = REGNO (x);
  rsp = VEC_index (reg_stat_type, reg_stat, regno);
  value = rsp->last_set_value;

  /* If we don't have a value, or if it isn't for this basic block and
     it's either a hard register, set more than once, or live at the
     beginning of the function, return 0.

     Because if it's not live at the beginning of the function then the reg
     is always set before being used (is never used without being set).
     And, if it's set only once, and it's always set before use, then all
     uses must have the same last value, even if it's not from this basic
     block.  */

  if (value == 0
      || (rsp->last_set_label < label_tick_ebb_start
          && (regno < FIRST_PSEUDO_REGISTER
              || REG_N_SETS (regno) != 1
              || REGNO_REG_SET_P
                 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno))))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once.  */
  if (rsp->last_set_label == label_tick
      && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
    return 0;

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, rsp->last_set,
                               rsp->last_set_label, 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, rsp->last_set,
                               rsp->last_set_label, 1))
    return value;

  return 0;
}
/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_LUID.  */

static int
use_crosses_set_p (const_rtx x, int from_luid)
{
  const char *fmt;
  int i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned endreg = END_REGNO (x);

#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
         because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
        return 1;
#endif
      for (; regno < endreg; regno++)
        {
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
          if (rsp->last_set
              && rsp->last_set_label == label_tick
              && DF_INSN_LUID (rsp->last_set) > from_luid)
            return 1;
        }
      return 0;
    }

  if (code == MEM && mem_last_set > from_luid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (use_crosses_set_p (XVECEXP (x, i, j), from_luid))
              return 1;
        }
      else if (fmt[i] == 'e'
               && use_crosses_set_p (XEXP (x, i), from_luid))
        return 1;
    }

  return 0;
}
/* Define three variables used for communication between the following
   routines.  */

static unsigned int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  unsigned int regno, endregno;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}
/* Return nonzero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (rtx reg, rtx insn)
{
  basic_block block;
  unsigned int i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = END_REGNO (reg);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
     we allow the machine description to decide whether use-and-clobber
     patterns are OK.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
        if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
          return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  */
  for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
      if (reg_dead_flag)
        return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
        return 1;
    }

  /* Get the basic block that we were in.  */
  if (insn == 0)
    block = ENTRY_BLOCK_PTR->next_bb;
  else
    {
      FOR_EACH_BB (block)
        if (insn == BB_HEAD (block))
          break;

      if (block == EXIT_BLOCK_PTR)
        return 0;
    }

  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (df_get_live_in (block), i))
      return 0;

  return 1;
}
/* Note hard registers in X that are used.  */

static void
mark_used_regs_combine (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
         address as used.  */
      if (MEM_P (XEXP (x, 0)))
        mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
         If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          /* None of this applies to the stack, frame or arg pointers.  */
          if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
              || regno == FRAME_POINTER_REGNUM)
            return;

          add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
        }
      return;

    case SET:
      {
        /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
           the address.  */
        rtx testreg = SET_DEST (x);

        while (GET_CODE (testreg) == SUBREG
               || GET_CODE (testreg) == ZERO_EXTRACT
               || GET_CODE (testreg) == STRICT_LOW_PART)
          testreg = XEXP (testreg, 0);

        if (MEM_P (testreg))
          mark_used_regs_combine (XEXP (testreg, 0));

        mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          mark_used_regs_combine (XEXP (x, i));
        else if (fmt[i] == 'E')
          {
            int j;

            for (j = 0; j < XVECLEN (x, i); j++)
              mark_used_regs_combine (XVECEXP (x, i, j));
          }
      }
  }
}
/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (unsigned int regno, rtx insn)
{
  rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    remove_note (insn, note);

  return note;
}
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with luid between FROM_LUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx to_insn,
             rtx *pnotes)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno)->last_death;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
          && ! reg_referenced_p (x, maybe_kill_insn))
        return;

      if (where_dead
          && DF_INSN_LUID (where_dead) >= from_luid
          && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
        {
          rtx note = remove_death (regno, where_dead);

          /* It is possible for the call above to return 0.  This can occur
             when last_death points to I2 or I1 that we combined with.
             In that case make a new note.

             We must also check for the case where X is a hard register
             and NOTE is a death note for a range of hard registers
             including X.  In that case, we must put REG_DEAD notes for
             the remaining registers in place of NOTE.  */

          if (note != 0 && regno < FIRST_PSEUDO_REGISTER
              && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
                  > GET_MODE_SIZE (GET_MODE (x))))
            {
              unsigned int deadregno = REGNO (XEXP (note, 0));
              unsigned int deadend = END_HARD_REGNO (XEXP (note, 0));
              unsigned int ourend = END_HARD_REGNO (x);
              unsigned int i;

              for (i = deadregno; i < deadend; i++)
                if (i < regno || i >= ourend)
                  REG_NOTES (where_dead)
                    = gen_rtx_EXPR_LIST (REG_DEAD,
                                         regno_reg_rtx[i],
                                         REG_NOTES (where_dead));
            }

          /* If we didn't find any note, or if we found a REG_DEAD note that
             covers only part of the given reg, and we have a multi-reg hard
             register, then to be safe we must check for REG_DEAD notes
             for each register other than the first.  They could have
             their own REG_DEAD notes lying around.  */
          else if ((note == 0
                    || (note != 0
                        && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
                            < GET_MODE_SIZE (GET_MODE (x)))))
                   && regno < FIRST_PSEUDO_REGISTER
                   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
            {
              unsigned int ourend = END_HARD_REGNO (x);
              unsigned int i, offset;
              rtx oldnotes = 0;

              if (note)
                offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
              else
                offset = 1;

              for (i = regno + offset; i < ourend; i++)
                move_deaths (regno_reg_rtx[i],
                             maybe_kill_insn, from_luid, to_insn, &oldnotes);
            }

          if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
            {
              XEXP (note, 1) = *pnotes;
              *pnotes = note;
            }
          else
            *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
        }

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
         that accesses one word of a multi-word item, some
         piece of every register in the expression is used by
         this insn, so remove any old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
          || GET_CODE (dest) == STRICT_LOW_PART
          || (GET_CODE (dest) == SUBREG
              && (((GET_MODE_SIZE (GET_MODE (dest))
                    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
        {
          move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
          return;
        }

      /* If this is some other SUBREG, we know it replaces the entire
         value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
        dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
         For a REG (the only other possibility), the entire value is
         being replaced so the old value is not used in this insn.  */

      if (MEM_P (dest))
        move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
                     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
                         to_insn, pnotes);
        }
      else if (fmt[i] == 'e')
        move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
    }
}
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (rtx x, rtx body)
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target = 0;
      unsigned int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
        target = SUBREG_REG (XEXP (dest, 0));
      else
        return 0;

      if (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (!REG_P (target))
        return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
        return target == x;

      endtregno = end_hard_regno (GET_MODE (target), tregno);
      endregno = end_hard_regno (GET_MODE (x), regno);

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
        return 1;

  return 0;
}
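/* For example, given the insn pattern

       (set (zero_extract:SI (reg:SI 100) (const_int 4) (const_int 2))
            (reg:SI 101))

   register 100 is a bit-field target here; distribute_notes below uses
   this predicate to treat such a partial set like a full set when
   deciding where a REG_DEAD note may be placed.  (Register numbers are
   illustrative only.)  */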
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
                  rtx elim_i1)
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
        {
        case REG_BR_PROB:
        case REG_BR_PRED:
          /* Doesn't matter much where we put this, as long as it's somewhere.
             It is preferable to keep these notes on branches, which is most
             likely to be i3.  */
          place = i3;
          break;

        case REG_VALUE_PROFILE:
          /* Just get rid of this note, as it is unused later anyway.  */
          break;

        case REG_NON_LOCAL_GOTO:
          if (JUMP_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && JUMP_P (i2));
              place = i2;
            }
          break;

        case REG_EH_REGION:
          /* These notes must remain with the call or trapping instruction.  */
          if (CALL_P (i3))
            place = i3;
          else if (i2 && CALL_P (i2))
            place = i2;
          else
            {
              gcc_assert (flag_non_call_exceptions);
              if (may_trap_p (i3))
                place = i3;
              else if (i2 && may_trap_p (i2))
                place = i2;
              /* ??? Otherwise assume we've combined things such that we
                 can now prove that the instructions can't trap.  Drop the
                 note in this case.  */
            }
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          /* These notes must remain with the call.  It should not be
             possible for both I2 and I3 to be a call.  */
          if (CALL_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && CALL_P (i2));
              place = i2;
            }
          break;

        case REG_UNUSED:
          /* Any clobbers for i3 may still exist, and so we must process
             REG_UNUSED notes from that insn.

             Any clobbers from i2 or i1 can only exist if they were added by
             recog_for_combine.  In that case, recog_for_combine created the
             necessary REG_UNUSED notes.  Trying to keep any original
             REG_UNUSED notes from these insns can cause incorrect output
             if it is for the same register as the original i3 dest.
             In that case, we will notice that the register is set in i3,
             and then add a REG_UNUSED note for the destination of i3, which
             is wrong.  However, it is possible to have REG_UNUSED notes from
             i2 or i1 for registers which were both used and clobbered, so
             we keep notes from i2 or i1 if they will turn into REG_DEAD
             notes.  */

          /* If this register is set or clobbered in I3, put the note there
             unless there is one already.  */
          if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
            {
              if (from_insn != i3)
                break;

              if (! (REG_P (XEXP (note, 0))
                     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
                     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
                place = i3;
            }
          /* Otherwise, if this register is used by I3, then this register
             now dies here, so we must put a REG_DEAD note here unless there
             is one already.  */
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
                   && ! (REG_P (XEXP (note, 0))
                         ? find_regno_note (i3, REG_DEAD,
                                            REGNO (XEXP (note, 0)))
                         : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
            {
              PUT_REG_NOTE_KIND (note, REG_DEAD);
              place = i3;
            }
          break;

        case REG_EQUAL:
        case REG_EQUIV:
        case REG_NOALIAS:
          /* These notes say something about results of an insn.  We can
             only support them if they used to be on I3 in which case they
             remain on I3.  Otherwise they are ignored.

             If the note refers to an expression that is not a constant, we
             must also ignore the note since we cannot tell whether the
             equivalence is still true.  It might be possible to do
             slightly better than this (we only have a problem if I2DEST
             or I1DEST is present in the expression), but it doesn't
             seem worth the trouble.  */

          if (from_insn == i3
              && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
            place = i3;
          break;

        case REG_INC:
        case REG_NO_CONFLICT:
          /* These notes say something about how a register is used.  They must
             be present on any use of the register in I2 or I3.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;

          if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }
          break;

        case REG_LABEL_TARGET:
        case REG_LABEL_OPERAND:
          /* This can show up in several ways -- either directly in the
             pattern, or hidden off in the constant pool with (or without?)
             a REG_EQUAL note.  */
          /* ??? Ignore the without-reg_equal-note problem for now.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
              || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
                  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
            place = i3;

          if (i2
              && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
                  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
                      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }

          /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
             as a JUMP_LABEL or decrement LABEL_NUSES if it's already
             there.  */
          if (place && JUMP_P (place)
              && REG_NOTE_KIND (note) == REG_LABEL_TARGET
              && (JUMP_LABEL (place) == NULL
                  || JUMP_LABEL (place) == XEXP (note, 0)))
            {
              rtx label = JUMP_LABEL (place);

              if (!label)
                JUMP_LABEL (place) = XEXP (note, 0);
              else if (LABEL_P (label))
                LABEL_NUSES (label)--;
            }

          if (place2 && JUMP_P (place2)
              && REG_NOTE_KIND (note) == REG_LABEL_TARGET
              && (JUMP_LABEL (place2) == NULL
                  || JUMP_LABEL (place2) == XEXP (note, 0)))
            {
              rtx label = JUMP_LABEL (place2);

              if (!label)
                JUMP_LABEL (place2) = XEXP (note, 0);
              else if (LABEL_P (label))
                LABEL_NUSES (label)--;

              place2 = 0;
            }
          break;

        case REG_NONNEG:
          /* This note says something about the value of a register prior
             to the execution of an insn.  It is too much trouble to see
             if the note is still correct in all situations.  It is better
             to simply delete it.  */
          break;

        case REG_RETVAL:
          /* If the insn previously containing this note still exists,
             put it back where it was.  Otherwise move it to the previous
             insn.  Adjust the corresponding REG_LIBCALL note.  */
          if (!NOTE_P (from_insn))
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
              place = prev_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
              /* If we're deleting the last remaining instruction of a
                 libcall sequence, don't add the notes.  */
              else if (XEXP (note, 0) == from_insn)
                tem = place = 0;
              /* Don't add the dangling REG_RETVAL note.  */
              else if (! tem)
                place = 0;
            }
          break;

        case REG_LIBCALL:
          /* This is handled similarly to REG_RETVAL.  */
          if (!NOTE_P (from_insn))
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
              place = next_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
              /* If we're deleting the last remaining instruction of a
                 libcall sequence, don't add the notes.  */
              else if (XEXP (note, 0) == from_insn)
                tem = place = 0;
              /* Don't add the dangling REG_LIBCALL note.  */
              else if (! tem)
                place = 0;
            }
          break;

        case REG_DEAD:
          /* If we replaced the right hand side of FROM_INSN with a
             REG_EQUAL note, the original use of the dying register
             will not have been combined into I3 and I2.  In such cases,
             FROM_INSN is guaranteed to be the first of the combined
             instructions, so we simply need to search back before
             FROM_INSN for the previous use or set of this register,
             then alter the notes there appropriately.

             If the register is used as an input in I3, it dies there.
             Similarly for I2, if it is nonzero and adjacent to I3.

             If the register is not used as an input in either I3 or I2
             and it is not one of the registers we were supposed to eliminate,
             there are two possibilities.  We might have a non-adjacent I2
             or we might have somehow eliminated an additional register
             from a computation.  For example, we might have had A & B where
             we discover that B will always be zero.  In this case we will
             eliminate the reference to A.

             In both cases, we must search to see if we can find a previous
             use of A and put the death note there.  */

          if (from_insn
              && from_insn == i2mod
              && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
            tem = from_insn;
          else
            {
              if (from_insn
                  && CALL_P (from_insn)
                  && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
                place = from_insn;
              else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
                place = i3;
              else if (i2 != 0 && next_nonnote_insn (i2) == i3
                       && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                place = i2;
              else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
                        && !(i2mod
                             && reg_overlap_mentioned_p (XEXP (note, 0),
                                                         i2mod_old_rhs)))
                       || rtx_equal_p (XEXP (note, 0), elim_i1))
                break;
              tem = i3;
            }

          if (place == 0)
            {
              basic_block bb = this_basic_block;

              for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem))
                {
                  if (! INSN_P (tem))
                    {
                      if (tem == BB_HEAD (bb))
                        break;
                      continue;
                    }

                  /* If the register is being set at TEM, see if that is all
                     TEM is doing.  If so, delete TEM.  Otherwise, make this
                     into a REG_UNUSED note instead.  Don't delete sets to
                     global register vars.  */
                  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
                       || !global_regs[REGNO (XEXP (note, 0))])
                      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
                    {
                      rtx set = single_set (tem);
                      rtx inner_dest = 0;
#ifdef HAVE_cc0
                      rtx cc0_setter = NULL_RTX;
#endif

                      if (set != 0)
                        for (inner_dest = SET_DEST (set);
                             (GET_CODE (inner_dest) == STRICT_LOW_PART
                              || GET_CODE (inner_dest) == SUBREG
                              || GET_CODE (inner_dest) == ZERO_EXTRACT);
                             inner_dest = XEXP (inner_dest, 0))
                          ;

                      /* Verify that it was the set, and not a clobber that
                         modified the register.

                         CC0 targets must be careful to maintain setter/user
                         pairs.  If we cannot delete the setter due to side
                         effects, mark the user with an UNUSED note instead
                         of deleting it.  */

                      if (set != 0 && ! side_effects_p (SET_SRC (set))
                          && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
                          && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
                              || ((cc0_setter = prev_cc0_setter (tem)) != NULL
                                  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
                          )
                        {
                          /* Move the notes and links of TEM elsewhere.
                             This might delete other dead insns recursively.
                             First set the pattern to something that won't use
                             any register.  */
                          rtx old_notes = REG_NOTES (tem);

                          PATTERN (tem) = pc_rtx;
                          REG_NOTES (tem) = NULL;

                          distribute_notes (old_notes, tem, tem, NULL_RTX,
                                            NULL_RTX, NULL_RTX);
                          distribute_links (LOG_LINKS (tem));

                          SET_INSN_DELETED (tem);

#ifdef HAVE_cc0
                          /* Delete the setter too.  */
                          if (cc0_setter)
                            {
                              PATTERN (cc0_setter) = pc_rtx;
                              old_notes = REG_NOTES (cc0_setter);
                              REG_NOTES (cc0_setter) = NULL;

                              distribute_notes (old_notes, cc0_setter,
                                                cc0_setter, NULL_RTX,
                                                NULL_RTX, NULL_RTX);
                              distribute_links (LOG_LINKS (cc0_setter));

                              SET_INSN_DELETED (cc0_setter);
                            }
#endif
                        }
                      else
                        {
                          PUT_REG_NOTE_KIND (note, REG_UNUSED);

                          /* If there isn't already a REG_UNUSED note, put one
                             here.  Do not place a REG_DEAD note, even if
                             the register is also used here; that would not
                             match the algorithm used in lifetime analysis
                             and can cause the consistency check in the
                             scheduler to fail.  */
                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            place = tem;
                          break;
                        }
                    }
                  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
                           || (CALL_P (tem)
                               && find_reg_fusage (tem, USE, XEXP (note, 0))))
                    {
                      place = tem;

                      /* If we are doing a 3->2 combination, and we have a
                         register which formerly died in i3 and was not used
                         by i2, which now no longer dies in i3 and is used in
                         i2 but does not die in i2, and place is between i2
                         and i3, then we may need to move a link from place to
                         i2.  */
                      if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
                          && from_insn
                          && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
                          && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                        {
                          rtx links = LOG_LINKS (place);
                          LOG_LINKS (place) = 0;
                          distribute_links (links);
                        }
                      break;
                    }

                  if (tem == BB_HEAD (bb))
                    break;
                }
            }

          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.
             We check here if it is set at all, not whether it is totally
             replaced, which is what `dead_or_set_p' checks, so also check
             for it being set partially.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              unsigned int regno = REGNO (XEXP (note, 0));
              reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     last_death.  [I no longer understand why this is
                     being done.]  */
                  if (rsp->last_death != place)
                    rsp->last_death = 0;
                  place = 0;
                }
              else
                rsp->last_death = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must arrange for an appropriate REG_DEAD
                 note to be added for it.  However, we can't just emit a USE
                 and tag the note to it, since the register might actually
                 be dead; so we recurse, and the recursive call then finds
                 the previous insn that used this register.  */

              if (place && regno < FIRST_PSEUDO_REGISTER
                  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
                {
                  unsigned int endregno = END_HARD_REGNO (XEXP (note, 0));
                  int all_used = 1;
                  unsigned int i;

                  for (i = regno; i < endregno; i++)
                    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                         && ! find_regno_fusage (place, USE, i))
                        || dead_or_set_regno_p (place, i))
                      all_used = 0;

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         not already dead or set.  */

                      for (i = regno; i < endregno;
                           i += hard_regno_nregs[i][reg_raw_mode[i]])
                        {
                          rtx piece = regno_reg_rtx[i];
                          basic_block bb = this_basic_block;

                          if (! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            {
                              rtx new_note
                                = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);

                              distribute_notes (new_note, place, place,
                                                NULL_RTX, NULL_RTX, NULL_RTX);
                            }
                          else if (! refers_to_regno_p (i, i + 1,
                                                        PATTERN (place), 0)
                                   && ! find_regno_fusage (place, USE, i))
                            for (tem = PREV_INSN (place); ;
                                 tem = PREV_INSN (tem))
                              {
                                if (! INSN_P (tem))
                                  {
                                    if (tem == BB_HEAD (bb))
                                      break;
                                    continue;
                                  }
                                if (dead_or_set_p (tem, piece)
                                    || reg_bitfield_target_p (piece,
                                                              PATTERN (tem)))
                                  {
                                    REG_NOTES (tem)
                                      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
                                                           REG_NOTES (tem));
                                    break;
                                  }
                              }
                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          gcc_unreachable ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }

      if (place2)
        REG_NOTES (place2)
          = gen_rtx_fmt_ee (GET_CODE (note), REG_NOTE_KIND (note),
                            XEXP (note, 0), REG_NOTES (place2));
    }
}
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called to add a link
   pointing at I3 when I3's destination is changed.  */

static void
distribute_links (rtx links)
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link to
         have been changed by combine.  The only way that could happen is if
         we replace I3, I2, and I1 by I3 and I2.  But in that case the
         destination of I2 also remains unchanged.  */

      if (NOTE_P (XEXP (link, 0))
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                     || BB_HEAD (this_basic_block->next_bb) != insn));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (CALL_P (insn)
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }
        else if (INSN_P (insn) && reg_set_p (reg, insn))
          break;

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
                added_links_insn = place;
            }
        }
    }
}
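/* For instance, if combine deletes insn 11 in

       insn 10: (set (reg:SI 100) ...)
       insn 11: (set (reg:SI 101) (reg:SI 100))   [had a link to insn 10]
       insn 12: (use of (reg:SI 100))

   the link that lived on insn 11 is redistributed by scanning forward
   from insn 10; insn 12 is the first remaining use of register 100 in
   the block, so the link is placed there.  (Insn and register numbers
   are illustrative only.)  */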
/* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
   Check whether the expression pointed to by LOC is a register or
   memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
   Otherwise return zero.  */

static int
unmentioned_reg_p_1 (rtx *loc, void *expr)
{
  rtx x = *loc;

  if (x != NULL_RTX
      && (REG_P (x) || MEM_P (x))
      && ! reg_mentioned_p (x, (rtx) expr))
    return 1;

  return 0;
}

/* Check for any register or memory mentioned in EQUIV that is not
   mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
   of EXPR where some registers may have been replaced by constants.  */

static bool
unmentioned_reg_p (rtx equiv, rtx expr)
{
  return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
}
void
dump_combine_stats (FILE *file)
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (FILE *file)
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}
static bool
gate_handle_combine (void)
{
  return (optimize > 0);
}

/* Try combining insns through substitution.  */
static unsigned int
rest_of_handle_combine (void)
{
  int rebuild_jump_labels_after_combine;

  df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
  df_note_add_problem ();
  df_analyze ();

  regstat_init_n_sets_and_refs ();

  rebuild_jump_labels_after_combine
    = combine_instructions (get_insns (), max_reg_num ());

  /* Combining insns may have turned an indirect jump into a
     direct jump.  Rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_jump_labels_after_combine)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }

  regstat_free_n_sets_and_refs ();
  return 0;
}

struct tree_opt_pass pass_combine =
{
  "combine",                            /* name */
  gate_handle_combine,                  /* gate */
  rest_of_handle_combine,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'c'                                   /* letter */
};