1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
52 There are a few exceptions where the dataflow information created by
53 flow.c aren't completely updated:
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_notes) when a
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61 removed because there is no way to know which register it was
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
79 #include "coretypes.h"
86 #include "hard-reg-set.h"
87 #include "basic-block.h"
88 #include "insn-config.h"
90 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
92 #include "insn-attr.h"
98 #include "insn-codes.h"
99 #include "rtlhooks-def.h"
100 /* Include output.h for dump_file. */
104 #include "tree-pass.h"
106 /* Number of attempts to combine instructions in this function. */
108 static int combine_attempts
;
110 /* Number of attempts that got as far as substitution in this function. */
112 static int combine_merges
;
114 /* Number of instructions combined with added SETs in this function. */
116 static int combine_extras
;
118 /* Number of instructions combined in this function. */
120 static int combine_successes
;
122 /* Totals over entire compilation. */
124 static int total_attempts
, total_merges
, total_extras
, total_successes
;
126 /* Sometimes combine tries to replace the right hand side of an insn
127 with the value of a REG_EQUAL note. This is the insn that has been
128 so modified, or null if none. */
130 static rtx replaced_rhs_insn
;
132 /* When REPLACED_RHS_INSN is nonnull, this is a copy of the new right
135 static rtx replaced_rhs_value
;
137 /* Vector mapping INSN_UIDs to cuids.
138 The cuids are like uids but increase monotonically always.
139 Combine always uses cuids so that it can compare them.
140 But actually renumbering the uids, which we used to do,
141 proves to be a bad idea because it makes it hard to compare
142 the dumps produced by earlier passes with those from later passes. */
144 static int *uid_cuid
;
145 static int max_uid_cuid
;
147 /* Get the cuid of an insn. */
149 #define INSN_CUID(INSN) \
150 (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
152 /* Maximum register number, which is the size of the tables below. */
154 static unsigned int combine_max_regno
;
157 /* Record last point of death of (hard or pseudo) register n. */
160 /* Record last point of modification of (hard or pseudo) register n. */
163 /* The next group of fields allows the recording of the last value assigned
164 to (hard or pseudo) register n. We use this information to see if an
165 operation being processed is redundant given a prior operation performed
166 on the register. For example, an `and' with a constant is redundant if
167 all the zero bits are already known to be turned off.
169 We use an approach similar to that used by cse, but change it in the
172 (1) We do not want to reinitialize at each label.
173 (2) It is useful, but not critical, to know the actual value assigned
174 to a register. Often just its form is helpful.
176 Therefore, we maintain the following fields:
178 last_set_value the last value assigned
179 last_set_label records the value of label_tick when the
180 register was assigned
181 last_set_table_tick records the value of label_tick when a
182 value using the register is assigned
183 last_set_invalid set to nonzero when it is not valid
184 to use the value of this register in some
187 To understand the usage of these tables, it is important to understand
188 the distinction between the value in last_set_value being valid and
189 the register being validly contained in some other expression in the
192 (The next two parameters are out of date).
194 reg_stat[i].last_set_value is valid if it is nonzero, and either
195 reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.
197 Register I may validly appear in any expression returned for the value
198 of another register if reg_n_sets[i] is 1. It may also appear in the
199 value for register J if reg_stat[j].last_set_invalid is zero, or
200 reg_stat[i].last_set_label < reg_stat[j].last_set_label.
202 If an expression is found in the table containing a register which may
203 not validly appear in an expression, the register is replaced by
204 something that won't match, (clobber (const_int 0)). */
206 /* Record last value assigned to (hard or pseudo) register n. */
210 /* Record the value of label_tick when an expression involving register n
211 is placed in last_set_value. */
213 int last_set_table_tick
;
215 /* Record the value of label_tick when the value for register n is placed in
220 /* These fields are maintained in parallel with last_set_value and are
221 used to store the mode in which the register was last set, the bits
222 that were known to be zero when it was last set, and the number of
223 sign bits copies it was known to have when it was last set. */
225 unsigned HOST_WIDE_INT last_set_nonzero_bits
;
226 char last_set_sign_bit_copies
;
227 ENUM_BITFIELD(machine_mode
) last_set_mode
: 8;
229 /* Set nonzero if references to register n in expressions should not be
230 used. last_set_invalid is set nonzero when this register is being
231 assigned to and last_set_table_tick == label_tick. */
233 char last_set_invalid
;
235 /* Some registers that are set more than once and used in more than one
236 basic block are nevertheless always set in similar ways. For example,
237 a QImode register may be loaded from memory in two places on a machine
238 where byte loads zero extend.
240 We record in the following fields if a register has some leading bits
241 that are always equal to the sign bit, and what we know about the
242 nonzero bits of a register, specifically which bits are known to be
245 If an entry is zero, it means that we don't know anything special. */
247 unsigned char sign_bit_copies
;
249 unsigned HOST_WIDE_INT nonzero_bits
;
251 /* Record the value of the label_tick when the last truncation
252 happened. The field truncated_to_mode is only valid if
253 truncation_label == label_tick. */
255 int truncation_label
;
257 /* Record the last truncation seen for this register. If truncation
258 is not a nop to this mode we might be able to save an explicit
259 truncation if we know that value already contains a truncated
262 ENUM_BITFIELD(machine_mode
) truncated_to_mode
: 8;
265 static struct reg_stat
*reg_stat
;
267 /* Record the cuid of the last insn that invalidated memory
268 (anything that writes memory, and subroutine calls, but not pushes). */
270 static int mem_last_set
;
272 /* Record the cuid of the last CALL_INSN
273 so we can tell whether a potential combination crosses any calls. */
275 static int last_call_cuid
;
277 /* When `subst' is called, this is the insn that is being modified
278 (by combining in a previous insn). The PATTERN of this insn
279 is still the old pattern partially modified and it should not be
280 looked at, but this may be used to examine the successors of the insn
281 to judge whether a simplification is valid. */
283 static rtx subst_insn
;
285 /* This is the lowest CUID that `subst' is currently dealing with.
286 get_last_value will not return a value if the register was set at or
287 after this CUID. If not for this mechanism, we could get confused if
288 I2 or I1 in try_combine were an insn that used the old value of a register
289 to obtain a new value. In that case, we might erroneously get the
290 new value of the register when we wanted the old one. */
292 static int subst_low_cuid
;
294 /* This contains any hard registers that are used in newpat; reg_dead_at_p
295 must consider all these registers to be always live. */
297 static HARD_REG_SET newpat_used_regs
;
299 /* This is an insn to which a LOG_LINKS entry has been added. If this
300 insn is the earlier than I2 or I3, combine should rescan starting at
303 static rtx added_links_insn
;
305 /* Basic block in which we are performing combines. */
306 static basic_block this_basic_block
;
308 /* A bitmap indicating which blocks had registers go dead at entry.
309 After combine, we'll need to re-do global life analysis with
310 those blocks as starting points. */
311 static sbitmap refresh_blocks
;
313 /* The following array records the insn_rtx_cost for every insn
314 in the instruction stream. */
316 static int *uid_insn_cost
;
318 /* Length of the currently allocated uid_insn_cost array. */
320 static int last_insn_cost
;
322 /* Incremented for each label. */
324 static int label_tick
;
326 /* Mode used to compute significance in reg_stat[].nonzero_bits. It is the
327 largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
329 static enum machine_mode nonzero_bits_mode
;
331 /* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
332 be safely used. It is zero while computing them and after combine has
333 completed. This former test prevents propagating values based on
334 previously set values, which can be incorrect if a variable is modified
337 static int nonzero_sign_valid
;
340 /* Record one modification to rtl structure
341 to be undone by storing old_contents into *where. */
346 enum { UNDO_RTX
, UNDO_INT
, UNDO_MODE
} kind
;
347 union { rtx r
; int i
; enum machine_mode m
; } old_contents
;
348 union { rtx
*r
; int *i
; } where
;
351 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
352 num_undo says how many are currently recorded.
354 other_insn is nonzero if we have modified some other insn in the process
355 of working on subst_insn. It must be verified too. */
364 static struct undobuf undobuf
;
366 /* Number of times the pseudo being substituted for
367 was found and replaced. */
369 static int n_occurrences
;
371 static rtx
reg_nonzero_bits_for_combine (rtx
, enum machine_mode
, rtx
,
373 unsigned HOST_WIDE_INT
,
374 unsigned HOST_WIDE_INT
*);
375 static rtx
reg_num_sign_bit_copies_for_combine (rtx
, enum machine_mode
, rtx
,
377 unsigned int, unsigned int *);
378 static void do_SUBST (rtx
*, rtx
);
379 static void do_SUBST_INT (int *, int);
380 static void init_reg_last (void);
381 static void setup_incoming_promotions (void);
382 static void set_nonzero_bits_and_sign_copies (rtx
, rtx
, void *);
383 static int cant_combine_insn_p (rtx
);
384 static int can_combine_p (rtx
, rtx
, rtx
, rtx
, rtx
*, rtx
*);
385 static int combinable_i3pat (rtx
, rtx
*, rtx
, rtx
, int, rtx
*);
386 static int contains_muldiv (rtx
);
387 static rtx
try_combine (rtx
, rtx
, rtx
, int *);
388 static void undo_all (void);
389 static void undo_commit (void);
390 static rtx
*find_split_point (rtx
*, rtx
);
391 static rtx
subst (rtx
, rtx
, rtx
, int, int);
392 static rtx
combine_simplify_rtx (rtx
, enum machine_mode
, int);
393 static rtx
simplify_if_then_else (rtx
);
394 static rtx
simplify_set (rtx
);
395 static rtx
simplify_logical (rtx
);
396 static rtx
expand_compound_operation (rtx
);
397 static rtx
expand_field_assignment (rtx
);
398 static rtx
make_extraction (enum machine_mode
, rtx
, HOST_WIDE_INT
,
399 rtx
, unsigned HOST_WIDE_INT
, int, int, int);
400 static rtx
extract_left_shift (rtx
, int);
401 static rtx
make_compound_operation (rtx
, enum rtx_code
);
402 static int get_pos_from_mask (unsigned HOST_WIDE_INT
,
403 unsigned HOST_WIDE_INT
*);
404 static rtx
canon_reg_for_combine (rtx
, rtx
);
405 static rtx
force_to_mode (rtx
, enum machine_mode
,
406 unsigned HOST_WIDE_INT
, int);
407 static rtx
if_then_else_cond (rtx
, rtx
*, rtx
*);
408 static rtx
known_cond (rtx
, enum rtx_code
, rtx
, rtx
);
409 static int rtx_equal_for_field_assignment_p (rtx
, rtx
);
410 static rtx
make_field_assignment (rtx
);
411 static rtx
apply_distributive_law (rtx
);
412 static rtx
distribute_and_simplify_rtx (rtx
, int);
413 static rtx
simplify_and_const_int_1 (enum machine_mode
, rtx
,
414 unsigned HOST_WIDE_INT
);
415 static rtx
simplify_and_const_int (rtx
, enum machine_mode
, rtx
,
416 unsigned HOST_WIDE_INT
);
417 static int merge_outer_ops (enum rtx_code
*, HOST_WIDE_INT
*, enum rtx_code
,
418 HOST_WIDE_INT
, enum machine_mode
, int *);
419 static rtx
simplify_shift_const_1 (enum rtx_code
, enum machine_mode
, rtx
, int);
420 static rtx
simplify_shift_const (rtx
, enum rtx_code
, enum machine_mode
, rtx
,
422 static int recog_for_combine (rtx
*, rtx
, rtx
*);
423 static rtx
gen_lowpart_for_combine (enum machine_mode
, rtx
);
424 static enum rtx_code
simplify_comparison (enum rtx_code
, rtx
*, rtx
*);
425 static void update_table_tick (rtx
);
426 static void record_value_for_reg (rtx
, rtx
, rtx
);
427 static void check_conversions (rtx
, rtx
);
428 static void record_dead_and_set_regs_1 (rtx
, rtx
, void *);
429 static void record_dead_and_set_regs (rtx
);
430 static int get_last_value_validate (rtx
*, rtx
, int, int);
431 static rtx
get_last_value (rtx
);
432 static int use_crosses_set_p (rtx
, int);
433 static void reg_dead_at_p_1 (rtx
, rtx
, void *);
434 static int reg_dead_at_p (rtx
, rtx
);
435 static void move_deaths (rtx
, rtx
, int, rtx
, rtx
*);
436 static int reg_bitfield_target_p (rtx
, rtx
);
437 static void distribute_notes (rtx
, rtx
, rtx
, rtx
, rtx
, rtx
);
438 static void distribute_links (rtx
);
439 static void mark_used_regs_combine (rtx
);
440 static int insn_cuid (rtx
);
441 static void record_promoted_value (rtx
, rtx
);
442 static int unmentioned_reg_p_1 (rtx
*, void *);
443 static bool unmentioned_reg_p (rtx
, rtx
);
444 static void record_truncated_value (rtx
);
445 static bool reg_truncated_to_mode (enum machine_mode
, rtx
);
446 static rtx
gen_lowpart_or_truncate (enum machine_mode
, rtx
);
449 /* It is not safe to use ordinary gen_lowpart in combine.
450 See comments in gen_lowpart_for_combine. */
451 #undef RTL_HOOKS_GEN_LOWPART
452 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_for_combine
454 /* Our implementation of gen_lowpart never emits a new pseudo. */
455 #undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
456 #define RTL_HOOKS_GEN_LOWPART_NO_EMIT gen_lowpart_for_combine
458 #undef RTL_HOOKS_REG_NONZERO_REG_BITS
459 #define RTL_HOOKS_REG_NONZERO_REG_BITS reg_nonzero_bits_for_combine
461 #undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
462 #define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES reg_num_sign_bit_copies_for_combine
464 #undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
465 #define RTL_HOOKS_REG_TRUNCATED_TO_MODE reg_truncated_to_mode
467 static const struct rtl_hooks combine_rtl_hooks
= RTL_HOOKS_INITIALIZER
;
470 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
471 insn. The substitution can be undone by undo_all. If INTO is already
472 set to NEWVAL, do not record this change. Because computing NEWVAL might
473 also call SUBST, we have to compute it before we put anything into
477 do_SUBST (rtx
*into
, rtx newval
)
482 if (oldval
== newval
)
485 /* We'd like to catch as many invalid transformations here as
486 possible. Unfortunately, there are way too many mode changes
487 that are perfectly valid, so we'd waste too much effort for
488 little gain doing the checks here. Focus on catching invalid
489 transformations involving integer constants. */
490 if (GET_MODE_CLASS (GET_MODE (oldval
)) == MODE_INT
491 && GET_CODE (newval
) == CONST_INT
)
493 /* Sanity check that we're replacing oldval with a CONST_INT
494 that is a valid sign-extension for the original mode. */
495 gcc_assert (INTVAL (newval
)
496 == trunc_int_for_mode (INTVAL (newval
), GET_MODE (oldval
)));
498 /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
499 CONST_INT is not valid, because after the replacement, the
500 original mode would be gone. Unfortunately, we can't tell
501 when do_SUBST is called to replace the operand thereof, so we
502 perform this test on oldval instead, checking whether an
503 invalid replacement took place before we got here. */
504 gcc_assert (!(GET_CODE (oldval
) == SUBREG
505 && GET_CODE (SUBREG_REG (oldval
)) == CONST_INT
));
506 gcc_assert (!(GET_CODE (oldval
) == ZERO_EXTEND
507 && GET_CODE (XEXP (oldval
, 0)) == CONST_INT
));
511 buf
= undobuf
.frees
, undobuf
.frees
= buf
->next
;
513 buf
= XNEW (struct undo
);
515 buf
->kind
= UNDO_RTX
;
517 buf
->old_contents
.r
= oldval
;
520 buf
->next
= undobuf
.undos
, undobuf
.undos
= buf
;
523 #define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL))
525 /* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
526 for the value of a HOST_WIDE_INT value (including CONST_INT) is
530 do_SUBST_INT (int *into
, int newval
)
535 if (oldval
== newval
)
539 buf
= undobuf
.frees
, undobuf
.frees
= buf
->next
;
541 buf
= XNEW (struct undo
);
543 buf
->kind
= UNDO_INT
;
545 buf
->old_contents
.i
= oldval
;
548 buf
->next
= undobuf
.undos
, undobuf
.undos
= buf
;
551 #define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))
553 /* Similar to SUBST, but just substitute the mode. This is used when
554 changing the mode of a pseudo-register, so that any other
555 references to the entry in the regno_reg_rtx array will change as
559 do_SUBST_MODE (rtx
*into
, enum machine_mode newval
)
562 enum machine_mode oldval
= GET_MODE (*into
);
564 if (oldval
== newval
)
568 buf
= undobuf
.frees
, undobuf
.frees
= buf
->next
;
570 buf
= XNEW (struct undo
);
572 buf
->kind
= UNDO_MODE
;
574 buf
->old_contents
.m
= oldval
;
575 PUT_MODE (*into
, newval
);
577 buf
->next
= undobuf
.undos
, undobuf
.undos
= buf
;
580 #define SUBST_MODE(INTO, NEWVAL) do_SUBST_MODE(&(INTO), (NEWVAL))
582 /* Subroutine of try_combine. Determine whether the combine replacement
583 patterns NEWPAT and NEWI2PAT are cheaper according to insn_rtx_cost
584 that the original instruction sequence I1, I2 and I3. Note that I1
585 and/or NEWI2PAT may be NULL_RTX. This function returns false, if the
586 costs of all instructions can be estimated, and the replacements are
587 more expensive than the original sequence. */
590 combine_validate_cost (rtx i1
, rtx i2
, rtx i3
, rtx newpat
, rtx newi2pat
)
592 int i1_cost
, i2_cost
, i3_cost
;
593 int new_i2_cost
, new_i3_cost
;
594 int old_cost
, new_cost
;
596 /* Lookup the original insn_rtx_costs. */
597 i2_cost
= INSN_UID (i2
) <= last_insn_cost
598 ? uid_insn_cost
[INSN_UID (i2
)] : 0;
599 i3_cost
= INSN_UID (i3
) <= last_insn_cost
600 ? uid_insn_cost
[INSN_UID (i3
)] : 0;
604 i1_cost
= INSN_UID (i1
) <= last_insn_cost
605 ? uid_insn_cost
[INSN_UID (i1
)] : 0;
606 old_cost
= (i1_cost
> 0 && i2_cost
> 0 && i3_cost
> 0)
607 ? i1_cost
+ i2_cost
+ i3_cost
: 0;
611 old_cost
= (i2_cost
> 0 && i3_cost
> 0) ? i2_cost
+ i3_cost
: 0;
615 /* Calculate the replacement insn_rtx_costs. */
616 new_i3_cost
= insn_rtx_cost (newpat
);
619 new_i2_cost
= insn_rtx_cost (newi2pat
);
620 new_cost
= (new_i2_cost
> 0 && new_i3_cost
> 0)
621 ? new_i2_cost
+ new_i3_cost
: 0;
625 new_cost
= new_i3_cost
;
629 if (undobuf
.other_insn
)
631 int old_other_cost
, new_other_cost
;
633 old_other_cost
= (INSN_UID (undobuf
.other_insn
) <= last_insn_cost
634 ? uid_insn_cost
[INSN_UID (undobuf
.other_insn
)] : 0);
635 new_other_cost
= insn_rtx_cost (PATTERN (undobuf
.other_insn
));
636 if (old_other_cost
> 0 && new_other_cost
> 0)
638 old_cost
+= old_other_cost
;
639 new_cost
+= new_other_cost
;
645 /* Disallow this recombination if both new_cost and old_cost are
646 greater than zero, and new_cost is greater than old cost. */
648 && new_cost
> old_cost
)
655 "rejecting combination of insns %d, %d and %d\n",
656 INSN_UID (i1
), INSN_UID (i2
), INSN_UID (i3
));
657 fprintf (dump_file
, "original costs %d + %d + %d = %d\n",
658 i1_cost
, i2_cost
, i3_cost
, old_cost
);
663 "rejecting combination of insns %d and %d\n",
664 INSN_UID (i2
), INSN_UID (i3
));
665 fprintf (dump_file
, "original costs %d + %d = %d\n",
666 i2_cost
, i3_cost
, old_cost
);
671 fprintf (dump_file
, "replacement costs %d + %d = %d\n",
672 new_i2_cost
, new_i3_cost
, new_cost
);
675 fprintf (dump_file
, "replacement cost %d\n", new_cost
);
681 /* Update the uid_insn_cost array with the replacement costs. */
682 uid_insn_cost
[INSN_UID (i2
)] = new_i2_cost
;
683 uid_insn_cost
[INSN_UID (i3
)] = new_i3_cost
;
685 uid_insn_cost
[INSN_UID (i1
)] = 0;
690 /* Main entry point for combiner. F is the first insn of the function.
691 NREGS is the first unused pseudo-reg number.
693 Return nonzero if the combiner has turned an indirect jump
694 instruction into a direct jump. */
696 combine_instructions (rtx f
, unsigned int nregs
)
704 rtx links
, nextlinks
;
705 sbitmap_iterator sbi
;
707 int new_direct_jump_p
= 0;
709 combine_attempts
= 0;
712 combine_successes
= 0;
714 combine_max_regno
= nregs
;
716 rtl_hooks
= combine_rtl_hooks
;
718 reg_stat
= XCNEWVEC (struct reg_stat
, nregs
);
720 init_recog_no_volatile ();
722 /* Compute maximum uid value so uid_cuid can be allocated. */
724 for (insn
= f
, i
= 0; insn
; insn
= NEXT_INSN (insn
))
725 if (INSN_UID (insn
) > i
)
728 uid_cuid
= XNEWVEC (int, i
+ 1);
731 nonzero_bits_mode
= mode_for_size (HOST_BITS_PER_WIDE_INT
, MODE_INT
, 0);
733 /* Don't use reg_stat[].nonzero_bits when computing it. This can cause
734 problems when, for example, we have j <<= 1 in a loop. */
736 nonzero_sign_valid
= 0;
738 /* Compute the mapping from uids to cuids.
739 Cuids are numbers assigned to insns, like uids,
740 except that cuids increase monotonically through the code.
742 Scan all SETs and see if we can deduce anything about what
743 bits are known to be zero for some registers and how many copies
744 of the sign bit are known to exist for those registers.
746 Also set any known values so that we can use it while searching
747 for what bits are known to be set. */
751 setup_incoming_promotions ();
753 refresh_blocks
= sbitmap_alloc (last_basic_block
);
754 sbitmap_zero (refresh_blocks
);
756 /* Allocate array of current insn_rtx_costs. */
757 uid_insn_cost
= XCNEWVEC (int, max_uid_cuid
+ 1);
758 last_insn_cost
= max_uid_cuid
;
760 for (insn
= f
, i
= 0; insn
; insn
= NEXT_INSN (insn
))
762 uid_cuid
[INSN_UID (insn
)] = ++i
;
768 note_stores (PATTERN (insn
), set_nonzero_bits_and_sign_copies
,
770 record_dead_and_set_regs (insn
);
773 for (links
= REG_NOTES (insn
); links
; links
= XEXP (links
, 1))
774 if (REG_NOTE_KIND (links
) == REG_INC
)
775 set_nonzero_bits_and_sign_copies (XEXP (links
, 0), NULL_RTX
,
779 /* Record the current insn_rtx_cost of this instruction. */
780 if (NONJUMP_INSN_P (insn
))
781 uid_insn_cost
[INSN_UID (insn
)] = insn_rtx_cost (PATTERN (insn
));
783 fprintf(dump_file
, "insn_cost %d: %d\n",
784 INSN_UID (insn
), uid_insn_cost
[INSN_UID (insn
)]);
791 nonzero_sign_valid
= 1;
793 /* Now scan all the insns in forward order. */
799 setup_incoming_promotions ();
801 FOR_EACH_BB (this_basic_block
)
803 for (insn
= BB_HEAD (this_basic_block
);
804 insn
!= NEXT_INSN (BB_END (this_basic_block
));
805 insn
= next
? next
: NEXT_INSN (insn
))
812 else if (INSN_P (insn
))
814 /* See if we know about function return values before this
815 insn based upon SUBREG flags. */
816 check_conversions (insn
, PATTERN (insn
));
818 /* Try this insn with each insn it links back to. */
820 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
821 if ((next
= try_combine (insn
, XEXP (links
, 0),
822 NULL_RTX
, &new_direct_jump_p
)) != 0)
825 /* Try each sequence of three linked insns ending with this one. */
827 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
829 rtx link
= XEXP (links
, 0);
831 /* If the linked insn has been replaced by a note, then there
832 is no point in pursuing this chain any further. */
836 for (nextlinks
= LOG_LINKS (link
);
838 nextlinks
= XEXP (nextlinks
, 1))
839 if ((next
= try_combine (insn
, link
,
841 &new_direct_jump_p
)) != 0)
846 /* Try to combine a jump insn that uses CC0
847 with a preceding insn that sets CC0, and maybe with its
848 logical predecessor as well.
849 This is how we make decrement-and-branch insns.
850 We need this special code because data flow connections
851 via CC0 do not get entered in LOG_LINKS. */
854 && (prev
= prev_nonnote_insn (insn
)) != 0
855 && NONJUMP_INSN_P (prev
)
856 && sets_cc0_p (PATTERN (prev
)))
858 if ((next
= try_combine (insn
, prev
,
859 NULL_RTX
, &new_direct_jump_p
)) != 0)
862 for (nextlinks
= LOG_LINKS (prev
); nextlinks
;
863 nextlinks
= XEXP (nextlinks
, 1))
864 if ((next
= try_combine (insn
, prev
,
866 &new_direct_jump_p
)) != 0)
870 /* Do the same for an insn that explicitly references CC0. */
871 if (NONJUMP_INSN_P (insn
)
872 && (prev
= prev_nonnote_insn (insn
)) != 0
873 && NONJUMP_INSN_P (prev
)
874 && sets_cc0_p (PATTERN (prev
))
875 && GET_CODE (PATTERN (insn
)) == SET
876 && reg_mentioned_p (cc0_rtx
, SET_SRC (PATTERN (insn
))))
878 if ((next
= try_combine (insn
, prev
,
879 NULL_RTX
, &new_direct_jump_p
)) != 0)
882 for (nextlinks
= LOG_LINKS (prev
); nextlinks
;
883 nextlinks
= XEXP (nextlinks
, 1))
884 if ((next
= try_combine (insn
, prev
,
886 &new_direct_jump_p
)) != 0)
890 /* Finally, see if any of the insns that this insn links to
891 explicitly references CC0. If so, try this insn, that insn,
892 and its predecessor if it sets CC0. */
893 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
894 if (NONJUMP_INSN_P (XEXP (links
, 0))
895 && GET_CODE (PATTERN (XEXP (links
, 0))) == SET
896 && reg_mentioned_p (cc0_rtx
, SET_SRC (PATTERN (XEXP (links
, 0))))
897 && (prev
= prev_nonnote_insn (XEXP (links
, 0))) != 0
898 && NONJUMP_INSN_P (prev
)
899 && sets_cc0_p (PATTERN (prev
))
900 && (next
= try_combine (insn
, XEXP (links
, 0),
901 prev
, &new_direct_jump_p
)) != 0)
905 /* Try combining an insn with two different insns whose results it
907 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
908 for (nextlinks
= XEXP (links
, 1); nextlinks
;
909 nextlinks
= XEXP (nextlinks
, 1))
910 if ((next
= try_combine (insn
, XEXP (links
, 0),
912 &new_direct_jump_p
)) != 0)
915 /* Try this insn with each REG_EQUAL note it links back to. */
916 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
919 rtx temp
= XEXP (links
, 0);
920 if ((set
= single_set (temp
)) != 0
921 && (note
= find_reg_equal_equiv_note (temp
)) != 0
922 && (note
= XEXP (note
, 0), GET_CODE (note
)) != EXPR_LIST
923 /* Avoid using a register that may already been marked
924 dead by an earlier instruction. */
925 && ! unmentioned_reg_p (note
, SET_SRC (set
))
926 && (GET_MODE (note
) == VOIDmode
927 ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set
)))
928 : GET_MODE (SET_DEST (set
)) == GET_MODE (note
)))
930 /* Temporarily replace the set's source with the
931 contents of the REG_EQUAL note. The insn will
932 be deleted or recognized by try_combine. */
933 rtx orig
= SET_SRC (set
);
934 SET_SRC (set
) = note
;
935 replaced_rhs_insn
= temp
;
936 replaced_rhs_value
= copy_rtx (note
);
937 next
= try_combine (insn
, temp
, NULL_RTX
,
939 replaced_rhs_insn
= NULL
;
942 SET_SRC (set
) = orig
;
947 record_dead_and_set_regs (insn
);
956 EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks
, 0, j
, sbi
)
957 BASIC_BLOCK (j
)->flags
|= BB_DIRTY
;
958 new_direct_jump_p
|= purge_all_dead_edges ();
959 delete_noop_moves ();
961 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES
,
962 PROP_DEATH_NOTES
| PROP_SCAN_DEAD_CODE
963 | PROP_KILL_DEAD_CODE
);
966 sbitmap_free (refresh_blocks
);
967 free (uid_insn_cost
);
972 struct undo
*undo
, *next
;
973 for (undo
= undobuf
.frees
; undo
; undo
= next
)
981 total_attempts
+= combine_attempts
;
982 total_merges
+= combine_merges
;
983 total_extras
+= combine_extras
;
984 total_successes
+= combine_successes
;
986 nonzero_sign_valid
= 0;
987 rtl_hooks
= general_rtl_hooks
;
989 /* Make recognizer allow volatile MEMs again. */
992 return new_direct_jump_p
;
995 /* Wipe the last_xxx fields of reg_stat in preparation for another pass. */
1001 for (i
= 0; i
< combine_max_regno
; i
++)
1002 memset (reg_stat
+ i
, 0, offsetof (struct reg_stat
, sign_bit_copies
));
1005 /* Set up any promoted values for incoming argument registers. */
1008 setup_incoming_promotions (void)
1012 enum machine_mode mode
;
1014 rtx first
= get_insns ();
1016 if (targetm
.calls
.promote_function_args (TREE_TYPE (cfun
->decl
)))
1018 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1019 /* Check whether this register can hold an incoming pointer
1020 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1021 numbers, so translate if necessary due to register windows. */
1022 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno
))
1023 && (reg
= promoted_input_arg (regno
, &mode
, &unsignedp
)) != 0)
1025 record_value_for_reg
1026 (reg
, first
, gen_rtx_fmt_e ((unsignedp
? ZERO_EXTEND
1029 gen_rtx_CLOBBER (mode
, const0_rtx
)));
1034 /* Called via note_stores. If X is a pseudo that is narrower than
1035 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
1037 If we are setting only a portion of X and we can't figure out what
1038 portion, assume all bits will be used since we don't know what will
1041 Similarly, set how many bits of X are known to be copies of the sign bit
1042 at all locations in the function. This is the smallest number implied
1046 set_nonzero_bits_and_sign_copies (rtx x
, rtx set
,
1047 void *data ATTRIBUTE_UNUSED
)
1052 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
1053 /* If this register is undefined at the start of the file, we can't
1054 say what its contents were. */
1055 && ! REGNO_REG_SET_P
1056 (ENTRY_BLOCK_PTR
->next_bb
->il
.rtl
->global_live_at_start
, REGNO (x
))
1057 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
)
1059 if (set
== 0 || GET_CODE (set
) == CLOBBER
)
1061 reg_stat
[REGNO (x
)].nonzero_bits
= GET_MODE_MASK (GET_MODE (x
));
1062 reg_stat
[REGNO (x
)].sign_bit_copies
= 1;
1066 /* If this is a complex assignment, see if we can convert it into a
1067 simple assignment. */
1068 set
= expand_field_assignment (set
);
1070 /* If this is a simple assignment, or we have a paradoxical SUBREG,
1071 set what we know about X. */
1073 if (SET_DEST (set
) == x
1074 || (GET_CODE (SET_DEST (set
)) == SUBREG
1075 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set
)))
1076 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set
)))))
1077 && SUBREG_REG (SET_DEST (set
)) == x
))
1079 rtx src
= SET_SRC (set
);
1081 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
1082 /* If X is narrower than a word and SRC is a non-negative
1083 constant that would appear negative in the mode of X,
1084 sign-extend it for use in reg_stat[].nonzero_bits because some
1085 machines (maybe most) will actually do the sign-extension
1086 and this is the conservative approach.
1088 ??? For 2.5, try to tighten up the MD files in this regard
1089 instead of this kludge. */
1091 if (GET_MODE_BITSIZE (GET_MODE (x
)) < BITS_PER_WORD
1092 && GET_CODE (src
) == CONST_INT
1094 && 0 != (INTVAL (src
)
1095 & ((HOST_WIDE_INT
) 1
1096 << (GET_MODE_BITSIZE (GET_MODE (x
)) - 1))))
1097 src
= GEN_INT (INTVAL (src
)
1098 | ((HOST_WIDE_INT
) (-1)
1099 << GET_MODE_BITSIZE (GET_MODE (x
))));
1102 /* Don't call nonzero_bits if it cannot change anything. */
1103 if (reg_stat
[REGNO (x
)].nonzero_bits
!= ~(unsigned HOST_WIDE_INT
) 0)
1104 reg_stat
[REGNO (x
)].nonzero_bits
1105 |= nonzero_bits (src
, nonzero_bits_mode
);
1106 num
= num_sign_bit_copies (SET_SRC (set
), GET_MODE (x
));
1107 if (reg_stat
[REGNO (x
)].sign_bit_copies
== 0
1108 || reg_stat
[REGNO (x
)].sign_bit_copies
> num
)
1109 reg_stat
[REGNO (x
)].sign_bit_copies
= num
;
1113 reg_stat
[REGNO (x
)].nonzero_bits
= GET_MODE_MASK (GET_MODE (x
));
1114 reg_stat
[REGNO (x
)].sign_bit_copies
= 1;
1119 /* See if INSN can be combined into I3. PRED and SUCC are optionally
1120 insns that were previously combined into I3 or that will be combined
1121 into the merger of INSN and I3.
1123 Return 0 if the combination is not allowed for any reason.
1125 If the combination is allowed, *PDEST will be set to the single
1126 destination of INSN and *PSRC to the single source, and this function
1130 can_combine_p (rtx insn
, rtx i3
, rtx pred ATTRIBUTE_UNUSED
, rtx succ
,
1131 rtx
*pdest
, rtx
*psrc
)
1134 rtx set
= 0, src
, dest
;
1139 int all_adjacent
= (succ
? (next_active_insn (insn
) == succ
1140 && next_active_insn (succ
) == i3
)
1141 : next_active_insn (insn
) == i3
);
1143 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
1144 or a PARALLEL consisting of such a SET and CLOBBERs.
1146 If INSN has CLOBBER parallel parts, ignore them for our processing.
1147 By definition, these happen during the execution of the insn. When it
1148 is merged with another insn, all bets are off. If they are, in fact,
1149 needed and aren't also supplied in I3, they may be added by
1150 recog_for_combine. Otherwise, it won't match.
1152 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
1155 Get the source and destination of INSN. If more than one, can't
1158 if (GET_CODE (PATTERN (insn
)) == SET
)
1159 set
= PATTERN (insn
);
1160 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
1161 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1163 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1165 rtx elt
= XVECEXP (PATTERN (insn
), 0, i
);
1168 switch (GET_CODE (elt
))
1170 /* This is important to combine floating point insns
1171 for the SH4 port. */
1173 /* Combining an isolated USE doesn't make sense.
1174 We depend here on combinable_i3pat to reject them. */
1175 /* The code below this loop only verifies that the inputs of
1176 the SET in INSN do not change. We call reg_set_between_p
1177 to verify that the REG in the USE does not change between
1179 If the USE in INSN was for a pseudo register, the matching
1180 insn pattern will likely match any register; combining this
1181 with any other USE would only be safe if we knew that the
1182 used registers have identical values, or if there was
1183 something to tell them apart, e.g. different modes. For
1184 now, we forgo such complicated tests and simply disallow
1185 combining of USES of pseudo registers with any other USE. */
1186 if (REG_P (XEXP (elt
, 0))
1187 && GET_CODE (PATTERN (i3
)) == PARALLEL
)
1189 rtx i3pat
= PATTERN (i3
);
1190 int i
= XVECLEN (i3pat
, 0) - 1;
1191 unsigned int regno
= REGNO (XEXP (elt
, 0));
1195 rtx i3elt
= XVECEXP (i3pat
, 0, i
);
1197 if (GET_CODE (i3elt
) == USE
1198 && REG_P (XEXP (i3elt
, 0))
1199 && (REGNO (XEXP (i3elt
, 0)) == regno
1200 ? reg_set_between_p (XEXP (elt
, 0),
1201 PREV_INSN (insn
), i3
)
1202 : regno
>= FIRST_PSEUDO_REGISTER
))
1209 /* We can ignore CLOBBERs. */
1214 /* Ignore SETs whose result isn't used but not those that
1215 have side-effects. */
1216 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (elt
))
1217 && (!(note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
))
1218 || INTVAL (XEXP (note
, 0)) <= 0)
1219 && ! side_effects_p (elt
))
1222 /* If we have already found a SET, this is a second one and
1223 so we cannot combine with this insn. */
1231 /* Anything else means we can't combine. */
1237 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1238 so don't do anything with it. */
1239 || GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
)
1248 set
= expand_field_assignment (set
);
1249 src
= SET_SRC (set
), dest
= SET_DEST (set
);
1251 /* Don't eliminate a store in the stack pointer. */
1252 if (dest
== stack_pointer_rtx
1253 /* Don't combine with an insn that sets a register to itself if it has
1254 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
1255 || (rtx_equal_p (src
, dest
) && find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
1256 /* Can't merge an ASM_OPERANDS. */
1257 || GET_CODE (src
) == ASM_OPERANDS
1258 /* Can't merge a function call. */
1259 || GET_CODE (src
) == CALL
1260 /* Don't eliminate a function call argument. */
1262 && (find_reg_fusage (i3
, USE
, dest
)
1264 && REGNO (dest
) < FIRST_PSEUDO_REGISTER
1265 && global_regs
[REGNO (dest
)])))
1266 /* Don't substitute into an incremented register. */
1267 || FIND_REG_INC_NOTE (i3
, dest
)
1268 || (succ
&& FIND_REG_INC_NOTE (succ
, dest
))
1269 /* Don't substitute into a non-local goto, this confuses CFG. */
1270 || (JUMP_P (i3
) && find_reg_note (i3
, REG_NON_LOCAL_GOTO
, NULL_RTX
))
1272 /* Don't combine the end of a libcall into anything. */
1273 /* ??? This gives worse code, and appears to be unnecessary, since no
1274 pass after flow uses REG_LIBCALL/REG_RETVAL notes. Local-alloc does
1275 use REG_RETVAL notes for noconflict blocks, but other code here
1276 makes sure that those insns don't disappear. */
1277 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
1279 /* Make sure that DEST is not used after SUCC but before I3. */
1280 || (succ
&& ! all_adjacent
1281 && reg_used_between_p (dest
, succ
, i3
))
1282 /* Make sure that the value that is to be substituted for the register
1283 does not use any registers whose values alter in between. However,
1284 If the insns are adjacent, a use can't cross a set even though we
1285 think it might (this can happen for a sequence of insns each setting
1286 the same destination; last_set of that register might point to
1287 a NOTE). If INSN has a REG_EQUIV note, the register is always
1288 equivalent to the memory so the substitution is valid even if there
1289 are intervening stores. Also, don't move a volatile asm or
1290 UNSPEC_VOLATILE across any other insns. */
1293 || ! find_reg_note (insn
, REG_EQUIV
, src
))
1294 && use_crosses_set_p (src
, INSN_CUID (insn
)))
1295 || (GET_CODE (src
) == ASM_OPERANDS
&& MEM_VOLATILE_P (src
))
1296 || GET_CODE (src
) == UNSPEC_VOLATILE
))
1297 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
1298 better register allocation by not doing the combine. */
1299 || find_reg_note (i3
, REG_NO_CONFLICT
, dest
)
1300 || (succ
&& find_reg_note (succ
, REG_NO_CONFLICT
, dest
))
1301 /* Don't combine across a CALL_INSN, because that would possibly
1302 change whether the life span of some REGs crosses calls or not,
1303 and it is a pain to update that information.
1304 Exception: if source is a constant, moving it later can't hurt.
1305 Accept that special case, because it helps -fforce-addr a lot. */
1306 || (INSN_CUID (insn
) < last_call_cuid
&& ! CONSTANT_P (src
)))
1309 /* DEST must either be a REG or CC0. */
1312 /* If register alignment is being enforced for multi-word items in all
1313 cases except for parameters, it is possible to have a register copy
1314 insn referencing a hard register that is not allowed to contain the
1315 mode being copied and which would not be valid as an operand of most
1316 insns. Eliminate this problem by not combining with such an insn.
1318 Also, on some machines we don't want to extend the life of a hard
1322 && ((REGNO (dest
) < FIRST_PSEUDO_REGISTER
1323 && ! HARD_REGNO_MODE_OK (REGNO (dest
), GET_MODE (dest
)))
1324 /* Don't extend the life of a hard register unless it is
1325 user variable (if we have few registers) or it can't
1326 fit into the desired register (meaning something special
1328 Also avoid substituting a return register into I3, because
1329 reload can't handle a conflict with constraints of other
1331 || (REGNO (src
) < FIRST_PSEUDO_REGISTER
1332 && ! HARD_REGNO_MODE_OK (REGNO (src
), GET_MODE (src
)))))
1335 else if (GET_CODE (dest
) != CC0
)
1339 if (GET_CODE (PATTERN (i3
)) == PARALLEL
)
1340 for (i
= XVECLEN (PATTERN (i3
), 0) - 1; i
>= 0; i
--)
1341 if (GET_CODE (XVECEXP (PATTERN (i3
), 0, i
)) == CLOBBER
)
1343 /* Don't substitute for a register intended as a clobberable
1345 rtx reg
= XEXP (XVECEXP (PATTERN (i3
), 0, i
), 0);
1346 if (rtx_equal_p (reg
, dest
))
1349 /* If the clobber represents an earlyclobber operand, we must not
1350 substitute an expression containing the clobbered register.
1351 As we do not analyze the constraint strings here, we have to
1352 make the conservative assumption. However, if the register is
1353 a fixed hard reg, the clobber cannot represent any operand;
1354 we leave it up to the machine description to either accept or
1355 reject use-and-clobber patterns. */
1357 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
1358 || !fixed_regs
[REGNO (reg
)])
1359 if (reg_overlap_mentioned_p (reg
, src
))
1363 /* If INSN contains anything volatile, or is an `asm' (whether volatile
1364 or not), reject, unless nothing volatile comes between it and I3 */
1366 if (GET_CODE (src
) == ASM_OPERANDS
|| volatile_refs_p (src
))
1368 /* Make sure succ doesn't contain a volatile reference. */
1369 if (succ
!= 0 && volatile_refs_p (PATTERN (succ
)))
1372 for (p
= NEXT_INSN (insn
); p
!= i3
; p
= NEXT_INSN (p
))
1373 if (INSN_P (p
) && p
!= succ
&& volatile_refs_p (PATTERN (p
)))
1377 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1378 to be an explicit register variable, and was chosen for a reason. */
1380 if (GET_CODE (src
) == ASM_OPERANDS
1381 && REG_P (dest
) && REGNO (dest
) < FIRST_PSEUDO_REGISTER
)
1384 /* If there are any volatile insns between INSN and I3, reject, because
1385 they might affect machine state. */
1387 for (p
= NEXT_INSN (insn
); p
!= i3
; p
= NEXT_INSN (p
))
1388 if (INSN_P (p
) && p
!= succ
&& volatile_insn_p (PATTERN (p
)))
1391 /* If INSN contains an autoincrement or autodecrement, make sure that
1392 register is not used between there and I3, and not already used in
1393 I3 either. Neither must it be used in PRED or SUCC, if they exist.
1394 Also insist that I3 not be a jump; if it were one
1395 and the incremented register were spilled, we would lose. */
1398 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1399 if (REG_NOTE_KIND (link
) == REG_INC
1401 || reg_used_between_p (XEXP (link
, 0), insn
, i3
)
1402 || (pred
!= NULL_RTX
1403 && reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (pred
)))
1404 || (succ
!= NULL_RTX
1405 && reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (succ
)))
1406 || reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i3
))))
1411 /* Don't combine an insn that follows a CC0-setting insn.
1412 An insn that uses CC0 must not be separated from the one that sets it.
1413 We do, however, allow I2 to follow a CC0-setting insn if that insn
1414 is passed as I1; in that case it will be deleted also.
1415 We also allow combining in this case if all the insns are adjacent
1416 because that would leave the two CC0 insns adjacent as well.
1417 It would be more logical to test whether CC0 occurs inside I1 or I2,
1418 but that would be much slower, and this ought to be equivalent. */
1420 p
= prev_nonnote_insn (insn
);
1421 if (p
&& p
!= pred
&& NONJUMP_INSN_P (p
) && sets_cc0_p (PATTERN (p
))
1426 /* If we get here, we have passed all the tests and the combination is
1435 /* LOC is the location within I3 that contains its pattern or the component
1436 of a PARALLEL of the pattern. We validate that it is valid for combining.
1438 One problem is if I3 modifies its output, as opposed to replacing it
1439 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1440 so would produce an insn that is not equivalent to the original insns.
1444 (set (reg:DI 101) (reg:DI 100))
1445 (set (subreg:SI (reg:DI 101) 0) <foo>)
1447 This is NOT equivalent to:
1449 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1450 (set (reg:DI 101) (reg:DI 100))])
1452 Not only does this modify 100 (in which case it might still be valid
1453 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1455 We can also run into a problem if I2 sets a register that I1
1456 uses and I1 gets directly substituted into I3 (not via I2). In that
1457 case, we would be getting the wrong value of I2DEST into I3, so we
1458 must reject the combination. This case occurs when I2 and I1 both
1459 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1460 If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
1461 of a SET must prevent combination from occurring.
1463 Before doing the above check, we first try to expand a field assignment
1464 into a set of logical operations.
1466 If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
1467 we place a register that is both set and used within I3. If more than one
1468 such register is detected, we fail.
1470 Return 1 if the combination is valid, zero otherwise. */
1473 combinable_i3pat (rtx i3
, rtx
*loc
, rtx i2dest
, rtx i1dest
,
1474 int i1_not_in_src
, rtx
*pi3dest_killed
)
1478 if (GET_CODE (x
) == SET
)
1481 rtx dest
= SET_DEST (set
);
1482 rtx src
= SET_SRC (set
);
1483 rtx inner_dest
= dest
;
1486 while (GET_CODE (inner_dest
) == STRICT_LOW_PART
1487 || GET_CODE (inner_dest
) == SUBREG
1488 || GET_CODE (inner_dest
) == ZERO_EXTRACT
)
1489 inner_dest
= XEXP (inner_dest
, 0);
1491 /* Check for the case where I3 modifies its output, as discussed
1492 above. We don't want to prevent pseudos from being combined
1493 into the address of a MEM, so only prevent the combination if
1494 i1 or i2 set the same MEM. */
1495 if ((inner_dest
!= dest
&&
1496 (!MEM_P (inner_dest
)
1497 || rtx_equal_p (i2dest
, inner_dest
)
1498 || (i1dest
&& rtx_equal_p (i1dest
, inner_dest
)))
1499 && (reg_overlap_mentioned_p (i2dest
, inner_dest
)
1500 || (i1dest
&& reg_overlap_mentioned_p (i1dest
, inner_dest
))))
1502 /* This is the same test done in can_combine_p except we can't test
1503 all_adjacent; we don't have to, since this instruction will stay
1504 in place, thus we are not considering increasing the lifetime of
1507 Also, if this insn sets a function argument, combining it with
1508 something that might need a spill could clobber a previous
1509 function argument; the all_adjacent test in can_combine_p also
1510 checks this; here, we do a more specific test for this case. */
1512 || (REG_P (inner_dest
)
1513 && REGNO (inner_dest
) < FIRST_PSEUDO_REGISTER
1514 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest
),
1515 GET_MODE (inner_dest
))))
1516 || (i1_not_in_src
&& reg_overlap_mentioned_p (i1dest
, src
)))
1519 /* If DEST is used in I3, it is being killed in this insn, so
1520 record that for later. We have to consider paradoxical
1521 subregs here, since they kill the whole register, but we
1522 ignore partial subregs, STRICT_LOW_PART, etc.
1523 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1524 STACK_POINTER_REGNUM, since these are always considered to be
1525 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
1527 if (GET_CODE (subdest
) == SUBREG
1528 && (GET_MODE_SIZE (GET_MODE (subdest
))
1529 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest
)))))
1530 subdest
= SUBREG_REG (subdest
);
1533 && reg_referenced_p (subdest
, PATTERN (i3
))
1534 && REGNO (subdest
) != FRAME_POINTER_REGNUM
1535 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1536 && REGNO (subdest
) != HARD_FRAME_POINTER_REGNUM
1538 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1539 && (REGNO (subdest
) != ARG_POINTER_REGNUM
1540 || ! fixed_regs
[REGNO (subdest
)])
1542 && REGNO (subdest
) != STACK_POINTER_REGNUM
)
1544 if (*pi3dest_killed
)
1547 *pi3dest_killed
= subdest
;
1551 else if (GET_CODE (x
) == PARALLEL
)
1555 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
1556 if (! combinable_i3pat (i3
, &XVECEXP (x
, 0, i
), i2dest
, i1dest
,
1557 i1_not_in_src
, pi3dest_killed
))
1564 /* Return 1 if X is an arithmetic expression that contains a multiplication
1565 and division. We don't count multiplications by powers of two here. */
1568 contains_muldiv (rtx x
)
1570 switch (GET_CODE (x
))
1572 case MOD
: case DIV
: case UMOD
: case UDIV
:
1576 return ! (GET_CODE (XEXP (x
, 1)) == CONST_INT
1577 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0);
1580 return contains_muldiv (XEXP (x
, 0))
1581 || contains_muldiv (XEXP (x
, 1));
1584 return contains_muldiv (XEXP (x
, 0));
1590 /* Determine whether INSN can be used in a combination. Return nonzero if
1591 not. This is used in try_combine to detect early some cases where we
1592 can't perform combinations. */
1595 cant_combine_insn_p (rtx insn
)
1600 /* If this isn't really an insn, we can't do anything.
1601 This can occur when flow deletes an insn that it has merged into an
1602 auto-increment address. */
1603 if (! INSN_P (insn
))
1606 /* Never combine loads and stores involving hard regs that are likely
1607 to be spilled. The register allocator can usually handle such
1608 reg-reg moves by tying. If we allow the combiner to make
1609 substitutions of likely-spilled regs, reload might die.
1610 As an exception, we allow combinations involving fixed regs; these are
1611 not available to the register allocator so there's no risk involved. */
1613 set
= single_set (insn
);
1616 src
= SET_SRC (set
);
1617 dest
= SET_DEST (set
);
1618 if (GET_CODE (src
) == SUBREG
)
1619 src
= SUBREG_REG (src
);
1620 if (GET_CODE (dest
) == SUBREG
)
1621 dest
= SUBREG_REG (dest
);
1622 if (REG_P (src
) && REG_P (dest
)
1623 && ((REGNO (src
) < FIRST_PSEUDO_REGISTER
1624 && ! fixed_regs
[REGNO (src
)]
1625 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src
))))
1626 || (REGNO (dest
) < FIRST_PSEUDO_REGISTER
1627 && ! fixed_regs
[REGNO (dest
)]
1628 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest
))))))
/* State passed through note_stores by likely_spilled_retval_p:
   REGNO/NREGS describe the hard-register span of the return value;
   MASK has one bit per register in that span, cleared as stores are
   seen (bit i corresponds to register REGNO + i).  */
struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};
1640 /* Called via note_stores by likely_spilled_retval_p. Remove from info->mask
1641 hard registers that are known to be written to / clobbered in full. */
1643 likely_spilled_retval_1 (rtx x
, rtx set
, void *data
)
1645 struct likely_spilled_retval_info
*info
= data
;
1646 unsigned regno
, nregs
;
1649 if (!REG_P (XEXP (set
, 0)))
1652 if (regno
>= info
->regno
+ info
->nregs
)
1654 nregs
= hard_regno_nregs
[regno
][GET_MODE (x
)];
1655 if (regno
+ nregs
<= info
->regno
)
1657 new_mask
= (2U << (nregs
- 1)) - 1;
1658 if (regno
< info
->regno
)
1659 new_mask
>>= info
->regno
- regno
;
1661 new_mask
<<= regno
- info
->regno
;
1662 info
->mask
&= new_mask
;
1665 /* Return nonzero iff part of the return value is live during INSN, and
1666 it is likely spilled. This can happen when more than one insn is needed
1667 to copy the return value, e.g. when we consider to combine into the
1668 second copy insn for a complex value. */
1671 likely_spilled_retval_p (rtx insn
)
1673 rtx use
= BB_END (this_basic_block
);
1675 unsigned regno
, nregs
;
1676 /* We assume here that no machine mode needs more than
1677 32 hard registers when the value overlaps with a register
1678 for which FUNCTION_VALUE_REGNO_P is true. */
1680 struct likely_spilled_retval_info info
;
1682 if (!NONJUMP_INSN_P (use
) || GET_CODE (PATTERN (use
)) != USE
|| insn
== use
)
1684 reg
= XEXP (PATTERN (use
), 0);
1685 if (!REG_P (reg
) || !FUNCTION_VALUE_REGNO_P (REGNO (reg
)))
1687 regno
= REGNO (reg
);
1688 nregs
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
1691 mask
= (2U << (nregs
- 1)) - 1;
1693 /* Disregard parts of the return value that are set later. */
1697 for (p
= PREV_INSN (use
); info
.mask
&& p
!= insn
; p
= PREV_INSN (p
))
1698 note_stores (PATTERN (insn
), likely_spilled_retval_1
, &info
);
1701 /* Check if any of the (probably) live return value registers is
1706 if ((mask
& 1 << nregs
)
1707 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno
+ nregs
)))
1713 /* Adjust INSN after we made a change to its destination.
1715 Changing the destination can invalidate notes that say something about
1716 the results of the insn and a LOG_LINK pointing to the insn. */
1719 adjust_for_new_dest (rtx insn
)
1723 /* For notes, be conservative and simply remove them. */
1724 loc
= ®_NOTES (insn
);
1727 enum reg_note kind
= REG_NOTE_KIND (*loc
);
1728 if (kind
== REG_EQUAL
|| kind
== REG_EQUIV
)
1729 *loc
= XEXP (*loc
, 1);
1731 loc
= &XEXP (*loc
, 1);
1734 /* The new insn will have a destination that was previously the destination
1735 of an insn just above it. Call distribute_links to make a LOG_LINK from
1736 the next use of that destination. */
1737 distribute_links (gen_rtx_INSN_LIST (VOIDmode
, insn
, NULL_RTX
));
1740 /* Return TRUE if combine can reuse reg X in mode MODE.
1741 ADDED_SETS is nonzero if the original set is still required. */
1743 can_change_dest_mode (rtx x
, int added_sets
, enum machine_mode mode
)
1751 /* Allow hard registers if the new mode is legal, and occupies no more
1752 registers than the old mode. */
1753 if (regno
< FIRST_PSEUDO_REGISTER
)
1754 return (HARD_REGNO_MODE_OK (regno
, mode
)
1755 && (hard_regno_nregs
[regno
][GET_MODE (x
)]
1756 >= hard_regno_nregs
[regno
][mode
]));
1758 /* Or a pseudo that is only used once. */
1759 return (REG_N_SETS (regno
) == 1 && !added_sets
1760 && !REG_USERVAR_P (x
));
1764 /* Check whether X, the destination of a set, refers to part of
1765 the register specified by REG. */
1768 reg_subword_p (rtx x
, rtx reg
)
1770 /* Check that reg is an integer mode register. */
1771 if (!REG_P (reg
) || GET_MODE_CLASS (GET_MODE (reg
)) != MODE_INT
)
1774 if (GET_CODE (x
) == STRICT_LOW_PART
1775 || GET_CODE (x
) == ZERO_EXTRACT
)
1778 return GET_CODE (x
) == SUBREG
1779 && SUBREG_REG (x
) == reg
1780 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
;
1784 /* Try to combine the insns I1 and I2 into I3.
1785 Here I1 and I2 appear earlier than I3.
1786 I1 can be zero; then we combine just I2 into I3.
1788 If we are combining three insns and the resulting insn is not recognized,
1789 try splitting it into two insns. If that happens, I2 and I3 are retained
1790 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1793 Return 0 if the combination does not work. Then nothing is changed.
1794 If we did the combination, return the insn at which combine should
1797 Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
1798 new direct jump instruction. */
1801 try_combine (rtx i3
, rtx i2
, rtx i1
, int *new_direct_jump_p
)
1803 /* New patterns for I3 and I2, respectively. */
1804 rtx newpat
, newi2pat
= 0;
1805 rtvec newpat_vec_with_clobbers
= 0;
1806 int substed_i2
= 0, substed_i1
= 0;
1807 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1808 int added_sets_1
, added_sets_2
;
1809 /* Total number of SETs to put into I3. */
1811 /* Nonzero if I2's body now appears in I3. */
1813 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1814 int insn_code_number
, i2_code_number
= 0, other_code_number
= 0;
1815 /* Contains I3 if the destination of I3 is used in its source, which means
1816 that the old life of I3 is being killed. If that usage is placed into
1817 I2 and not in I3, a REG_DEAD note must be made. */
1818 rtx i3dest_killed
= 0;
1819 /* SET_DEST and SET_SRC of I2 and I1. */
1820 rtx i2dest
, i2src
, i1dest
= 0, i1src
= 0;
1821 /* PATTERN (I2), or a copy of it in certain cases. */
1823 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
1824 int i2dest_in_i2src
= 0, i1dest_in_i1src
= 0, i2dest_in_i1src
= 0;
1825 int i2dest_killed
= 0, i1dest_killed
= 0;
1826 int i1_feeds_i3
= 0;
1827 /* Notes that must be added to REG_NOTES in I3 and I2. */
1828 rtx new_i3_notes
, new_i2_notes
;
1829 /* Notes that we substituted I3 into I2 instead of the normal case. */
1830 int i3_subst_into_i2
= 0;
1831 /* Notes that I1, I2 or I3 is a MULT operation. */
1840 /* Exit early if one of the insns involved can't be used for
1842 if (cant_combine_insn_p (i3
)
1843 || cant_combine_insn_p (i2
)
1844 || (i1
&& cant_combine_insn_p (i1
))
1845 || likely_spilled_retval_p (i3
)
1846 /* We also can't do anything if I3 has a
1847 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1850 /* ??? This gives worse code, and appears to be unnecessary, since no
1851 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1852 || find_reg_note (i3
, REG_LIBCALL
, NULL_RTX
)
1858 undobuf
.other_insn
= 0;
1860 /* Reset the hard register usage information. */
1861 CLEAR_HARD_REG_SET (newpat_used_regs
);
1863 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1864 code below, set I1 to be the earlier of the two insns. */
1865 if (i1
&& INSN_CUID (i1
) > INSN_CUID (i2
))
1866 temp
= i1
, i1
= i2
, i2
= temp
;
1868 added_links_insn
= 0;
1870 /* First check for one important special-case that the code below will
1871 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
1872 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1873 we may be able to replace that destination with the destination of I3.
1874 This occurs in the common code where we compute both a quotient and
1875 remainder into a structure, in which case we want to do the computation
1876 directly into the structure to avoid register-register copies.
1878 Note that this case handles both multiple sets in I2 and also
1879 cases where I2 has a number of CLOBBER or PARALLELs.
1881 We make very conservative checks below and only try to handle the
1882 most common cases of this. For example, we only handle the case
1883 where I2 and I3 are adjacent to avoid making difficult register
1886 if (i1
== 0 && NONJUMP_INSN_P (i3
) && GET_CODE (PATTERN (i3
)) == SET
1887 && REG_P (SET_SRC (PATTERN (i3
)))
1888 && REGNO (SET_SRC (PATTERN (i3
))) >= FIRST_PSEUDO_REGISTER
1889 && find_reg_note (i3
, REG_DEAD
, SET_SRC (PATTERN (i3
)))
1890 && GET_CODE (PATTERN (i2
)) == PARALLEL
1891 && ! side_effects_p (SET_DEST (PATTERN (i3
)))
1892 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1893 below would need to check what is inside (and reg_overlap_mentioned_p
1894 doesn't support those codes anyway). Don't allow those destinations;
1895 the resulting insn isn't likely to be recognized anyway. */
1896 && GET_CODE (SET_DEST (PATTERN (i3
))) != ZERO_EXTRACT
1897 && GET_CODE (SET_DEST (PATTERN (i3
))) != STRICT_LOW_PART
1898 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3
)),
1899 SET_DEST (PATTERN (i3
)))
1900 && next_real_insn (i2
) == i3
)
1902 rtx p2
= PATTERN (i2
);
1904 /* Make sure that the destination of I3,
1905 which we are going to substitute into one output of I2,
1906 is not used within another output of I2. We must avoid making this:
1907 (parallel [(set (mem (reg 69)) ...)
1908 (set (reg 69) ...)])
1909 which is not well-defined as to order of actions.
1910 (Besides, reload can't handle output reloads for this.)
1912 The problem can also happen if the dest of I3 is a memory ref,
1913 if another dest in I2 is an indirect memory ref. */
1914 for (i
= 0; i
< XVECLEN (p2
, 0); i
++)
1915 if ((GET_CODE (XVECEXP (p2
, 0, i
)) == SET
1916 || GET_CODE (XVECEXP (p2
, 0, i
)) == CLOBBER
)
1917 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3
)),
1918 SET_DEST (XVECEXP (p2
, 0, i
))))
1921 if (i
== XVECLEN (p2
, 0))
1922 for (i
= 0; i
< XVECLEN (p2
, 0); i
++)
1923 if ((GET_CODE (XVECEXP (p2
, 0, i
)) == SET
1924 || GET_CODE (XVECEXP (p2
, 0, i
)) == CLOBBER
)
1925 && SET_DEST (XVECEXP (p2
, 0, i
)) == SET_SRC (PATTERN (i3
)))
1930 subst_low_cuid
= INSN_CUID (i2
);
1932 added_sets_2
= added_sets_1
= 0;
1933 i2dest
= SET_SRC (PATTERN (i3
));
1934 i2dest_killed
= dead_or_set_p (i2
, i2dest
);
1936 /* Replace the dest in I2 with our dest and make the resulting
1937 insn the new pattern for I3. Then skip to where we
1938 validate the pattern. Everything was set up above. */
1939 SUBST (SET_DEST (XVECEXP (p2
, 0, i
)),
1940 SET_DEST (PATTERN (i3
)));
1943 i3_subst_into_i2
= 1;
1944 goto validate_replacement
;
1948 /* If I2 is setting a pseudo to a constant and I3 is setting some
1949 sub-part of it to another constant, merge them by making a new
1952 && (temp
= single_set (i2
)) != 0
1953 && (GET_CODE (SET_SRC (temp
)) == CONST_INT
1954 || GET_CODE (SET_SRC (temp
)) == CONST_DOUBLE
)
1955 && GET_CODE (PATTERN (i3
)) == SET
1956 && (GET_CODE (SET_SRC (PATTERN (i3
))) == CONST_INT
1957 || GET_CODE (SET_SRC (PATTERN (i3
))) == CONST_DOUBLE
)
1958 && reg_subword_p (SET_DEST (PATTERN (i3
)), SET_DEST (temp
)))
1960 rtx dest
= SET_DEST (PATTERN (i3
));
1964 if (GET_CODE (dest
) == ZERO_EXTRACT
)
1966 if (GET_CODE (XEXP (dest
, 1)) == CONST_INT
1967 && GET_CODE (XEXP (dest
, 2)) == CONST_INT
)
1969 width
= INTVAL (XEXP (dest
, 1));
1970 offset
= INTVAL (XEXP (dest
, 2));
1971 dest
= XEXP (dest
, 0);
1972 if (BITS_BIG_ENDIAN
)
1973 offset
= GET_MODE_BITSIZE (GET_MODE (dest
)) - width
- offset
;
1978 if (GET_CODE (dest
) == STRICT_LOW_PART
)
1979 dest
= XEXP (dest
, 0);
1980 width
= GET_MODE_BITSIZE (GET_MODE (dest
));
1986 /* If this is the low part, we're done. */
1987 if (subreg_lowpart_p (dest
))
1989 /* Handle the case where inner is twice the size of outer. */
1990 else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp
)))
1991 == 2 * GET_MODE_BITSIZE (GET_MODE (dest
)))
1992 offset
+= GET_MODE_BITSIZE (GET_MODE (dest
));
1993 /* Otherwise give up for now. */
2000 HOST_WIDE_INT mhi
, ohi
, ihi
;
2001 HOST_WIDE_INT mlo
, olo
, ilo
;
2002 rtx inner
= SET_SRC (PATTERN (i3
));
2003 rtx outer
= SET_SRC (temp
);
2005 if (GET_CODE (outer
) == CONST_INT
)
2007 olo
= INTVAL (outer
);
2008 ohi
= olo
< 0 ? -1 : 0;
2012 olo
= CONST_DOUBLE_LOW (outer
);
2013 ohi
= CONST_DOUBLE_HIGH (outer
);
2016 if (GET_CODE (inner
) == CONST_INT
)
2018 ilo
= INTVAL (inner
);
2019 ihi
= ilo
< 0 ? -1 : 0;
2023 ilo
= CONST_DOUBLE_LOW (inner
);
2024 ihi
= CONST_DOUBLE_HIGH (inner
);
2027 if (width
< HOST_BITS_PER_WIDE_INT
)
2029 mlo
= ((unsigned HOST_WIDE_INT
) 1 << width
) - 1;
2032 else if (width
< HOST_BITS_PER_WIDE_INT
* 2)
2034 mhi
= ((unsigned HOST_WIDE_INT
) 1
2035 << (width
- HOST_BITS_PER_WIDE_INT
)) - 1;
2047 if (offset
>= HOST_BITS_PER_WIDE_INT
)
2049 mhi
= mlo
<< (offset
- HOST_BITS_PER_WIDE_INT
);
2051 ihi
= ilo
<< (offset
- HOST_BITS_PER_WIDE_INT
);
2054 else if (offset
> 0)
2056 mhi
= (mhi
<< offset
) | ((unsigned HOST_WIDE_INT
) mlo
2057 >> (HOST_BITS_PER_WIDE_INT
- offset
));
2058 mlo
= mlo
<< offset
;
2059 ihi
= (ihi
<< offset
) | ((unsigned HOST_WIDE_INT
) ilo
2060 >> (HOST_BITS_PER_WIDE_INT
- offset
));
2061 ilo
= ilo
<< offset
;
2064 olo
= (olo
& ~mlo
) | ilo
;
2065 ohi
= (ohi
& ~mhi
) | ihi
;
2069 subst_low_cuid
= INSN_CUID (i2
);
2070 added_sets_2
= added_sets_1
= 0;
2071 i2dest
= SET_DEST (temp
);
2072 i2dest_killed
= dead_or_set_p (i2
, i2dest
);
2074 SUBST (SET_SRC (temp
),
2075 immed_double_const (olo
, ohi
, GET_MODE (SET_DEST (temp
))));
2077 newpat
= PATTERN (i2
);
2078 goto validate_replacement
;
2083 /* If we have no I1 and I2 looks like:
2084 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
2086 make up a dummy I1 that is
2089 (set (reg:CC X) (compare:CC Y (const_int 0)))
2091 (We can ignore any trailing CLOBBERs.)
2093 This undoes a previous combination and allows us to match a branch-and-
2096 if (i1
== 0 && GET_CODE (PATTERN (i2
)) == PARALLEL
2097 && XVECLEN (PATTERN (i2
), 0) >= 2
2098 && GET_CODE (XVECEXP (PATTERN (i2
), 0, 0)) == SET
2099 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, 0))))
2101 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0))) == COMPARE
2102 && XEXP (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0)), 1) == const0_rtx
2103 && GET_CODE (XVECEXP (PATTERN (i2
), 0, 1)) == SET
2104 && REG_P (SET_DEST (XVECEXP (PATTERN (i2
), 0, 1)))
2105 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0)), 0),
2106 SET_SRC (XVECEXP (PATTERN (i2
), 0, 1))))
2108 for (i
= XVECLEN (PATTERN (i2
), 0) - 1; i
>= 2; i
--)
2109 if (GET_CODE (XVECEXP (PATTERN (i2
), 0, i
)) != CLOBBER
)
2114 /* We make I1 with the same INSN_UID as I2. This gives it
2115 the same INSN_CUID for value tracking. Our fake I1 will
2116 never appear in the insn stream so giving it the same INSN_UID
2117 as I2 will not cause a problem. */
2119 i1
= gen_rtx_INSN (VOIDmode
, INSN_UID (i2
), NULL_RTX
, i2
,
2120 BLOCK_FOR_INSN (i2
), INSN_LOCATOR (i2
),
2121 XVECEXP (PATTERN (i2
), 0, 1), -1, NULL_RTX
,
2124 SUBST (PATTERN (i2
), XVECEXP (PATTERN (i2
), 0, 0));
2125 SUBST (XEXP (SET_SRC (PATTERN (i2
)), 0),
2126 SET_DEST (PATTERN (i1
)));
2131 /* Verify that I2 and I1 are valid for combining. */
2132 if (! can_combine_p (i2
, i3
, i1
, NULL_RTX
, &i2dest
, &i2src
)
2133 || (i1
&& ! can_combine_p (i1
, i3
, NULL_RTX
, i2
, &i1dest
, &i1src
)))
2139 /* Record whether I2DEST is used in I2SRC and similarly for the other
2140 cases. Knowing this will help in register status updating below. */
2141 i2dest_in_i2src
= reg_overlap_mentioned_p (i2dest
, i2src
);
2142 i1dest_in_i1src
= i1
&& reg_overlap_mentioned_p (i1dest
, i1src
);
2143 i2dest_in_i1src
= i1
&& reg_overlap_mentioned_p (i2dest
, i1src
);
2144 i2dest_killed
= dead_or_set_p (i2
, i2dest
);
2145 i1dest_killed
= i1
&& dead_or_set_p (i1
, i1dest
);
2147 /* See if I1 directly feeds into I3. It does if I1DEST is not used
2149 i1_feeds_i3
= i1
&& ! reg_overlap_mentioned_p (i1dest
, i2src
);
2151 /* Ensure that I3's pattern can be the destination of combines. */
2152 if (! combinable_i3pat (i3
, &PATTERN (i3
), i2dest
, i1dest
,
2153 i1
&& i2dest_in_i1src
&& i1_feeds_i3
,
2160 /* See if any of the insns is a MULT operation. Unless one is, we will
2161 reject a combination that is, since it must be slower. Be conservative
2163 if (GET_CODE (i2src
) == MULT
2164 || (i1
!= 0 && GET_CODE (i1src
) == MULT
)
2165 || (GET_CODE (PATTERN (i3
)) == SET
2166 && GET_CODE (SET_SRC (PATTERN (i3
))) == MULT
))
2169 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
2170 We used to do this EXCEPT in one case: I3 has a post-inc in an
2171 output operand. However, that exception can give rise to insns like
2173 which is a famous insn on the PDP-11 where the value of r3 used as the
2174 source was model-dependent. Avoid this sort of thing. */
2177 if (!(GET_CODE (PATTERN (i3
)) == SET
2178 && REG_P (SET_SRC (PATTERN (i3
)))
2179 && MEM_P (SET_DEST (PATTERN (i3
)))
2180 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3
)), 0)) == POST_INC
2181 || GET_CODE (XEXP (SET_DEST (PATTERN (i3
)), 0)) == POST_DEC
)))
2182 /* It's not the exception. */
2185 for (link
= REG_NOTES (i3
); link
; link
= XEXP (link
, 1))
2186 if (REG_NOTE_KIND (link
) == REG_INC
2187 && (reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i2
))
2189 && reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i1
)))))
2196 /* See if the SETs in I1 or I2 need to be kept around in the merged
2197 instruction: whenever the value set there is still needed past I3.
2198 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
2200 For the SET in I1, we have two cases: If I1 and I2 independently
2201 feed into I3, the set in I1 needs to be kept around if I1DEST dies
2202 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
2203 in I1 needs to be kept around unless I1DEST dies or is set in either
2204 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
2205 I1DEST. If so, we know I1 feeds into I2. */
2207 added_sets_2
= ! dead_or_set_p (i3
, i2dest
);
2210 = i1
&& ! (i1_feeds_i3
? dead_or_set_p (i3
, i1dest
)
2211 : (dead_or_set_p (i3
, i1dest
) || dead_or_set_p (i2
, i1dest
)));
2213 /* If the set in I2 needs to be kept around, we must make a copy of
2214 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
2215 PATTERN (I2), we are only substituting for the original I1DEST, not into
2216 an already-substituted copy. This also prevents making self-referential
2217 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
2220 i2pat
= (GET_CODE (PATTERN (i2
)) == PARALLEL
2221 ? gen_rtx_SET (VOIDmode
, i2dest
, i2src
)
2225 i2pat
= copy_rtx (i2pat
);
2229 /* Substitute in the latest insn for the regs set by the earlier ones. */
2231 maxreg
= max_reg_num ();
2236 /* Many machines that don't use CC0 have insns that can both perform an
2237 arithmetic operation and set the condition code. These operations will
2238 be represented as a PARALLEL with the first element of the vector
2239 being a COMPARE of an arithmetic operation with the constant zero.
2240 The second element of the vector will set some pseudo to the result
2241 of the same arithmetic operation. If we simplify the COMPARE, we won't
2242 match such a pattern and so will generate an extra insn. Here we test
2243 for this case, where both the comparison and the operation result are
2244 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
2245 I2SRC. Later we will make the PARALLEL that contains I2. */
2247 if (i1
== 0 && added_sets_2
&& GET_CODE (PATTERN (i3
)) == SET
2248 && GET_CODE (SET_SRC (PATTERN (i3
))) == COMPARE
2249 && XEXP (SET_SRC (PATTERN (i3
)), 1) == const0_rtx
2250 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3
)), 0), i2dest
))
2252 #ifdef SELECT_CC_MODE
2254 enum machine_mode compare_mode
;
2257 newpat
= PATTERN (i3
);
2258 SUBST (XEXP (SET_SRC (newpat
), 0), i2src
);
2262 #ifdef SELECT_CC_MODE
2263 /* See if a COMPARE with the operand we substituted in should be done
2264 with the mode that is currently being used. If not, do the same
2265 processing we do in `subst' for a SET; namely, if the destination
2266 is used only once, try to replace it with a register of the proper
2267 mode and also replace the COMPARE. */
2268 if (undobuf
.other_insn
== 0
2269 && (cc_use
= find_single_use (SET_DEST (newpat
), i3
,
2270 &undobuf
.other_insn
))
2271 && ((compare_mode
= SELECT_CC_MODE (GET_CODE (*cc_use
),
2273 != GET_MODE (SET_DEST (newpat
))))
2275 if (can_change_dest_mode(SET_DEST (newpat
), added_sets_2
,
2278 unsigned int regno
= REGNO (SET_DEST (newpat
));
2281 if (regno
< FIRST_PSEUDO_REGISTER
)
2282 new_dest
= gen_rtx_REG (compare_mode
, regno
);
2285 SUBST_MODE (regno_reg_rtx
[regno
], compare_mode
);
2286 new_dest
= regno_reg_rtx
[regno
];
2289 SUBST (SET_DEST (newpat
), new_dest
);
2290 SUBST (XEXP (*cc_use
, 0), new_dest
);
2291 SUBST (SET_SRC (newpat
),
2292 gen_rtx_COMPARE (compare_mode
, i2src
, const0_rtx
));
2295 undobuf
.other_insn
= 0;
2302 /* It is possible that the source of I2 or I1 may be performing
2303 an unneeded operation, such as a ZERO_EXTEND of something
2304 that is known to have the high part zero. Handle that case
2305 by letting subst look at the innermost one of them.
2307 Another way to do this would be to have a function that tries
2308 to simplify a single insn instead of merging two or more
2309 insns. We don't do this because of the potential of infinite
2310 loops and because of the potential extra memory required.
2311 However, doing it the way we are is a bit of a kludge and
2312 doesn't catch all cases.
2314 But only do this if -fexpensive-optimizations since it slows
2315 things down and doesn't usually win.
2317 This is not done in the COMPARE case above because the
2318 unmodified I2PAT is used in the PARALLEL and so a pattern
2319 with a modified I2SRC would not match. */
2321 if (flag_expensive_optimizations
)
2323 /* Pass pc_rtx so no substitutions are done, just
2327 subst_low_cuid
= INSN_CUID (i1
);
2328 i1src
= subst (i1src
, pc_rtx
, pc_rtx
, 0, 0);
2332 subst_low_cuid
= INSN_CUID (i2
);
2333 i2src
= subst (i2src
, pc_rtx
, pc_rtx
, 0, 0);
2337 n_occurrences
= 0; /* `subst' counts here */
2339 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
2340 need to make a unique copy of I2SRC each time we substitute it
2341 to avoid self-referential rtl. */
2343 subst_low_cuid
= INSN_CUID (i2
);
2344 newpat
= subst (PATTERN (i3
), i2dest
, i2src
, 0,
2345 ! i1_feeds_i3
&& i1dest_in_i1src
);
2348 /* Record whether i2's body now appears within i3's body. */
2349 i2_is_used
= n_occurrences
;
2352 /* If we already got a failure, don't try to do more. Otherwise,
2353 try to substitute in I1 if we have it. */
2355 if (i1
&& GET_CODE (newpat
) != CLOBBER
)
2357 /* Before we can do this substitution, we must redo the test done
2358 above (see detailed comments there) that ensures that I1DEST
2359 isn't mentioned in any SETs in NEWPAT that are field assignments. */
2361 if (! combinable_i3pat (NULL_RTX
, &newpat
, i1dest
, NULL_RTX
,
2369 subst_low_cuid
= INSN_CUID (i1
);
2370 newpat
= subst (newpat
, i1dest
, i1src
, 0, 0);
2374 /* Fail if an autoincrement side-effect has been duplicated. Be careful
2375 to count all the ways that I2SRC and I1SRC can be used. */
2376 if ((FIND_REG_INC_NOTE (i2
, NULL_RTX
) != 0
2377 && i2_is_used
+ added_sets_2
> 1)
2378 || (i1
!= 0 && FIND_REG_INC_NOTE (i1
, NULL_RTX
) != 0
2379 && (n_occurrences
+ added_sets_1
+ (added_sets_2
&& ! i1_feeds_i3
)
2381 /* Fail if we tried to make a new register. */
2382 || max_reg_num () != maxreg
2383 /* Fail if we couldn't do something and have a CLOBBER. */
2384 || GET_CODE (newpat
) == CLOBBER
2385 /* Fail if this new pattern is a MULT and we didn't have one before
2386 at the outer level. */
2387 || (GET_CODE (newpat
) == SET
&& GET_CODE (SET_SRC (newpat
)) == MULT
2394 /* If the actions of the earlier insns must be kept
2395 in addition to substituting them into the latest one,
2396 we must make a new PARALLEL for the latest insn
2397 to hold additional the SETs. */
2399 if (added_sets_1
|| added_sets_2
)
2403 if (GET_CODE (newpat
) == PARALLEL
)
2405 rtvec old
= XVEC (newpat
, 0);
2406 total_sets
= XVECLEN (newpat
, 0) + added_sets_1
+ added_sets_2
;
2407 newpat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_sets
));
2408 memcpy (XVEC (newpat
, 0)->elem
, &old
->elem
[0],
2409 sizeof (old
->elem
[0]) * old
->num_elem
);
2414 total_sets
= 1 + added_sets_1
+ added_sets_2
;
2415 newpat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_sets
));
2416 XVECEXP (newpat
, 0, 0) = old
;
2420 XVECEXP (newpat
, 0, --total_sets
)
2421 = (GET_CODE (PATTERN (i1
)) == PARALLEL
2422 ? gen_rtx_SET (VOIDmode
, i1dest
, i1src
) : PATTERN (i1
));
2426 /* If there is no I1, use I2's body as is. We used to also not do
2427 the subst call below if I2 was substituted into I3,
2428 but that could lose a simplification. */
2430 XVECEXP (newpat
, 0, --total_sets
) = i2pat
;
2432 /* See comment where i2pat is assigned. */
2433 XVECEXP (newpat
, 0, --total_sets
)
2434 = subst (i2pat
, i1dest
, i1src
, 0, 0);
2438 /* We come here when we are replacing a destination in I2 with the
2439 destination of I3. */
2440 validate_replacement
:
2442 /* Note which hard regs this insn has as inputs. */
2443 mark_used_regs_combine (newpat
);
2445 /* If recog_for_combine fails, it strips existing clobbers. If we'll
2446 consider splitting this pattern, we might need these clobbers. */
2447 if (i1
&& GET_CODE (newpat
) == PARALLEL
2448 && GET_CODE (XVECEXP (newpat
, 0, XVECLEN (newpat
, 0) - 1)) == CLOBBER
)
2450 int len
= XVECLEN (newpat
, 0);
2452 newpat_vec_with_clobbers
= rtvec_alloc (len
);
2453 for (i
= 0; i
< len
; i
++)
2454 RTVEC_ELT (newpat_vec_with_clobbers
, i
) = XVECEXP (newpat
, 0, i
);
2457 /* Is the result of combination a valid instruction? */
2458 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
2460 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2461 the second SET's destination is a register that is unused and isn't
2462 marked as an instruction that might trap in an EH region. In that case,
2463 we just need the first SET. This can occur when simplifying a divmod
2464 insn. We *must* test for this case here because the code below that
2465 splits two independent SETs doesn't handle this case correctly when it
2466 updates the register status.
2468 It's pointless doing this if we originally had two sets, one from
2469 i3, and one from i2. Combining then splitting the parallel results
2470 in the original i2 again plus an invalid insn (which we delete).
2471 The net effect is only to move instructions around, which makes
2472 debug info less accurate.
2474 Also check the case where the first SET's destination is unused.
2475 That would not cause incorrect code, but does cause an unneeded
2478 if (insn_code_number
< 0
2479 && !(added_sets_2
&& i1
== 0)
2480 && GET_CODE (newpat
) == PARALLEL
2481 && XVECLEN (newpat
, 0) == 2
2482 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
2483 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
2484 && asm_noperands (newpat
) < 0)
2486 rtx set0
= XVECEXP (newpat
, 0, 0);
2487 rtx set1
= XVECEXP (newpat
, 0, 1);
2490 if (((REG_P (SET_DEST (set1
))
2491 && find_reg_note (i3
, REG_UNUSED
, SET_DEST (set1
)))
2492 || (GET_CODE (SET_DEST (set1
)) == SUBREG
2493 && find_reg_note (i3
, REG_UNUSED
, SUBREG_REG (SET_DEST (set1
)))))
2494 && (!(note
= find_reg_note (i3
, REG_EH_REGION
, NULL_RTX
))
2495 || INTVAL (XEXP (note
, 0)) <= 0)
2496 && ! side_effects_p (SET_SRC (set1
)))
2499 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
2502 else if (((REG_P (SET_DEST (set0
))
2503 && find_reg_note (i3
, REG_UNUSED
, SET_DEST (set0
)))
2504 || (GET_CODE (SET_DEST (set0
)) == SUBREG
2505 && find_reg_note (i3
, REG_UNUSED
,
2506 SUBREG_REG (SET_DEST (set0
)))))
2507 && (!(note
= find_reg_note (i3
, REG_EH_REGION
, NULL_RTX
))
2508 || INTVAL (XEXP (note
, 0)) <= 0)
2509 && ! side_effects_p (SET_SRC (set0
)))
2512 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
2514 if (insn_code_number
>= 0)
2516 /* If we will be able to accept this, we have made a
2517 change to the destination of I3. This requires us to
2518 do a few adjustments. */
2520 PATTERN (i3
) = newpat
;
2521 adjust_for_new_dest (i3
);
2526 /* If we were combining three insns and the result is a simple SET
2527 with no ASM_OPERANDS that wasn't recognized, try to split it into two
2528 insns. There are two ways to do this. It can be split using a
2529 machine-specific method (like when you have an addition of a large
2530 constant) or by combine in the function find_split_point. */
2532 if (i1
&& insn_code_number
< 0 && GET_CODE (newpat
) == SET
2533 && asm_noperands (newpat
) < 0)
2535 rtx m_split
, *split
;
2537 /* See if the MD file can split NEWPAT. If it can't, see if letting it
2538 use I2DEST as a scratch register will help. In the latter case,
2539 convert I2DEST to the mode of the source of NEWPAT if we can. */
2541 m_split
= split_insns (newpat
, i3
);
2543 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2544 inputs of NEWPAT. */
2546 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2547 possible to try that as a scratch reg. This would require adding
2548 more code to make it work though. */
2550 if (m_split
== 0 && ! reg_overlap_mentioned_p (i2dest
, newpat
))
2552 enum machine_mode new_mode
= GET_MODE (SET_DEST (newpat
));
2554 /* First try to split using the original register as a
2555 scratch register. */
2556 m_split
= split_insns (gen_rtx_PARALLEL
2558 gen_rtvec (2, newpat
,
2559 gen_rtx_CLOBBER (VOIDmode
,
2563 /* If that didn't work, try changing the mode of I2DEST if
2566 && new_mode
!= GET_MODE (i2dest
)
2567 && new_mode
!= VOIDmode
2568 && can_change_dest_mode (i2dest
, added_sets_2
, new_mode
))
2570 enum machine_mode old_mode
= GET_MODE (i2dest
);
2573 if (REGNO (i2dest
) < FIRST_PSEUDO_REGISTER
)
2574 ni2dest
= gen_rtx_REG (new_mode
, REGNO (i2dest
));
2577 SUBST_MODE (regno_reg_rtx
[REGNO (i2dest
)], new_mode
);
2578 ni2dest
= regno_reg_rtx
[REGNO (i2dest
)];
2581 m_split
= split_insns (gen_rtx_PARALLEL
2583 gen_rtvec (2, newpat
,
2584 gen_rtx_CLOBBER (VOIDmode
,
2589 && REGNO (i2dest
) >= FIRST_PSEUDO_REGISTER
)
2593 PUT_MODE (regno_reg_rtx
[REGNO (i2dest
)], old_mode
);
2594 buf
= undobuf
.undos
;
2595 undobuf
.undos
= buf
->next
;
2596 buf
->next
= undobuf
.frees
;
2597 undobuf
.frees
= buf
;
2602 /* If recog_for_combine has discarded clobbers, try to use them
2603 again for the split. */
2604 if (m_split
== 0 && newpat_vec_with_clobbers
)
2606 = split_insns (gen_rtx_PARALLEL (VOIDmode
,
2607 newpat_vec_with_clobbers
), i3
);
2609 if (m_split
&& NEXT_INSN (m_split
) == NULL_RTX
)
2611 m_split
= PATTERN (m_split
);
2612 insn_code_number
= recog_for_combine (&m_split
, i3
, &new_i3_notes
);
2613 if (insn_code_number
>= 0)
2616 else if (m_split
&& NEXT_INSN (NEXT_INSN (m_split
)) == NULL_RTX
2617 && (next_real_insn (i2
) == i3
2618 || ! use_crosses_set_p (PATTERN (m_split
), INSN_CUID (i2
))))
2621 rtx newi3pat
= PATTERN (NEXT_INSN (m_split
));
2622 newi2pat
= PATTERN (m_split
);
2624 i3set
= single_set (NEXT_INSN (m_split
));
2625 i2set
= single_set (m_split
);
2627 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
2629 /* If I2 or I3 has multiple SETs, we won't know how to track
2630 register status, so don't use these insns. If I2's destination
2631 is used between I2 and I3, we also can't use these insns. */
2633 if (i2_code_number
>= 0 && i2set
&& i3set
2634 && (next_real_insn (i2
) == i3
2635 || ! reg_used_between_p (SET_DEST (i2set
), i2
, i3
)))
2636 insn_code_number
= recog_for_combine (&newi3pat
, i3
,
2638 if (insn_code_number
>= 0)
2641 /* It is possible that both insns now set the destination of I3.
2642 If so, we must show an extra use of it. */
2644 if (insn_code_number
>= 0)
2646 rtx new_i3_dest
= SET_DEST (i3set
);
2647 rtx new_i2_dest
= SET_DEST (i2set
);
2649 while (GET_CODE (new_i3_dest
) == ZERO_EXTRACT
2650 || GET_CODE (new_i3_dest
) == STRICT_LOW_PART
2651 || GET_CODE (new_i3_dest
) == SUBREG
)
2652 new_i3_dest
= XEXP (new_i3_dest
, 0);
2654 while (GET_CODE (new_i2_dest
) == ZERO_EXTRACT
2655 || GET_CODE (new_i2_dest
) == STRICT_LOW_PART
2656 || GET_CODE (new_i2_dest
) == SUBREG
)
2657 new_i2_dest
= XEXP (new_i2_dest
, 0);
2659 if (REG_P (new_i3_dest
)
2660 && REG_P (new_i2_dest
)
2661 && REGNO (new_i3_dest
) == REGNO (new_i2_dest
))
2662 REG_N_SETS (REGNO (new_i2_dest
))++;
2666 /* If we can split it and use I2DEST, go ahead and see if that
2667 helps things be recognized. Verify that none of the registers
2668 are set between I2 and I3. */
2669 if (insn_code_number
< 0 && (split
= find_split_point (&newpat
, i3
)) != 0
2673 /* We need I2DEST in the proper mode. If it is a hard register
2674 or the only use of a pseudo, we can change its mode.
2675 Make sure we don't change a hard register to have a mode that
2676 isn't valid for it, or change the number of registers. */
2677 && (GET_MODE (*split
) == GET_MODE (i2dest
)
2678 || GET_MODE (*split
) == VOIDmode
2679 || can_change_dest_mode (i2dest
, added_sets_2
,
2681 && (next_real_insn (i2
) == i3
2682 || ! use_crosses_set_p (*split
, INSN_CUID (i2
)))
2683 /* We can't overwrite I2DEST if its value is still used by
2685 && ! reg_referenced_p (i2dest
, newpat
))
2687 rtx newdest
= i2dest
;
2688 enum rtx_code split_code
= GET_CODE (*split
);
2689 enum machine_mode split_mode
= GET_MODE (*split
);
2690 bool subst_done
= false;
2691 newi2pat
= NULL_RTX
;
2693 /* Get NEWDEST as a register in the proper mode. We have already
2694 validated that we can do this. */
2695 if (GET_MODE (i2dest
) != split_mode
&& split_mode
!= VOIDmode
)
2697 if (REGNO (i2dest
) < FIRST_PSEUDO_REGISTER
)
2698 newdest
= gen_rtx_REG (split_mode
, REGNO (i2dest
));
2701 SUBST_MODE (regno_reg_rtx
[REGNO (i2dest
)], split_mode
);
2702 newdest
= regno_reg_rtx
[REGNO (i2dest
)];
2706 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2707 an ASHIFT. This can occur if it was inside a PLUS and hence
2708 appeared to be a memory address. This is a kludge. */
2709 if (split_code
== MULT
2710 && GET_CODE (XEXP (*split
, 1)) == CONST_INT
2711 && INTVAL (XEXP (*split
, 1)) > 0
2712 && (i
= exact_log2 (INTVAL (XEXP (*split
, 1)))) >= 0)
2714 SUBST (*split
, gen_rtx_ASHIFT (split_mode
,
2715 XEXP (*split
, 0), GEN_INT (i
)));
2716 /* Update split_code because we may not have a multiply
2718 split_code
= GET_CODE (*split
);
2721 #ifdef INSN_SCHEDULING
2722 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2723 be written as a ZERO_EXTEND. */
2724 if (split_code
== SUBREG
&& MEM_P (SUBREG_REG (*split
)))
2726 #ifdef LOAD_EXTEND_OP
2727 /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2728 what it really is. */
2729 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split
)))
2731 SUBST (*split
, gen_rtx_SIGN_EXTEND (split_mode
,
2732 SUBREG_REG (*split
)));
2735 SUBST (*split
, gen_rtx_ZERO_EXTEND (split_mode
,
2736 SUBREG_REG (*split
)));
2740 /* Attempt to split binary operators using arithmetic identities. */
2741 if (BINARY_P (SET_SRC (newpat
))
2742 && split_mode
== GET_MODE (SET_SRC (newpat
))
2743 && ! side_effects_p (SET_SRC (newpat
)))
2745 rtx setsrc
= SET_SRC (newpat
);
2746 enum machine_mode mode
= GET_MODE (setsrc
);
2747 enum rtx_code code
= GET_CODE (setsrc
);
2748 rtx src_op0
= XEXP (setsrc
, 0);
2749 rtx src_op1
= XEXP (setsrc
, 1);
2751 /* Split "X = Y op Y" as "Z = Y; X = Z op Z". */
2752 if (rtx_equal_p (src_op0
, src_op1
))
2754 newi2pat
= gen_rtx_SET (VOIDmode
, newdest
, src_op0
);
2755 SUBST (XEXP (setsrc
, 0), newdest
);
2756 SUBST (XEXP (setsrc
, 1), newdest
);
2759 /* Split "((P op Q) op R) op S" where op is PLUS or MULT. */
2760 else if ((code
== PLUS
|| code
== MULT
)
2761 && GET_CODE (src_op0
) == code
2762 && GET_CODE (XEXP (src_op0
, 0)) == code
2763 && (INTEGRAL_MODE_P (mode
)
2764 || (FLOAT_MODE_P (mode
)
2765 && flag_unsafe_math_optimizations
)))
2767 rtx p
= XEXP (XEXP (src_op0
, 0), 0);
2768 rtx q
= XEXP (XEXP (src_op0
, 0), 1);
2769 rtx r
= XEXP (src_op0
, 1);
2772 /* Split both "((X op Y) op X) op Y" and
2773 "((X op Y) op Y) op X" as "T op T" where T is
2775 if ((rtx_equal_p (p
,r
) && rtx_equal_p (q
,s
))
2776 || (rtx_equal_p (p
,s
) && rtx_equal_p (q
,r
)))
2778 newi2pat
= gen_rtx_SET (VOIDmode
, newdest
,
2780 SUBST (XEXP (setsrc
, 0), newdest
);
2781 SUBST (XEXP (setsrc
, 1), newdest
);
2784 /* Split "((X op X) op Y) op Y)" as "T op T" where
2786 else if (rtx_equal_p (p
,q
) && rtx_equal_p (r
,s
))
2788 rtx tmp
= simplify_gen_binary (code
, mode
, p
, r
);
2789 newi2pat
= gen_rtx_SET (VOIDmode
, newdest
, tmp
);
2790 SUBST (XEXP (setsrc
, 0), newdest
);
2791 SUBST (XEXP (setsrc
, 1), newdest
);
2799 newi2pat
= gen_rtx_SET (VOIDmode
, newdest
, *split
);
2800 SUBST (*split
, newdest
);
2803 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
2805 /* recog_for_combine might have added CLOBBERs to newi2pat.
2806 Make sure NEWPAT does not depend on the clobbered regs. */
2807 if (GET_CODE (newi2pat
) == PARALLEL
)
2808 for (i
= XVECLEN (newi2pat
, 0) - 1; i
>= 0; i
--)
2809 if (GET_CODE (XVECEXP (newi2pat
, 0, i
)) == CLOBBER
)
2811 rtx reg
= XEXP (XVECEXP (newi2pat
, 0, i
), 0);
2812 if (reg_overlap_mentioned_p (reg
, newpat
))
2819 /* If the split point was a MULT and we didn't have one before,
2820 don't use one now. */
2821 if (i2_code_number
>= 0 && ! (split_code
== MULT
&& ! have_mult
))
2822 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
2826 /* Check for a case where we loaded from memory in a narrow mode and
2827 then sign extended it, but we need both registers. In that case,
2828 we have a PARALLEL with both loads from the same memory location.
2829 We can split this into a load from memory followed by a register-register
2830 copy. This saves at least one insn, more if register allocation can
2833 We cannot do this if the destination of the first assignment is a
2834 condition code register or cc0. We eliminate this case by making sure
2835 the SET_DEST and SET_SRC have the same mode.
2837 We cannot do this if the destination of the second assignment is
2838 a register that we have already assumed is zero-extended. Similarly
2839 for a SUBREG of such a register. */
2841 else if (i1
&& insn_code_number
< 0 && asm_noperands (newpat
) < 0
2842 && GET_CODE (newpat
) == PARALLEL
2843 && XVECLEN (newpat
, 0) == 2
2844 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
2845 && GET_CODE (SET_SRC (XVECEXP (newpat
, 0, 0))) == SIGN_EXTEND
2846 && (GET_MODE (SET_DEST (XVECEXP (newpat
, 0, 0)))
2847 == GET_MODE (SET_SRC (XVECEXP (newpat
, 0, 0))))
2848 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
2849 && rtx_equal_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
2850 XEXP (SET_SRC (XVECEXP (newpat
, 0, 0)), 0))
2851 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
2853 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != ZERO_EXTRACT
2854 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != STRICT_LOW_PART
2855 && ! (temp
= SET_DEST (XVECEXP (newpat
, 0, 1)),
2857 && reg_stat
[REGNO (temp
)].nonzero_bits
!= 0
2858 && GET_MODE_BITSIZE (GET_MODE (temp
)) < BITS_PER_WORD
2859 && GET_MODE_BITSIZE (GET_MODE (temp
)) < HOST_BITS_PER_INT
2860 && (reg_stat
[REGNO (temp
)].nonzero_bits
2861 != GET_MODE_MASK (word_mode
))))
2862 && ! (GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) == SUBREG
2863 && (temp
= SUBREG_REG (SET_DEST (XVECEXP (newpat
, 0, 1))),
2865 && reg_stat
[REGNO (temp
)].nonzero_bits
!= 0
2866 && GET_MODE_BITSIZE (GET_MODE (temp
)) < BITS_PER_WORD
2867 && GET_MODE_BITSIZE (GET_MODE (temp
)) < HOST_BITS_PER_INT
2868 && (reg_stat
[REGNO (temp
)].nonzero_bits
2869 != GET_MODE_MASK (word_mode
)))))
2870 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat
, 0, 1)),
2871 SET_SRC (XVECEXP (newpat
, 0, 1)))
2872 && ! find_reg_note (i3
, REG_UNUSED
,
2873 SET_DEST (XVECEXP (newpat
, 0, 0))))
2877 newi2pat
= XVECEXP (newpat
, 0, 0);
2878 ni2dest
= SET_DEST (XVECEXP (newpat
, 0, 0));
2879 newpat
= XVECEXP (newpat
, 0, 1);
2880 SUBST (SET_SRC (newpat
),
2881 gen_lowpart (GET_MODE (SET_SRC (newpat
)), ni2dest
));
2882 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
2884 if (i2_code_number
>= 0)
2885 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
2887 if (insn_code_number
>= 0)
2891 /* Similarly, check for a case where we have a PARALLEL of two independent
2892 SETs but we started with three insns. In this case, we can do the sets
2893 as two separate insns. This case occurs when some SET allows two
2894 other insns to combine, but the destination of that SET is still live. */
2896 else if (i1
&& insn_code_number
< 0 && asm_noperands (newpat
) < 0
2897 && GET_CODE (newpat
) == PARALLEL
2898 && XVECLEN (newpat
, 0) == 2
2899 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
2900 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != ZERO_EXTRACT
2901 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != STRICT_LOW_PART
2902 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
2903 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != ZERO_EXTRACT
2904 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != STRICT_LOW_PART
2905 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
2907 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat
, 0, 1)),
2908 XVECEXP (newpat
, 0, 0))
2909 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat
, 0, 0)),
2910 XVECEXP (newpat
, 0, 1))
2911 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat
, 0, 0)))
2912 && contains_muldiv (SET_SRC (XVECEXP (newpat
, 0, 1))))
2914 /* We cannot split the parallel into two sets if both sets
2916 && ! (reg_referenced_p (cc0_rtx
, XVECEXP (newpat
, 0, 0))
2917 && reg_referenced_p (cc0_rtx
, XVECEXP (newpat
, 0, 1)))
2921 /* Normally, it doesn't matter which of the two is done first,
2922 but it does if one references cc0. In that case, it has to
2925 if (reg_referenced_p (cc0_rtx
, XVECEXP (newpat
, 0, 0)))
2927 newi2pat
= XVECEXP (newpat
, 0, 0);
2928 newpat
= XVECEXP (newpat
, 0, 1);
2933 newi2pat
= XVECEXP (newpat
, 0, 1);
2934 newpat
= XVECEXP (newpat
, 0, 0);
2937 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
2939 if (i2_code_number
>= 0)
2940 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
2943 /* If it still isn't recognized, fail and change things back the way they
2945 if ((insn_code_number
< 0
2946 /* Is the result a reasonable ASM_OPERANDS? */
2947 && (! check_asm_operands (newpat
) || added_sets_1
|| added_sets_2
)))
2953 /* If we had to change another insn, make sure it is valid also. */
2954 if (undobuf
.other_insn
)
2956 rtx other_pat
= PATTERN (undobuf
.other_insn
);
2957 rtx new_other_notes
;
2960 CLEAR_HARD_REG_SET (newpat_used_regs
);
2962 other_code_number
= recog_for_combine (&other_pat
, undobuf
.other_insn
,
2965 if (other_code_number
< 0 && ! check_asm_operands (other_pat
))
2971 PATTERN (undobuf
.other_insn
) = other_pat
;
2973 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2974 are still valid. Then add any non-duplicate notes added by
2975 recog_for_combine. */
2976 for (note
= REG_NOTES (undobuf
.other_insn
); note
; note
= next
)
2978 next
= XEXP (note
, 1);
2980 if (REG_NOTE_KIND (note
) == REG_UNUSED
2981 && ! reg_set_p (XEXP (note
, 0), PATTERN (undobuf
.other_insn
)))
2983 if (REG_P (XEXP (note
, 0)))
2984 REG_N_DEATHS (REGNO (XEXP (note
, 0)))--;
2986 remove_note (undobuf
.other_insn
, note
);
2990 for (note
= new_other_notes
; note
; note
= XEXP (note
, 1))
2991 if (REG_P (XEXP (note
, 0)))
2992 REG_N_DEATHS (REGNO (XEXP (note
, 0)))++;
2994 distribute_notes (new_other_notes
, undobuf
.other_insn
,
2995 undobuf
.other_insn
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
2998 /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
2999 they are adjacent to each other or not. */
3001 rtx p
= prev_nonnote_insn (i3
);
3002 if (p
&& p
!= i2
&& NONJUMP_INSN_P (p
) && newi2pat
3003 && sets_cc0_p (newi2pat
))
3011 /* Only allow this combination if insn_rtx_costs reports that the
3012 replacement instructions are cheaper than the originals. */
3013 if (!combine_validate_cost (i1
, i2
, i3
, newpat
, newi2pat
))
3019 /* We now know that we can do this combination. Merge the insns and
3020 update the status of registers and LOG_LINKS. */
3028 /* I3 now uses what used to be its destination and which is now
3029 I2's destination. This requires us to do a few adjustments. */
3030 PATTERN (i3
) = newpat
;
3031 adjust_for_new_dest (i3
);
3033 /* We need a LOG_LINK from I3 to I2. But we used to have one,
3036 However, some later insn might be using I2's dest and have
3037 a LOG_LINK pointing at I3. We must remove this link.
3038 The simplest way to remove the link is to point it at I1,
3039 which we know will be a NOTE. */
3041 /* newi2pat is usually a SET here; however, recog_for_combine might
3042 have added some clobbers. */
3043 if (GET_CODE (newi2pat
) == PARALLEL
)
3044 ni2dest
= SET_DEST (XVECEXP (newi2pat
, 0, 0));
3046 ni2dest
= SET_DEST (newi2pat
);
3048 for (insn
= NEXT_INSN (i3
);
3049 insn
&& (this_basic_block
->next_bb
== EXIT_BLOCK_PTR
3050 || insn
!= BB_HEAD (this_basic_block
->next_bb
));
3051 insn
= NEXT_INSN (insn
))
3053 if (INSN_P (insn
) && reg_referenced_p (ni2dest
, PATTERN (insn
)))
3055 for (link
= LOG_LINKS (insn
); link
;
3056 link
= XEXP (link
, 1))
3057 if (XEXP (link
, 0) == i3
)
3058 XEXP (link
, 0) = i1
;
3066 rtx i3notes
, i2notes
, i1notes
= 0;
3067 rtx i3links
, i2links
, i1links
= 0;
3070 /* Compute which registers we expect to eliminate. newi2pat may be setting
3071 either i3dest or i2dest, so we must check it. Also, i1dest may be the
3072 same as i3dest, in which case newi2pat may be setting i1dest. */
3073 rtx elim_i2
= ((newi2pat
&& reg_set_p (i2dest
, newi2pat
))
3074 || i2dest_in_i2src
|| i2dest_in_i1src
3077 rtx elim_i1
= (i1
== 0 || i1dest_in_i1src
3078 || (newi2pat
&& reg_set_p (i1dest
, newi2pat
))
3082 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
3084 i3notes
= REG_NOTES (i3
), i3links
= LOG_LINKS (i3
);
3085 i2notes
= REG_NOTES (i2
), i2links
= LOG_LINKS (i2
);
3087 i1notes
= REG_NOTES (i1
), i1links
= LOG_LINKS (i1
);
3089 /* Ensure that we do not have something that should not be shared but
3090 occurs multiple times in the new insns. Check this by first
3091 resetting all the `used' flags and then copying anything is shared. */
3093 reset_used_flags (i3notes
);
3094 reset_used_flags (i2notes
);
3095 reset_used_flags (i1notes
);
3096 reset_used_flags (newpat
);
3097 reset_used_flags (newi2pat
);
3098 if (undobuf
.other_insn
)
3099 reset_used_flags (PATTERN (undobuf
.other_insn
));
3101 i3notes
= copy_rtx_if_shared (i3notes
);
3102 i2notes
= copy_rtx_if_shared (i2notes
);
3103 i1notes
= copy_rtx_if_shared (i1notes
);
3104 newpat
= copy_rtx_if_shared (newpat
);
3105 newi2pat
= copy_rtx_if_shared (newi2pat
);
3106 if (undobuf
.other_insn
)
3107 reset_used_flags (PATTERN (undobuf
.other_insn
));
3109 INSN_CODE (i3
) = insn_code_number
;
3110 PATTERN (i3
) = newpat
;
3112 if (CALL_P (i3
) && CALL_INSN_FUNCTION_USAGE (i3
))
3114 rtx call_usage
= CALL_INSN_FUNCTION_USAGE (i3
);
3116 reset_used_flags (call_usage
);
3117 call_usage
= copy_rtx (call_usage
);
3120 replace_rtx (call_usage
, i2dest
, i2src
);
3123 replace_rtx (call_usage
, i1dest
, i1src
);
3125 CALL_INSN_FUNCTION_USAGE (i3
) = call_usage
;
3128 if (undobuf
.other_insn
)
3129 INSN_CODE (undobuf
.other_insn
) = other_code_number
;
3131 /* We had one special case above where I2 had more than one set and
3132 we replaced a destination of one of those sets with the destination
3133 of I3. In that case, we have to update LOG_LINKS of insns later
3134 in this basic block. Note that this (expensive) case is rare.
3136 Also, in this case, we must pretend that all REG_NOTEs for I2
3137 actually came from I3, so that REG_UNUSED notes from I2 will be
3138 properly handled. */
3140 if (i3_subst_into_i2
)
3142 for (i
= 0; i
< XVECLEN (PATTERN (i2
), 0); i
++)
3143 if ((GET_CODE (XVECEXP (PATTERN (i2
), 0, i
)) == SET
3144 || GET_CODE (XVECEXP (PATTERN (i2
), 0, i
)) == CLOBBER
)
3145 && REG_P (SET_DEST (XVECEXP (PATTERN (i2
), 0, i
)))
3146 && SET_DEST (XVECEXP (PATTERN (i2
), 0, i
)) != i2dest
3147 && ! find_reg_note (i2
, REG_UNUSED
,
3148 SET_DEST (XVECEXP (PATTERN (i2
), 0, i
))))
3149 for (temp
= NEXT_INSN (i2
);
3150 temp
&& (this_basic_block
->next_bb
== EXIT_BLOCK_PTR
3151 || BB_HEAD (this_basic_block
) != temp
);
3152 temp
= NEXT_INSN (temp
))
3153 if (temp
!= i3
&& INSN_P (temp
))
3154 for (link
= LOG_LINKS (temp
); link
; link
= XEXP (link
, 1))
3155 if (XEXP (link
, 0) == i2
)
3156 XEXP (link
, 0) = i3
;
3161 while (XEXP (link
, 1))
3162 link
= XEXP (link
, 1);
3163 XEXP (link
, 1) = i2notes
;
3177 INSN_CODE (i2
) = i2_code_number
;
3178 PATTERN (i2
) = newi2pat
;
3181 SET_INSN_DELETED (i2
);
3187 SET_INSN_DELETED (i1
);
3190 /* Get death notes for everything that is now used in either I3 or
3191 I2 and used to die in a previous insn. If we built two new
3192 patterns, move from I1 to I2 then I2 to I3 so that we get the
3193 proper movement on registers that I2 modifies. */
3197 move_deaths (newi2pat
, NULL_RTX
, INSN_CUID (i1
), i2
, &midnotes
);
3198 move_deaths (newpat
, newi2pat
, INSN_CUID (i1
), i3
, &midnotes
);
3201 move_deaths (newpat
, NULL_RTX
, i1
? INSN_CUID (i1
) : INSN_CUID (i2
),
3204 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
3206 distribute_notes (i3notes
, i3
, i3
, newi2pat
? i2
: NULL_RTX
,
3209 distribute_notes (i2notes
, i2
, i3
, newi2pat
? i2
: NULL_RTX
,
3212 distribute_notes (i1notes
, i1
, i3
, newi2pat
? i2
: NULL_RTX
,
3215 distribute_notes (midnotes
, NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
3218 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
3219 know these are REG_UNUSED and want them to go to the desired insn,
3220 so we always pass it as i3. We have not counted the notes in
3221 reg_n_deaths yet, so we need to do so now. */
3223 if (newi2pat
&& new_i2_notes
)
3225 for (temp
= new_i2_notes
; temp
; temp
= XEXP (temp
, 1))
3226 if (REG_P (XEXP (temp
, 0)))
3227 REG_N_DEATHS (REGNO (XEXP (temp
, 0)))++;
3229 distribute_notes (new_i2_notes
, i2
, i2
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
3234 for (temp
= new_i3_notes
; temp
; temp
= XEXP (temp
, 1))
3235 if (REG_P (XEXP (temp
, 0)))
3236 REG_N_DEATHS (REGNO (XEXP (temp
, 0)))++;
3238 distribute_notes (new_i3_notes
, i3
, i3
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
3241 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
3242 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
3243 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
3244 in that case, it might delete I2. Similarly for I2 and I1.
3245 Show an additional death due to the REG_DEAD note we make here. If
3246 we discard it in distribute_notes, we will decrement it again. */
3250 if (REG_P (i3dest_killed
))
3251 REG_N_DEATHS (REGNO (i3dest_killed
))++;
3253 if (newi2pat
&& reg_set_p (i3dest_killed
, newi2pat
))
3254 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD
, i3dest_killed
,
3256 NULL_RTX
, i2
, NULL_RTX
, elim_i2
, elim_i1
);
3258 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD
, i3dest_killed
,
3260 NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
3264 if (i2dest_in_i2src
)
3267 REG_N_DEATHS (REGNO (i2dest
))++;
3269 if (newi2pat
&& reg_set_p (i2dest
, newi2pat
))
3270 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD
, i2dest
, NULL_RTX
),
3271 NULL_RTX
, i2
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
3273 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD
, i2dest
, NULL_RTX
),
3274 NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
3275 NULL_RTX
, NULL_RTX
);
3278 if (i1dest_in_i1src
)
3281 REG_N_DEATHS (REGNO (i1dest
))++;
3283 if (newi2pat
&& reg_set_p (i1dest
, newi2pat
))
3284 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD
, i1dest
, NULL_RTX
),
3285 NULL_RTX
, i2
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
3287 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD
, i1dest
, NULL_RTX
),
3288 NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
3289 NULL_RTX
, NULL_RTX
);
3292 distribute_links (i3links
);
3293 distribute_links (i2links
);
3294 distribute_links (i1links
);
3299 rtx i2_insn
= 0, i2_val
= 0, set
;
3301 /* The insn that used to set this register doesn't exist, and
3302 this life of the register may not exist either. See if one of
3303 I3's links points to an insn that sets I2DEST. If it does,
3304 that is now the last known value for I2DEST. If we don't update
3305 this and I2 set the register to a value that depended on its old
3306 contents, we will get confused. If this insn is used, thing
3307 will be set correctly in combine_instructions. */
3309 for (link
= LOG_LINKS (i3
); link
; link
= XEXP (link
, 1))
3310 if ((set
= single_set (XEXP (link
, 0))) != 0
3311 && rtx_equal_p (i2dest
, SET_DEST (set
)))
3312 i2_insn
= XEXP (link
, 0), i2_val
= SET_SRC (set
);
3314 record_value_for_reg (i2dest
, i2_insn
, i2_val
);
3316 /* If the reg formerly set in I2 died only once and that was in I3,
3317 zero its use count so it won't make `reload' do any work. */
3319 && (newi2pat
== 0 || ! reg_mentioned_p (i2dest
, newi2pat
))
3320 && ! i2dest_in_i2src
)
3322 regno
= REGNO (i2dest
);
3323 REG_N_SETS (regno
)--;
3327 if (i1
&& REG_P (i1dest
))
3330 rtx i1_insn
= 0, i1_val
= 0, set
;
3332 for (link
= LOG_LINKS (i3
); link
; link
= XEXP (link
, 1))
3333 if ((set
= single_set (XEXP (link
, 0))) != 0
3334 && rtx_equal_p (i1dest
, SET_DEST (set
)))
3335 i1_insn
= XEXP (link
, 0), i1_val
= SET_SRC (set
);
3337 record_value_for_reg (i1dest
, i1_insn
, i1_val
);
3339 regno
= REGNO (i1dest
);
3340 if (! added_sets_1
&& ! i1dest_in_i1src
)
3341 REG_N_SETS (regno
)--;
3344 /* Update reg_stat[].nonzero_bits et al for any changes that may have
3345 been made to this insn. The order of
3346 set_nonzero_bits_and_sign_copies() is important. Because newi2pat
3347 can affect nonzero_bits of newpat */
3349 note_stores (newi2pat
, set_nonzero_bits_and_sign_copies
, NULL
);
3350 note_stores (newpat
, set_nonzero_bits_and_sign_copies
, NULL
);
3352 /* Set new_direct_jump_p if a new return or simple jump instruction
3355 If I3 is now an unconditional jump, ensure that it has a
3356 BARRIER following it since it may have initially been a
3357 conditional jump. It may also be the last nonnote insn. */
3359 if (returnjump_p (i3
) || any_uncondjump_p (i3
))
3361 *new_direct_jump_p
= 1;
3362 mark_jump_label (PATTERN (i3
), i3
, 0);
3364 if ((temp
= next_nonnote_insn (i3
)) == NULL_RTX
3365 || !BARRIER_P (temp
))
3366 emit_barrier_after (i3
);
3369 if (undobuf
.other_insn
!= NULL_RTX
3370 && (returnjump_p (undobuf
.other_insn
)
3371 || any_uncondjump_p (undobuf
.other_insn
)))
3373 *new_direct_jump_p
= 1;
3375 if ((temp
= next_nonnote_insn (undobuf
.other_insn
)) == NULL_RTX
3376 || !BARRIER_P (temp
))
3377 emit_barrier_after (undobuf
.other_insn
);
3380 /* An NOOP jump does not need barrier, but it does need cleaning up
3382 if (GET_CODE (newpat
) == SET
3383 && SET_SRC (newpat
) == pc_rtx
3384 && SET_DEST (newpat
) == pc_rtx
)
3385 *new_direct_jump_p
= 1;
3388 combine_successes
++;
3391 if (added_links_insn
3392 && (newi2pat
== 0 || INSN_CUID (added_links_insn
) < INSN_CUID (i2
))
3393 && INSN_CUID (added_links_insn
) < INSN_CUID (i3
))
3394 return added_links_insn
;
3396 return newi2pat
? i2
: i3
;
3399 /* Undo all the modifications recorded in undobuf. */
3404 struct undo
*undo
, *next
;
3406 for (undo
= undobuf
.undos
; undo
; undo
= next
)
3412 *undo
->where
.r
= undo
->old_contents
.r
;
3415 *undo
->where
.i
= undo
->old_contents
.i
;
3418 PUT_MODE (*undo
->where
.r
, undo
->old_contents
.m
);
3424 undo
->next
= undobuf
.frees
;
3425 undobuf
.frees
= undo
;
3431 /* We've committed to accepting the changes we made. Move all
3432 of the undos to the free list. */
3437 struct undo
*undo
, *next
;
3439 for (undo
= undobuf
.undos
; undo
; undo
= next
)
3442 undo
->next
= undobuf
.frees
;
3443 undobuf
.frees
= undo
;
3448 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
3449 where we have an arithmetic expression and return that point. LOC will
3452 try_combine will call this function to see if an insn can be split into
3456 find_split_point (rtx
*loc
, rtx insn
)
3459 enum rtx_code code
= GET_CODE (x
);
3461 unsigned HOST_WIDE_INT len
= 0;
3462 HOST_WIDE_INT pos
= 0;
3464 rtx inner
= NULL_RTX
;
3466 /* First special-case some codes. */
3470 #ifdef INSN_SCHEDULING
3471 /* If we are making a paradoxical SUBREG invalid, it becomes a split
3473 if (MEM_P (SUBREG_REG (x
)))
3476 return find_split_point (&SUBREG_REG (x
), insn
);
3480 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
3481 using LO_SUM and HIGH. */
3482 if (GET_CODE (XEXP (x
, 0)) == CONST
3483 || GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
)
3486 gen_rtx_LO_SUM (Pmode
,
3487 gen_rtx_HIGH (Pmode
, XEXP (x
, 0)),
3489 return &XEXP (XEXP (x
, 0), 0);
3493 /* If we have a PLUS whose second operand is a constant and the
3494 address is not valid, perhaps will can split it up using
3495 the machine-specific way to split large constants. We use
3496 the first pseudo-reg (one of the virtual regs) as a placeholder;
3497 it will not remain in the result. */
3498 if (GET_CODE (XEXP (x
, 0)) == PLUS
3499 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3500 && ! memory_address_p (GET_MODE (x
), XEXP (x
, 0)))
3502 rtx reg
= regno_reg_rtx
[FIRST_PSEUDO_REGISTER
];
3503 rtx seq
= split_insns (gen_rtx_SET (VOIDmode
, reg
, XEXP (x
, 0)),
3506 /* This should have produced two insns, each of which sets our
3507 placeholder. If the source of the second is a valid address,
3508 we can make put both sources together and make a split point
3512 && NEXT_INSN (seq
) != NULL_RTX
3513 && NEXT_INSN (NEXT_INSN (seq
)) == NULL_RTX
3514 && NONJUMP_INSN_P (seq
)
3515 && GET_CODE (PATTERN (seq
)) == SET
3516 && SET_DEST (PATTERN (seq
)) == reg
3517 && ! reg_mentioned_p (reg
,
3518 SET_SRC (PATTERN (seq
)))
3519 && NONJUMP_INSN_P (NEXT_INSN (seq
))
3520 && GET_CODE (PATTERN (NEXT_INSN (seq
))) == SET
3521 && SET_DEST (PATTERN (NEXT_INSN (seq
))) == reg
3522 && memory_address_p (GET_MODE (x
),
3523 SET_SRC (PATTERN (NEXT_INSN (seq
)))))
3525 rtx src1
= SET_SRC (PATTERN (seq
));
3526 rtx src2
= SET_SRC (PATTERN (NEXT_INSN (seq
)));
3528 /* Replace the placeholder in SRC2 with SRC1. If we can
3529 find where in SRC2 it was placed, that can become our
3530 split point and we can replace this address with SRC2.
3531 Just try two obvious places. */
3533 src2
= replace_rtx (src2
, reg
, src1
);
3535 if (XEXP (src2
, 0) == src1
)
3536 split
= &XEXP (src2
, 0);
3537 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2
, 0)))[0] == 'e'
3538 && XEXP (XEXP (src2
, 0), 0) == src1
)
3539 split
= &XEXP (XEXP (src2
, 0), 0);
3543 SUBST (XEXP (x
, 0), src2
);
3548 /* If that didn't work, perhaps the first operand is complex and
3549 needs to be computed separately, so make a split point there.
3550 This will occur on machines that just support REG + CONST
3551 and have a constant moved through some previous computation. */
3553 else if (!OBJECT_P (XEXP (XEXP (x
, 0), 0))
3554 && ! (GET_CODE (XEXP (XEXP (x
, 0), 0)) == SUBREG
3555 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x
, 0), 0)))))
3556 return &XEXP (XEXP (x
, 0), 0);
3562 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
3563 ZERO_EXTRACT, the most likely reason why this doesn't match is that
3564 we need to put the operand into a register. So split at that
3567 if (SET_DEST (x
) == cc0_rtx
3568 && GET_CODE (SET_SRC (x
)) != COMPARE
3569 && GET_CODE (SET_SRC (x
)) != ZERO_EXTRACT
3570 && !OBJECT_P (SET_SRC (x
))
3571 && ! (GET_CODE (SET_SRC (x
)) == SUBREG
3572 && OBJECT_P (SUBREG_REG (SET_SRC (x
)))))
3573 return &SET_SRC (x
);
3576 /* See if we can split SET_SRC as it stands. */
3577 split
= find_split_point (&SET_SRC (x
), insn
);
3578 if (split
&& split
!= &SET_SRC (x
))
3581 /* See if we can split SET_DEST as it stands. */
3582 split
= find_split_point (&SET_DEST (x
), insn
);
3583 if (split
&& split
!= &SET_DEST (x
))
3586 /* See if this is a bitfield assignment with everything constant. If
3587 so, this is an IOR of an AND, so split it into that. */
3588 if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
3589 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0)))
3590 <= HOST_BITS_PER_WIDE_INT
)
3591 && GET_CODE (XEXP (SET_DEST (x
), 1)) == CONST_INT
3592 && GET_CODE (XEXP (SET_DEST (x
), 2)) == CONST_INT
3593 && GET_CODE (SET_SRC (x
)) == CONST_INT
3594 && ((INTVAL (XEXP (SET_DEST (x
), 1))
3595 + INTVAL (XEXP (SET_DEST (x
), 2)))
3596 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0))))
3597 && ! side_effects_p (XEXP (SET_DEST (x
), 0)))
3599 HOST_WIDE_INT pos
= INTVAL (XEXP (SET_DEST (x
), 2));
3600 unsigned HOST_WIDE_INT len
= INTVAL (XEXP (SET_DEST (x
), 1));
3601 unsigned HOST_WIDE_INT src
= INTVAL (SET_SRC (x
));
3602 rtx dest
= XEXP (SET_DEST (x
), 0);
3603 enum machine_mode mode
= GET_MODE (dest
);
3604 unsigned HOST_WIDE_INT mask
= ((HOST_WIDE_INT
) 1 << len
) - 1;
3607 if (BITS_BIG_ENDIAN
)
3608 pos
= GET_MODE_BITSIZE (mode
) - len
- pos
;
3610 or_mask
= gen_int_mode (src
<< pos
, mode
);
3613 simplify_gen_binary (IOR
, mode
, dest
, or_mask
));
3616 rtx negmask
= gen_int_mode (~(mask
<< pos
), mode
);
3618 simplify_gen_binary (IOR
, mode
,
3619 simplify_gen_binary (AND
, mode
,
3624 SUBST (SET_DEST (x
), dest
);
3626 split
= find_split_point (&SET_SRC (x
), insn
);
3627 if (split
&& split
!= &SET_SRC (x
))
3631 /* Otherwise, see if this is an operation that we can split into two.
3632 If so, try to split that. */
3633 code
= GET_CODE (SET_SRC (x
));
3638 /* If we are AND'ing with a large constant that is only a single
3639 bit and the result is only being used in a context where we
3640 need to know if it is zero or nonzero, replace it with a bit
3641 extraction. This will avoid the large constant, which might
3642 have taken more than one insn to make. If the constant were
3643 not a valid argument to the AND but took only one insn to make,
3644 this is no worse, but if it took more than one insn, it will
3647 if (GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
3648 && REG_P (XEXP (SET_SRC (x
), 0))
3649 && (pos
= exact_log2 (INTVAL (XEXP (SET_SRC (x
), 1)))) >= 7
3650 && REG_P (SET_DEST (x
))
3651 && (split
= find_single_use (SET_DEST (x
), insn
, (rtx
*) 0)) != 0
3652 && (GET_CODE (*split
) == EQ
|| GET_CODE (*split
) == NE
)
3653 && XEXP (*split
, 0) == SET_DEST (x
)
3654 && XEXP (*split
, 1) == const0_rtx
)
3656 rtx extraction
= make_extraction (GET_MODE (SET_DEST (x
)),
3657 XEXP (SET_SRC (x
), 0),
3658 pos
, NULL_RTX
, 1, 1, 0, 0);
3659 if (extraction
!= 0)
3661 SUBST (SET_SRC (x
), extraction
);
3662 return find_split_point (loc
, insn
);
3668 /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3669 is known to be on, this can be converted into a NEG of a shift. */
3670 if (STORE_FLAG_VALUE
== -1 && XEXP (SET_SRC (x
), 1) == const0_rtx
3671 && GET_MODE (SET_SRC (x
)) == GET_MODE (XEXP (SET_SRC (x
), 0))
3672 && 1 <= (pos
= exact_log2
3673 (nonzero_bits (XEXP (SET_SRC (x
), 0),
3674 GET_MODE (XEXP (SET_SRC (x
), 0))))))
3676 enum machine_mode mode
= GET_MODE (XEXP (SET_SRC (x
), 0));
3680 gen_rtx_LSHIFTRT (mode
,
3681 XEXP (SET_SRC (x
), 0),
3684 split
= find_split_point (&SET_SRC (x
), insn
);
3685 if (split
&& split
!= &SET_SRC (x
))
3691 inner
= XEXP (SET_SRC (x
), 0);
3693 /* We can't optimize if either mode is a partial integer
3694 mode as we don't know how many bits are significant
3696 if (GET_MODE_CLASS (GET_MODE (inner
)) == MODE_PARTIAL_INT
3697 || GET_MODE_CLASS (GET_MODE (SET_SRC (x
))) == MODE_PARTIAL_INT
)
3701 len
= GET_MODE_BITSIZE (GET_MODE (inner
));
3707 if (GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
3708 && GET_CODE (XEXP (SET_SRC (x
), 2)) == CONST_INT
)
3710 inner
= XEXP (SET_SRC (x
), 0);
3711 len
= INTVAL (XEXP (SET_SRC (x
), 1));
3712 pos
= INTVAL (XEXP (SET_SRC (x
), 2));
3714 if (BITS_BIG_ENDIAN
)
3715 pos
= GET_MODE_BITSIZE (GET_MODE (inner
)) - len
- pos
;
3716 unsignedp
= (code
== ZERO_EXTRACT
);
3724 if (len
&& pos
>= 0 && pos
+ len
<= GET_MODE_BITSIZE (GET_MODE (inner
)))
3726 enum machine_mode mode
= GET_MODE (SET_SRC (x
));
3728 /* For unsigned, we have a choice of a shift followed by an
3729 AND or two shifts. Use two shifts for field sizes where the
3730 constant might be too large. We assume here that we can
3731 always at least get 8-bit constants in an AND insn, which is
3732 true for every current RISC. */
3734 if (unsignedp
&& len
<= 8)
3739 (mode
, gen_lowpart (mode
, inner
),
3741 GEN_INT (((HOST_WIDE_INT
) 1 << len
) - 1)));
3743 split
= find_split_point (&SET_SRC (x
), insn
);
3744 if (split
&& split
!= &SET_SRC (x
))
3751 (unsignedp
? LSHIFTRT
: ASHIFTRT
, mode
,
3752 gen_rtx_ASHIFT (mode
,
3753 gen_lowpart (mode
, inner
),
3754 GEN_INT (GET_MODE_BITSIZE (mode
)
3756 GEN_INT (GET_MODE_BITSIZE (mode
) - len
)));
3758 split
= find_split_point (&SET_SRC (x
), insn
);
3759 if (split
&& split
!= &SET_SRC (x
))
3764 /* See if this is a simple operation with a constant as the second
3765 operand. It might be that this constant is out of range and hence
3766 could be used as a split point. */
3767 if (BINARY_P (SET_SRC (x
))
3768 && CONSTANT_P (XEXP (SET_SRC (x
), 1))
3769 && (OBJECT_P (XEXP (SET_SRC (x
), 0))
3770 || (GET_CODE (XEXP (SET_SRC (x
), 0)) == SUBREG
3771 && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x
), 0))))))
3772 return &XEXP (SET_SRC (x
), 1);
3774 /* Finally, see if this is a simple operation with its first operand
3775 not in a register. The operation might require this operand in a
3776 register, so return it as a split point. We can always do this
3777 because if the first operand were another operation, we would have
3778 already found it as a split point. */
3779 if ((BINARY_P (SET_SRC (x
)) || UNARY_P (SET_SRC (x
)))
3780 && ! register_operand (XEXP (SET_SRC (x
), 0), VOIDmode
))
3781 return &XEXP (SET_SRC (x
), 0);
3787 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3788 it is better to write this as (not (ior A B)) so we can split it.
3789 Similarly for IOR. */
3790 if (GET_CODE (XEXP (x
, 0)) == NOT
&& GET_CODE (XEXP (x
, 1)) == NOT
)
3793 gen_rtx_NOT (GET_MODE (x
),
3794 gen_rtx_fmt_ee (code
== IOR
? AND
: IOR
,
3796 XEXP (XEXP (x
, 0), 0),
3797 XEXP (XEXP (x
, 1), 0))));
3798 return find_split_point (loc
, insn
);
3801 /* Many RISC machines have a large set of logical insns. If the
3802 second operand is a NOT, put it first so we will try to split the
3803 other operand first. */
3804 if (GET_CODE (XEXP (x
, 1)) == NOT
)
3806 rtx tem
= XEXP (x
, 0);
3807 SUBST (XEXP (x
, 0), XEXP (x
, 1));
3808 SUBST (XEXP (x
, 1), tem
);
3816 /* Otherwise, select our actions depending on our rtx class. */
3817 switch (GET_RTX_CLASS (code
))
3819 case RTX_BITFIELD_OPS
: /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3821 split
= find_split_point (&XEXP (x
, 2), insn
);
3824 /* ... fall through ... */
3826 case RTX_COMM_ARITH
:
3828 case RTX_COMM_COMPARE
:
3829 split
= find_split_point (&XEXP (x
, 1), insn
);
3832 /* ... fall through ... */
3834 /* Some machines have (and (shift ...) ...) insns. If X is not
3835 an AND, but XEXP (X, 0) is, use it as our split point. */
3836 if (GET_CODE (x
) != AND
&& GET_CODE (XEXP (x
, 0)) == AND
)
3837 return &XEXP (x
, 0);
3839 split
= find_split_point (&XEXP (x
, 0), insn
);
3845 /* Otherwise, we don't have a split point. */
3850 /* Throughout X, replace FROM with TO, and return the result.
3851 The result is TO if X is FROM;
3852 otherwise the result is X, but its contents may have been modified.
3853 If they were modified, a record was made in undobuf so that
3854 undo_all will (among other things) return X to its original state.
3856 If the number of changes necessary is too much to record to undo,
3857 the excess changes are not made, so the result is invalid.
3858 The changes already made can still be undone.
3859 undobuf.num_undo is incremented for such changes, so by testing that
3860 the caller can tell whether the result is valid.
3862 `n_occurrences' is incremented each time FROM is replaced.
3864 IN_DEST is nonzero if we are processing the SET_DEST of a SET.
3866 UNIQUE_COPY is nonzero if each substitution must be unique. We do this
3867 by copying if `n_occurrences' is nonzero. */
3870 subst (rtx x
, rtx from
, rtx to
, int in_dest
, int unique_copy
)
3872 enum rtx_code code
= GET_CODE (x
);
3873 enum machine_mode op0_mode
= VOIDmode
;
3878 /* Two expressions are equal if they are identical copies of a shared
3879 RTX or if they are both registers with the same register number
3882 #define COMBINE_RTX_EQUAL_P(X,Y) \
3884 || (REG_P (X) && REG_P (Y) \
3885 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3887 if (! in_dest
&& COMBINE_RTX_EQUAL_P (x
, from
))
3890 return (unique_copy
&& n_occurrences
> 1 ? copy_rtx (to
) : to
);
3893 /* If X and FROM are the same register but different modes, they will
3894 not have been seen as equal above. However, flow.c will make a
3895 LOG_LINKS entry for that case. If we do nothing, we will try to
3896 rerecognize our original insn and, when it succeeds, we will
3897 delete the feeding insn, which is incorrect.
3899 So force this insn not to match in this (rare) case. */
3900 if (! in_dest
&& code
== REG
&& REG_P (from
)
3901 && REGNO (x
) == REGNO (from
))
3902 return gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
3904 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3905 of which may contain things that can be combined. */
3906 if (code
!= MEM
&& code
!= LO_SUM
&& OBJECT_P (x
))
3909 /* It is possible to have a subexpression appear twice in the insn.
3910 Suppose that FROM is a register that appears within TO.
3911 Then, after that subexpression has been scanned once by `subst',
3912 the second time it is scanned, TO may be found. If we were
3913 to scan TO here, we would find FROM within it and create a
3914 self-referent rtl structure which is completely wrong. */
3915 if (COMBINE_RTX_EQUAL_P (x
, to
))
3918 /* Parallel asm_operands need special attention because all of the
3919 inputs are shared across the arms. Furthermore, unsharing the
3920 rtl results in recognition failures. Failure to handle this case
3921 specially can result in circular rtl.
3923 Solve this by doing a normal pass across the first entry of the
3924 parallel, and only processing the SET_DESTs of the subsequent
3927 if (code
== PARALLEL
3928 && GET_CODE (XVECEXP (x
, 0, 0)) == SET
3929 && GET_CODE (SET_SRC (XVECEXP (x
, 0, 0))) == ASM_OPERANDS
)
3931 new = subst (XVECEXP (x
, 0, 0), from
, to
, 0, unique_copy
);
3933 /* If this substitution failed, this whole thing fails. */
3934 if (GET_CODE (new) == CLOBBER
3935 && XEXP (new, 0) == const0_rtx
)
3938 SUBST (XVECEXP (x
, 0, 0), new);
3940 for (i
= XVECLEN (x
, 0) - 1; i
>= 1; i
--)
3942 rtx dest
= SET_DEST (XVECEXP (x
, 0, i
));
3945 && GET_CODE (dest
) != CC0
3946 && GET_CODE (dest
) != PC
)
3948 new = subst (dest
, from
, to
, 0, unique_copy
);
3950 /* If this substitution failed, this whole thing fails. */
3951 if (GET_CODE (new) == CLOBBER
3952 && XEXP (new, 0) == const0_rtx
)
3955 SUBST (SET_DEST (XVECEXP (x
, 0, i
)), new);
3961 len
= GET_RTX_LENGTH (code
);
3962 fmt
= GET_RTX_FORMAT (code
);
3964 /* We don't need to process a SET_DEST that is a register, CC0,
3965 or PC, so set up to skip this common case. All other cases
3966 where we want to suppress replacing something inside a
3967 SET_SRC are handled via the IN_DEST operand. */
3969 && (REG_P (SET_DEST (x
))
3970 || GET_CODE (SET_DEST (x
)) == CC0
3971 || GET_CODE (SET_DEST (x
)) == PC
))
3974 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3977 op0_mode
= GET_MODE (XEXP (x
, 0));
3979 for (i
= 0; i
< len
; i
++)
3984 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
3986 if (COMBINE_RTX_EQUAL_P (XVECEXP (x
, i
, j
), from
))
3988 new = (unique_copy
&& n_occurrences
3989 ? copy_rtx (to
) : to
);
3994 new = subst (XVECEXP (x
, i
, j
), from
, to
, 0,
3997 /* If this substitution failed, this whole thing
3999 if (GET_CODE (new) == CLOBBER
4000 && XEXP (new, 0) == const0_rtx
)
4004 SUBST (XVECEXP (x
, i
, j
), new);
4007 else if (fmt
[i
] == 'e')
4009 /* If this is a register being set, ignore it. */
4013 && (((code
== SUBREG
|| code
== ZERO_EXTRACT
)
4015 || code
== STRICT_LOW_PART
))
4018 else if (COMBINE_RTX_EQUAL_P (XEXP (x
, i
), from
))
4020 /* In general, don't install a subreg involving two
4021 modes not tieable. It can worsen register
4022 allocation, and can even make invalid reload
4023 insns, since the reg inside may need to be copied
4024 from in the outside mode, and that may be invalid
4025 if it is an fp reg copied in integer mode.
4027 We allow two exceptions to this: It is valid if
4028 it is inside another SUBREG and the mode of that
4029 SUBREG and the mode of the inside of TO is
4030 tieable and it is valid if X is a SET that copies
4033 if (GET_CODE (to
) == SUBREG
4034 && ! MODES_TIEABLE_P (GET_MODE (to
),
4035 GET_MODE (SUBREG_REG (to
)))
4036 && ! (code
== SUBREG
4037 && MODES_TIEABLE_P (GET_MODE (x
),
4038 GET_MODE (SUBREG_REG (to
))))
4040 && ! (code
== SET
&& i
== 1 && XEXP (x
, 0) == cc0_rtx
)
4043 return gen_rtx_CLOBBER (VOIDmode
, const0_rtx
);
4045 #ifdef CANNOT_CHANGE_MODE_CLASS
4048 && REGNO (to
) < FIRST_PSEUDO_REGISTER
4049 && REG_CANNOT_CHANGE_MODE_P (REGNO (to
),
4052 return gen_rtx_CLOBBER (VOIDmode
, const0_rtx
);
4055 new = (unique_copy
&& n_occurrences
? copy_rtx (to
) : to
);
4059 /* If we are in a SET_DEST, suppress most cases unless we
4060 have gone inside a MEM, in which case we want to
4061 simplify the address. We assume here that things that
4062 are actually part of the destination have their inner
4063 parts in the first expression. This is true for SUBREG,
4064 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
4065 things aside from REG and MEM that should appear in a
4067 new = subst (XEXP (x
, i
), from
, to
,
4069 && (code
== SUBREG
|| code
== STRICT_LOW_PART
4070 || code
== ZERO_EXTRACT
))
4072 && i
== 0), unique_copy
);
4074 /* If we found that we will have to reject this combination,
4075 indicate that by returning the CLOBBER ourselves, rather than
4076 an expression containing it. This will speed things up as
4077 well as prevent accidents where two CLOBBERs are considered
4078 to be equal, thus producing an incorrect simplification. */
4080 if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx
)
4083 if (GET_CODE (x
) == SUBREG
4084 && (GET_CODE (new) == CONST_INT
4085 || GET_CODE (new) == CONST_DOUBLE
))
4087 enum machine_mode mode
= GET_MODE (x
);
4089 x
= simplify_subreg (GET_MODE (x
), new,
4090 GET_MODE (SUBREG_REG (x
)),
4093 x
= gen_rtx_CLOBBER (mode
, const0_rtx
);
4095 else if (GET_CODE (new) == CONST_INT
4096 && GET_CODE (x
) == ZERO_EXTEND
)
4098 x
= simplify_unary_operation (ZERO_EXTEND
, GET_MODE (x
),
4099 new, GET_MODE (XEXP (x
, 0)));
4103 SUBST (XEXP (x
, i
), new);
4108 /* Try to simplify X. If the simplification changed the code, it is likely
4109 that further simplification will help, so loop, but limit the number
4110 of repetitions that will be performed. */
4112 for (i
= 0; i
< 4; i
++)
4114 /* If X is sufficiently simple, don't bother trying to do anything
4116 if (code
!= CONST_INT
&& code
!= REG
&& code
!= CLOBBER
)
4117 x
= combine_simplify_rtx (x
, op0_mode
, in_dest
);
4119 if (GET_CODE (x
) == code
)
4122 code
= GET_CODE (x
);
4124 /* We no longer know the original mode of operand 0 since we
4125 have changed the form of X) */
4126 op0_mode
= VOIDmode
;
4132 /* Simplify X, a piece of RTL. We just operate on the expression at the
4133 outer level; call `subst' to simplify recursively. Return the new
4136 OP0_MODE is the original mode of XEXP (x, 0). IN_DEST is nonzero
4137 if we are inside a SET_DEST. */
4140 combine_simplify_rtx (rtx x
, enum machine_mode op0_mode
, int in_dest
)
4142 enum rtx_code code
= GET_CODE (x
);
4143 enum machine_mode mode
= GET_MODE (x
);
4147 /* If this is a commutative operation, put a constant last and a complex
4148 expression first. We don't need to do this for comparisons here. */
4149 if (COMMUTATIVE_ARITH_P (x
)
4150 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
4153 SUBST (XEXP (x
, 0), XEXP (x
, 1));
4154 SUBST (XEXP (x
, 1), temp
);
4157 /* If this is a simple operation applied to an IF_THEN_ELSE, try
4158 applying it to the arms of the IF_THEN_ELSE. This often simplifies
4159 things. Check for cases where both arms are testing the same
4162 Don't do anything if all operands are very simple. */
4165 && ((!OBJECT_P (XEXP (x
, 0))
4166 && ! (GET_CODE (XEXP (x
, 0)) == SUBREG
4167 && OBJECT_P (SUBREG_REG (XEXP (x
, 0)))))
4168 || (!OBJECT_P (XEXP (x
, 1))
4169 && ! (GET_CODE (XEXP (x
, 1)) == SUBREG
4170 && OBJECT_P (SUBREG_REG (XEXP (x
, 1)))))))
4172 && (!OBJECT_P (XEXP (x
, 0))
4173 && ! (GET_CODE (XEXP (x
, 0)) == SUBREG
4174 && OBJECT_P (SUBREG_REG (XEXP (x
, 0)))))))
4176 rtx cond
, true_rtx
, false_rtx
;
4178 cond
= if_then_else_cond (x
, &true_rtx
, &false_rtx
);
4180 /* If everything is a comparison, what we have is highly unlikely
4181 to be simpler, so don't use it. */
4182 && ! (COMPARISON_P (x
)
4183 && (COMPARISON_P (true_rtx
) || COMPARISON_P (false_rtx
))))
4185 rtx cop1
= const0_rtx
;
4186 enum rtx_code cond_code
= simplify_comparison (NE
, &cond
, &cop1
);
4188 if (cond_code
== NE
&& COMPARISON_P (cond
))
4191 /* Simplify the alternative arms; this may collapse the true and
4192 false arms to store-flag values. Be careful to use copy_rtx
4193 here since true_rtx or false_rtx might share RTL with x as a
4194 result of the if_then_else_cond call above. */
4195 true_rtx
= subst (copy_rtx (true_rtx
), pc_rtx
, pc_rtx
, 0, 0);
4196 false_rtx
= subst (copy_rtx (false_rtx
), pc_rtx
, pc_rtx
, 0, 0);
4198 /* If true_rtx and false_rtx are not general_operands, an if_then_else
4199 is unlikely to be simpler. */
4200 if (general_operand (true_rtx
, VOIDmode
)
4201 && general_operand (false_rtx
, VOIDmode
))
4203 enum rtx_code reversed
;
4205 /* Restarting if we generate a store-flag expression will cause
4206 us to loop. Just drop through in this case. */
4208 /* If the result values are STORE_FLAG_VALUE and zero, we can
4209 just make the comparison operation. */
4210 if (true_rtx
== const_true_rtx
&& false_rtx
== const0_rtx
)
4211 x
= simplify_gen_relational (cond_code
, mode
, VOIDmode
,
4213 else if (true_rtx
== const0_rtx
&& false_rtx
== const_true_rtx
4214 && ((reversed
= reversed_comparison_code_parts
4215 (cond_code
, cond
, cop1
, NULL
))
4217 x
= simplify_gen_relational (reversed
, mode
, VOIDmode
,
4220 /* Likewise, we can make the negate of a comparison operation
4221 if the result values are - STORE_FLAG_VALUE and zero. */
4222 else if (GET_CODE (true_rtx
) == CONST_INT
4223 && INTVAL (true_rtx
) == - STORE_FLAG_VALUE
4224 && false_rtx
== const0_rtx
)
4225 x
= simplify_gen_unary (NEG
, mode
,
4226 simplify_gen_relational (cond_code
,
4230 else if (GET_CODE (false_rtx
) == CONST_INT
4231 && INTVAL (false_rtx
) == - STORE_FLAG_VALUE
4232 && true_rtx
== const0_rtx
4233 && ((reversed
= reversed_comparison_code_parts
4234 (cond_code
, cond
, cop1
, NULL
))
4236 x
= simplify_gen_unary (NEG
, mode
,
4237 simplify_gen_relational (reversed
,
4242 return gen_rtx_IF_THEN_ELSE (mode
,
4243 simplify_gen_relational (cond_code
,
4248 true_rtx
, false_rtx
);
4250 code
= GET_CODE (x
);
4251 op0_mode
= VOIDmode
;
4256 /* Try to fold this expression in case we have constants that weren't
4259 switch (GET_RTX_CLASS (code
))
4262 if (op0_mode
== VOIDmode
)
4263 op0_mode
= GET_MODE (XEXP (x
, 0));
4264 temp
= simplify_unary_operation (code
, mode
, XEXP (x
, 0), op0_mode
);
4267 case RTX_COMM_COMPARE
:
4269 enum machine_mode cmp_mode
= GET_MODE (XEXP (x
, 0));
4270 if (cmp_mode
== VOIDmode
)
4272 cmp_mode
= GET_MODE (XEXP (x
, 1));
4273 if (cmp_mode
== VOIDmode
)
4274 cmp_mode
= op0_mode
;
4276 temp
= simplify_relational_operation (code
, mode
, cmp_mode
,
4277 XEXP (x
, 0), XEXP (x
, 1));
4280 case RTX_COMM_ARITH
:
4282 temp
= simplify_binary_operation (code
, mode
, XEXP (x
, 0), XEXP (x
, 1));
4284 case RTX_BITFIELD_OPS
:
4286 temp
= simplify_ternary_operation (code
, mode
, op0_mode
, XEXP (x
, 0),
4287 XEXP (x
, 1), XEXP (x
, 2));
4296 code
= GET_CODE (temp
);
4297 op0_mode
= VOIDmode
;
4298 mode
= GET_MODE (temp
);
4301 /* First see if we can apply the inverse distributive law. */
4302 if (code
== PLUS
|| code
== MINUS
4303 || code
== AND
|| code
== IOR
|| code
== XOR
)
4305 x
= apply_distributive_law (x
);
4306 code
= GET_CODE (x
);
4307 op0_mode
= VOIDmode
;
4310 /* If CODE is an associative operation not otherwise handled, see if we
4311 can associate some operands. This can win if they are constants or
4312 if they are logically related (i.e. (a & b) & a). */
4313 if ((code
== PLUS
|| code
== MINUS
|| code
== MULT
|| code
== DIV
4314 || code
== AND
|| code
== IOR
|| code
== XOR
4315 || code
== SMAX
|| code
== SMIN
|| code
== UMAX
|| code
== UMIN
)
4316 && ((INTEGRAL_MODE_P (mode
) && code
!= DIV
)
4317 || (flag_unsafe_math_optimizations
&& FLOAT_MODE_P (mode
))))
4319 if (GET_CODE (XEXP (x
, 0)) == code
)
4321 rtx other
= XEXP (XEXP (x
, 0), 0);
4322 rtx inner_op0
= XEXP (XEXP (x
, 0), 1);
4323 rtx inner_op1
= XEXP (x
, 1);
4326 /* Make sure we pass the constant operand if any as the second
4327 one if this is a commutative operation. */
4328 if (CONSTANT_P (inner_op0
) && COMMUTATIVE_ARITH_P (x
))
4330 rtx tem
= inner_op0
;
4331 inner_op0
= inner_op1
;
4334 inner
= simplify_binary_operation (code
== MINUS
? PLUS
4335 : code
== DIV
? MULT
4337 mode
, inner_op0
, inner_op1
);
4339 /* For commutative operations, try the other pair if that one
4341 if (inner
== 0 && COMMUTATIVE_ARITH_P (x
))
4343 other
= XEXP (XEXP (x
, 0), 1);
4344 inner
= simplify_binary_operation (code
, mode
,
4345 XEXP (XEXP (x
, 0), 0),
4350 return simplify_gen_binary (code
, mode
, other
, inner
);
4354 /* A little bit of algebraic simplification here. */
4358 /* Ensure that our address has any ASHIFTs converted to MULT in case
4359 address-recognizing predicates are called later. */
4360 temp
= make_compound_operation (XEXP (x
, 0), MEM
);
4361 SUBST (XEXP (x
, 0), temp
);
4365 if (op0_mode
== VOIDmode
)
4366 op0_mode
= GET_MODE (SUBREG_REG (x
));
4368 /* See if this can be moved to simplify_subreg. */
4369 if (CONSTANT_P (SUBREG_REG (x
))
4370 && subreg_lowpart_offset (mode
, op0_mode
) == SUBREG_BYTE (x
)
4371 /* Don't call gen_lowpart if the inner mode
4372 is VOIDmode and we cannot simplify it, as SUBREG without
4373 inner mode is invalid. */
4374 && (GET_MODE (SUBREG_REG (x
)) != VOIDmode
4375 || gen_lowpart_common (mode
, SUBREG_REG (x
))))
4376 return gen_lowpart (mode
, SUBREG_REG (x
));
4378 if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x
))) == MODE_CC
)
4382 temp
= simplify_subreg (mode
, SUBREG_REG (x
), op0_mode
,
4388 /* Don't change the mode of the MEM if that would change the meaning
4390 if (MEM_P (SUBREG_REG (x
))
4391 && (MEM_VOLATILE_P (SUBREG_REG (x
))
4392 || mode_dependent_address_p (XEXP (SUBREG_REG (x
), 0))))
4393 return gen_rtx_CLOBBER (mode
, const0_rtx
);
4395 /* Note that we cannot do any narrowing for non-constants since
4396 we might have been counting on using the fact that some bits were
4397 zero. We now do this in the SET. */
4402 temp
= expand_compound_operation (XEXP (x
, 0));
4404 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
4405 replaced by (lshiftrt X C). This will convert
4406 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
4408 if (GET_CODE (temp
) == ASHIFTRT
4409 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
4410 && INTVAL (XEXP (temp
, 1)) == GET_MODE_BITSIZE (mode
) - 1)
4411 return simplify_shift_const (NULL_RTX
, LSHIFTRT
, mode
, XEXP (temp
, 0),
4412 INTVAL (XEXP (temp
, 1)));
4414 /* If X has only a single bit that might be nonzero, say, bit I, convert
4415 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4416 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
4417 (sign_extract X 1 Y). But only do this if TEMP isn't a register
4418 or a SUBREG of one since we'd be making the expression more
4419 complex if it was just a register. */
4422 && ! (GET_CODE (temp
) == SUBREG
4423 && REG_P (SUBREG_REG (temp
)))
4424 && (i
= exact_log2 (nonzero_bits (temp
, mode
))) >= 0)
4426 rtx temp1
= simplify_shift_const
4427 (NULL_RTX
, ASHIFTRT
, mode
,
4428 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
, temp
,
4429 GET_MODE_BITSIZE (mode
) - 1 - i
),
4430 GET_MODE_BITSIZE (mode
) - 1 - i
);
4432 /* If all we did was surround TEMP with the two shifts, we
4433 haven't improved anything, so don't use it. Otherwise,
4434 we are better off with TEMP1. */
4435 if (GET_CODE (temp1
) != ASHIFTRT
4436 || GET_CODE (XEXP (temp1
, 0)) != ASHIFT
4437 || XEXP (XEXP (temp1
, 0), 0) != temp
)
4443 /* We can't handle truncation to a partial integer mode here
4444 because we don't know the real bitsize of the partial
4446 if (GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
4449 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
4450 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
4451 GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))))
4453 force_to_mode (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
4454 GET_MODE_MASK (mode
), 0));
4456 /* Similarly to what we do in simplify-rtx.c, a truncate of a register
4457 whose value is a comparison can be replaced with a subreg if
4458 STORE_FLAG_VALUE permits. */
4459 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
4460 && ((HOST_WIDE_INT
) STORE_FLAG_VALUE
& ~GET_MODE_MASK (mode
)) == 0
4461 && (temp
= get_last_value (XEXP (x
, 0)))
4462 && COMPARISON_P (temp
))
4463 return gen_lowpart (mode
, XEXP (x
, 0));
4468 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4469 using cc0, in which case we want to leave it as a COMPARE
4470 so we can distinguish it from a register-register-copy. */
4471 if (XEXP (x
, 1) == const0_rtx
)
4474 /* x - 0 is the same as x unless x's mode has signed zeros and
4475 allows rounding towards -infinity. Under those conditions,
4477 if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x
, 0)))
4478 && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x
, 0))))
4479 && XEXP (x
, 1) == CONST0_RTX (GET_MODE (XEXP (x
, 0))))
4485 /* (const (const X)) can become (const X). Do it this way rather than
4486 returning the inner CONST since CONST can be shared with a
4488 if (GET_CODE (XEXP (x
, 0)) == CONST
)
4489 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
4494 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4495 can add in an offset. find_split_point will split this address up
4496 again if it doesn't match. */
4497 if (GET_CODE (XEXP (x
, 0)) == HIGH
4498 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)))
4504 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4505 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4506 bit-field and can be replaced by either a sign_extend or a
4507 sign_extract. The `and' may be a zero_extend and the two
4508 <c>, -<c> constants may be reversed. */
4509 if (GET_CODE (XEXP (x
, 0)) == XOR
4510 && GET_CODE (XEXP (x
, 1)) == CONST_INT
4511 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4512 && INTVAL (XEXP (x
, 1)) == -INTVAL (XEXP (XEXP (x
, 0), 1))
4513 && ((i
= exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) >= 0
4514 || (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0)
4515 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
4516 && ((GET_CODE (XEXP (XEXP (x
, 0), 0)) == AND
4517 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 0), 1)) == CONST_INT
4518 && (INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1))
4519 == ((HOST_WIDE_INT
) 1 << (i
+ 1)) - 1))
4520 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) == ZERO_EXTEND
4521 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x
, 0), 0), 0)))
4522 == (unsigned int) i
+ 1))))
4523 return simplify_shift_const
4524 (NULL_RTX
, ASHIFTRT
, mode
,
4525 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
,
4526 XEXP (XEXP (XEXP (x
, 0), 0), 0),
4527 GET_MODE_BITSIZE (mode
) - (i
+ 1)),
4528 GET_MODE_BITSIZE (mode
) - (i
+ 1));
4530 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4531 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4532 the bitsize of the mode - 1. This allows simplification of
4533 "a = (b & 8) == 0;" */
4534 if (XEXP (x
, 1) == constm1_rtx
4535 && !REG_P (XEXP (x
, 0))
4536 && ! (GET_CODE (XEXP (x
, 0)) == SUBREG
4537 && REG_P (SUBREG_REG (XEXP (x
, 0))))
4538 && nonzero_bits (XEXP (x
, 0), mode
) == 1)
4539 return simplify_shift_const (NULL_RTX
, ASHIFTRT
, mode
,
4540 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
,
4541 gen_rtx_XOR (mode
, XEXP (x
, 0), const1_rtx
),
4542 GET_MODE_BITSIZE (mode
) - 1),
4543 GET_MODE_BITSIZE (mode
) - 1);
4545 /* If we are adding two things that have no bits in common, convert
4546 the addition into an IOR. This will often be further simplified,
4547 for example in cases like ((a & 1) + (a & 2)), which can
4550 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
4551 && (nonzero_bits (XEXP (x
, 0), mode
)
4552 & nonzero_bits (XEXP (x
, 1), mode
)) == 0)
4554 /* Try to simplify the expression further. */
4555 rtx tor
= simplify_gen_binary (IOR
, mode
, XEXP (x
, 0), XEXP (x
, 1));
4556 temp
= combine_simplify_rtx (tor
, mode
, in_dest
);
4558 /* If we could, great. If not, do not go ahead with the IOR
4559 replacement, since PLUS appears in many special purpose
4560 address arithmetic instructions. */
4561 if (GET_CODE (temp
) != CLOBBER
&& temp
!= tor
)
4567 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4568 (and <foo> (const_int pow2-1)) */
4569 if (GET_CODE (XEXP (x
, 1)) == AND
4570 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
4571 && exact_log2 (-INTVAL (XEXP (XEXP (x
, 1), 1))) >= 0
4572 && rtx_equal_p (XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)))
4573 return simplify_and_const_int (NULL_RTX
, mode
, XEXP (x
, 0),
4574 -INTVAL (XEXP (XEXP (x
, 1), 1)) - 1);
4578 /* If we have (mult (plus A B) C), apply the distributive law and then
4579 the inverse distributive law to see if things simplify. This
4580 occurs mostly in addresses, often when unrolling loops. */
4582 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
4584 rtx result
= distribute_and_simplify_rtx (x
, 0);
4589 /* Try simplify a*(b/c) as (a*b)/c. */
4590 if (FLOAT_MODE_P (mode
) && flag_unsafe_math_optimizations
4591 && GET_CODE (XEXP (x
, 0)) == DIV
)
4593 rtx tem
= simplify_binary_operation (MULT
, mode
,
4594 XEXP (XEXP (x
, 0), 0),
4597 return simplify_gen_binary (DIV
, mode
, tem
, XEXP (XEXP (x
, 0), 1));
4602 /* If this is a divide by a power of two, treat it as a shift if
4603 its first operand is a shift. */
4604 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4605 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0
4606 && (GET_CODE (XEXP (x
, 0)) == ASHIFT
4607 || GET_CODE (XEXP (x
, 0)) == LSHIFTRT
4608 || GET_CODE (XEXP (x
, 0)) == ASHIFTRT
4609 || GET_CODE (XEXP (x
, 0)) == ROTATE
4610 || GET_CODE (XEXP (x
, 0)) == ROTATERT
))
4611 return simplify_shift_const (NULL_RTX
, LSHIFTRT
, mode
, XEXP (x
, 0), i
);
4615 case GT
: case GTU
: case GE
: case GEU
:
4616 case LT
: case LTU
: case LE
: case LEU
:
4617 case UNEQ
: case LTGT
:
4618 case UNGT
: case UNGE
:
4619 case UNLT
: case UNLE
:
4620 case UNORDERED
: case ORDERED
:
4621 /* If the first operand is a condition code, we can't do anything
4623 if (GET_CODE (XEXP (x
, 0)) == COMPARE
4624 || (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) != MODE_CC
4625 && ! CC0_P (XEXP (x
, 0))))
4627 rtx op0
= XEXP (x
, 0);
4628 rtx op1
= XEXP (x
, 1);
4629 enum rtx_code new_code
;
4631 if (GET_CODE (op0
) == COMPARE
)
4632 op1
= XEXP (op0
, 1), op0
= XEXP (op0
, 0);
4634 /* Simplify our comparison, if possible. */
4635 new_code
= simplify_comparison (code
, &op0
, &op1
);
4637 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4638 if only the low-order bit is possibly nonzero in X (such as when
4639 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4640 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4641 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4644 Remove any ZERO_EXTRACT we made when thinking this was a
4645 comparison. It may now be simpler to use, e.g., an AND. If a
4646 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4647 the call to make_compound_operation in the SET case. */
4649 if (STORE_FLAG_VALUE
== 1
4650 && new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
4651 && op1
== const0_rtx
4652 && mode
== GET_MODE (op0
)
4653 && nonzero_bits (op0
, mode
) == 1)
4654 return gen_lowpart (mode
,
4655 expand_compound_operation (op0
));
4657 else if (STORE_FLAG_VALUE
== 1
4658 && new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
4659 && op1
== const0_rtx
4660 && mode
== GET_MODE (op0
)
4661 && (num_sign_bit_copies (op0
, mode
)
4662 == GET_MODE_BITSIZE (mode
)))
4664 op0
= expand_compound_operation (op0
);
4665 return simplify_gen_unary (NEG
, mode
,
4666 gen_lowpart (mode
, op0
),
4670 else if (STORE_FLAG_VALUE
== 1
4671 && new_code
== EQ
&& GET_MODE_CLASS (mode
) == MODE_INT
4672 && op1
== const0_rtx
4673 && mode
== GET_MODE (op0
)
4674 && nonzero_bits (op0
, mode
) == 1)
4676 op0
= expand_compound_operation (op0
);
4677 return simplify_gen_binary (XOR
, mode
,
4678 gen_lowpart (mode
, op0
),
4682 else if (STORE_FLAG_VALUE
== 1
4683 && new_code
== EQ
&& GET_MODE_CLASS (mode
) == MODE_INT
4684 && op1
== const0_rtx
4685 && mode
== GET_MODE (op0
)
4686 && (num_sign_bit_copies (op0
, mode
)
4687 == GET_MODE_BITSIZE (mode
)))
4689 op0
= expand_compound_operation (op0
);
4690 return plus_constant (gen_lowpart (mode
, op0
), 1);
4693 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4695 if (STORE_FLAG_VALUE
== -1
4696 && new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
4697 && op1
== const0_rtx
4698 && (num_sign_bit_copies (op0
, mode
)
4699 == GET_MODE_BITSIZE (mode
)))
4700 return gen_lowpart (mode
,
4701 expand_compound_operation (op0
));
4703 else if (STORE_FLAG_VALUE
== -1
4704 && new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
4705 && op1
== const0_rtx
4706 && mode
== GET_MODE (op0
)
4707 && nonzero_bits (op0
, mode
) == 1)
4709 op0
= expand_compound_operation (op0
);
4710 return simplify_gen_unary (NEG
, mode
,
4711 gen_lowpart (mode
, op0
),
4715 else if (STORE_FLAG_VALUE
== -1
4716 && new_code
== EQ
&& GET_MODE_CLASS (mode
) == MODE_INT
4717 && op1
== const0_rtx
4718 && mode
== GET_MODE (op0
)
4719 && (num_sign_bit_copies (op0
, mode
)
4720 == GET_MODE_BITSIZE (mode
)))
4722 op0
= expand_compound_operation (op0
);
4723 return simplify_gen_unary (NOT
, mode
,
4724 gen_lowpart (mode
, op0
),
4728 /* If X is 0/1, (eq X 0) is X-1. */
4729 else if (STORE_FLAG_VALUE
== -1
4730 && new_code
== EQ
&& GET_MODE_CLASS (mode
) == MODE_INT
4731 && op1
== const0_rtx
4732 && mode
== GET_MODE (op0
)
4733 && nonzero_bits (op0
, mode
) == 1)
4735 op0
= expand_compound_operation (op0
);
4736 return plus_constant (gen_lowpart (mode
, op0
), -1);
4739 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4740 one bit that might be nonzero, we can convert (ne x 0) to
4741 (ashift x c) where C puts the bit in the sign bit. Remove any
4742 AND with STORE_FLAG_VALUE when we are done, since we are only
4743 going to test the sign bit. */
4744 if (new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
4745 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
4746 && ((STORE_FLAG_VALUE
& GET_MODE_MASK (mode
))
4747 == (unsigned HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (mode
) - 1))
4748 && op1
== const0_rtx
4749 && mode
== GET_MODE (op0
)
4750 && (i
= exact_log2 (nonzero_bits (op0
, mode
))) >= 0)
4752 x
= simplify_shift_const (NULL_RTX
, ASHIFT
, mode
,
4753 expand_compound_operation (op0
),
4754 GET_MODE_BITSIZE (mode
) - 1 - i
);
4755 if (GET_CODE (x
) == AND
&& XEXP (x
, 1) == const_true_rtx
)
4761 /* If the code changed, return a whole new comparison. */
4762 if (new_code
!= code
)
4763 return gen_rtx_fmt_ee (new_code
, mode
, op0
, op1
);
4765 /* Otherwise, keep this operation, but maybe change its operands.
4766 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4767 SUBST (XEXP (x
, 0), op0
);
4768 SUBST (XEXP (x
, 1), op1
);
4773 return simplify_if_then_else (x
);
4779 /* If we are processing SET_DEST, we are done. */
4783 return expand_compound_operation (x
);
4786 return simplify_set (x
);
4790 return simplify_logical (x
);
4797 /* If this is a shift by a constant amount, simplify it. */
4798 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
4799 return simplify_shift_const (x
, code
, mode
, XEXP (x
, 0),
4800 INTVAL (XEXP (x
, 1)));
4802 else if (SHIFT_COUNT_TRUNCATED
&& !REG_P (XEXP (x
, 1)))
4804 force_to_mode (XEXP (x
, 1), GET_MODE (XEXP (x
, 1)),
4806 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x
))))
4818 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
4821 simplify_if_then_else (rtx x
)
4823 enum machine_mode mode
= GET_MODE (x
);
4824 rtx cond
= XEXP (x
, 0);
4825 rtx true_rtx
= XEXP (x
, 1);
4826 rtx false_rtx
= XEXP (x
, 2);
4827 enum rtx_code true_code
= GET_CODE (cond
);
4828 int comparison_p
= COMPARISON_P (cond
);
4831 enum rtx_code false_code
;
4834 /* Simplify storing of the truth value. */
4835 if (comparison_p
&& true_rtx
== const_true_rtx
&& false_rtx
== const0_rtx
)
4836 return simplify_gen_relational (true_code
, mode
, VOIDmode
,
4837 XEXP (cond
, 0), XEXP (cond
, 1));
4839 /* Also when the truth value has to be reversed. */
4841 && true_rtx
== const0_rtx
&& false_rtx
== const_true_rtx
4842 && (reversed
= reversed_comparison (cond
, mode
)))
4845 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4846 in it is being compared against certain values. Get the true and false
4847 comparisons and see if that says anything about the value of each arm. */
4850 && ((false_code
= reversed_comparison_code (cond
, NULL
))
4852 && REG_P (XEXP (cond
, 0)))
4855 rtx from
= XEXP (cond
, 0);
4856 rtx true_val
= XEXP (cond
, 1);
4857 rtx false_val
= true_val
;
4860 /* If FALSE_CODE is EQ, swap the codes and arms. */
4862 if (false_code
== EQ
)
4864 swapped
= 1, true_code
= EQ
, false_code
= NE
;
4865 temp
= true_rtx
, true_rtx
= false_rtx
, false_rtx
= temp
;
4868 /* If we are comparing against zero and the expression being tested has
4869 only a single bit that might be nonzero, that is its value when it is
4870 not equal to zero. Similarly if it is known to be -1 or 0. */
4872 if (true_code
== EQ
&& true_val
== const0_rtx
4873 && exact_log2 (nzb
= nonzero_bits (from
, GET_MODE (from
))) >= 0)
4874 false_code
= EQ
, false_val
= GEN_INT (nzb
);
4875 else if (true_code
== EQ
&& true_val
== const0_rtx
4876 && (num_sign_bit_copies (from
, GET_MODE (from
))
4877 == GET_MODE_BITSIZE (GET_MODE (from
))))
4878 false_code
= EQ
, false_val
= constm1_rtx
;
4880 /* Now simplify an arm if we know the value of the register in the
4881 branch and it is used in the arm. Be careful due to the potential
4882 of locally-shared RTL. */
4884 if (reg_mentioned_p (from
, true_rtx
))
4885 true_rtx
= subst (known_cond (copy_rtx (true_rtx
), true_code
,
4887 pc_rtx
, pc_rtx
, 0, 0);
4888 if (reg_mentioned_p (from
, false_rtx
))
4889 false_rtx
= subst (known_cond (copy_rtx (false_rtx
), false_code
,
4891 pc_rtx
, pc_rtx
, 0, 0);
4893 SUBST (XEXP (x
, 1), swapped
? false_rtx
: true_rtx
);
4894 SUBST (XEXP (x
, 2), swapped
? true_rtx
: false_rtx
);
4896 true_rtx
= XEXP (x
, 1);
4897 false_rtx
= XEXP (x
, 2);
4898 true_code
= GET_CODE (cond
);
4901 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4902 reversed, do so to avoid needing two sets of patterns for
4903 subtract-and-branch insns. Similarly if we have a constant in the true
4904 arm, the false arm is the same as the first operand of the comparison, or
4905 the false arm is more complicated than the true arm. */
4908 && reversed_comparison_code (cond
, NULL
) != UNKNOWN
4909 && (true_rtx
== pc_rtx
4910 || (CONSTANT_P (true_rtx
)
4911 && GET_CODE (false_rtx
) != CONST_INT
&& false_rtx
!= pc_rtx
)
4912 || true_rtx
== const0_rtx
4913 || (OBJECT_P (true_rtx
) && !OBJECT_P (false_rtx
))
4914 || (GET_CODE (true_rtx
) == SUBREG
&& OBJECT_P (SUBREG_REG (true_rtx
))
4915 && !OBJECT_P (false_rtx
))
4916 || reg_mentioned_p (true_rtx
, false_rtx
)
4917 || rtx_equal_p (false_rtx
, XEXP (cond
, 0))))
4919 true_code
= reversed_comparison_code (cond
, NULL
);
4920 SUBST (XEXP (x
, 0), reversed_comparison (cond
, GET_MODE (cond
)));
4921 SUBST (XEXP (x
, 1), false_rtx
);
4922 SUBST (XEXP (x
, 2), true_rtx
);
4924 temp
= true_rtx
, true_rtx
= false_rtx
, false_rtx
= temp
;
4927 /* It is possible that the conditional has been simplified out. */
4928 true_code
= GET_CODE (cond
);
4929 comparison_p
= COMPARISON_P (cond
);
4932 /* If the two arms are identical, we don't need the comparison. */
4934 if (rtx_equal_p (true_rtx
, false_rtx
) && ! side_effects_p (cond
))
4937 /* Convert a == b ? b : a to "a". */
4938 if (true_code
== EQ
&& ! side_effects_p (cond
)
4939 && !HONOR_NANS (mode
)
4940 && rtx_equal_p (XEXP (cond
, 0), false_rtx
)
4941 && rtx_equal_p (XEXP (cond
, 1), true_rtx
))
4943 else if (true_code
== NE
&& ! side_effects_p (cond
)
4944 && !HONOR_NANS (mode
)
4945 && rtx_equal_p (XEXP (cond
, 0), true_rtx
)
4946 && rtx_equal_p (XEXP (cond
, 1), false_rtx
))
4949 /* Look for cases where we have (abs x) or (neg (abs X)). */
4951 if (GET_MODE_CLASS (mode
) == MODE_INT
4952 && GET_CODE (false_rtx
) == NEG
4953 && rtx_equal_p (true_rtx
, XEXP (false_rtx
, 0))
4955 && rtx_equal_p (true_rtx
, XEXP (cond
, 0))
4956 && ! side_effects_p (true_rtx
))
4961 return simplify_gen_unary (ABS
, mode
, true_rtx
, mode
);
4965 simplify_gen_unary (NEG
, mode
,
4966 simplify_gen_unary (ABS
, mode
, true_rtx
, mode
),
4972 /* Look for MIN or MAX. */
4974 if ((! FLOAT_MODE_P (mode
) || flag_unsafe_math_optimizations
)
4976 && rtx_equal_p (XEXP (cond
, 0), true_rtx
)
4977 && rtx_equal_p (XEXP (cond
, 1), false_rtx
)
4978 && ! side_effects_p (cond
))
4983 return simplify_gen_binary (SMAX
, mode
, true_rtx
, false_rtx
);
4986 return simplify_gen_binary (SMIN
, mode
, true_rtx
, false_rtx
);
4989 return simplify_gen_binary (UMAX
, mode
, true_rtx
, false_rtx
);
4992 return simplify_gen_binary (UMIN
, mode
, true_rtx
, false_rtx
);
4997 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4998 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4999 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
5000 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
5001 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
5002 neither 1 or -1, but it isn't worth checking for. */
5004 if ((STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
5006 && GET_MODE_CLASS (mode
) == MODE_INT
5007 && ! side_effects_p (x
))
5009 rtx t
= make_compound_operation (true_rtx
, SET
);
5010 rtx f
= make_compound_operation (false_rtx
, SET
);
5011 rtx cond_op0
= XEXP (cond
, 0);
5012 rtx cond_op1
= XEXP (cond
, 1);
5013 enum rtx_code op
= UNKNOWN
, extend_op
= UNKNOWN
;
5014 enum machine_mode m
= mode
;
5015 rtx z
= 0, c1
= NULL_RTX
;
5017 if ((GET_CODE (t
) == PLUS
|| GET_CODE (t
) == MINUS
5018 || GET_CODE (t
) == IOR
|| GET_CODE (t
) == XOR
5019 || GET_CODE (t
) == ASHIFT
5020 || GET_CODE (t
) == LSHIFTRT
|| GET_CODE (t
) == ASHIFTRT
)
5021 && rtx_equal_p (XEXP (t
, 0), f
))
5022 c1
= XEXP (t
, 1), op
= GET_CODE (t
), z
= f
;
5024 /* If an identity-zero op is commutative, check whether there
5025 would be a match if we swapped the operands. */
5026 else if ((GET_CODE (t
) == PLUS
|| GET_CODE (t
) == IOR
5027 || GET_CODE (t
) == XOR
)
5028 && rtx_equal_p (XEXP (t
, 1), f
))
5029 c1
= XEXP (t
, 0), op
= GET_CODE (t
), z
= f
;
5030 else if (GET_CODE (t
) == SIGN_EXTEND
5031 && (GET_CODE (XEXP (t
, 0)) == PLUS
5032 || GET_CODE (XEXP (t
, 0)) == MINUS
5033 || GET_CODE (XEXP (t
, 0)) == IOR
5034 || GET_CODE (XEXP (t
, 0)) == XOR
5035 || GET_CODE (XEXP (t
, 0)) == ASHIFT
5036 || GET_CODE (XEXP (t
, 0)) == LSHIFTRT
5037 || GET_CODE (XEXP (t
, 0)) == ASHIFTRT
)
5038 && GET_CODE (XEXP (XEXP (t
, 0), 0)) == SUBREG
5039 && subreg_lowpart_p (XEXP (XEXP (t
, 0), 0))
5040 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t
, 0), 0)), f
)
5041 && (num_sign_bit_copies (f
, GET_MODE (f
))
5043 (GET_MODE_BITSIZE (mode
)
5044 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t
, 0), 0))))))
5046 c1
= XEXP (XEXP (t
, 0), 1); z
= f
; op
= GET_CODE (XEXP (t
, 0));
5047 extend_op
= SIGN_EXTEND
;
5048 m
= GET_MODE (XEXP (t
, 0));
5050 else if (GET_CODE (t
) == SIGN_EXTEND
5051 && (GET_CODE (XEXP (t
, 0)) == PLUS
5052 || GET_CODE (XEXP (t
, 0)) == IOR
5053 || GET_CODE (XEXP (t
, 0)) == XOR
)
5054 && GET_CODE (XEXP (XEXP (t
, 0), 1)) == SUBREG
5055 && subreg_lowpart_p (XEXP (XEXP (t
, 0), 1))
5056 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t
, 0), 1)), f
)
5057 && (num_sign_bit_copies (f
, GET_MODE (f
))
5059 (GET_MODE_BITSIZE (mode
)
5060 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t
, 0), 1))))))
5062 c1
= XEXP (XEXP (t
, 0), 0); z
= f
; op
= GET_CODE (XEXP (t
, 0));
5063 extend_op
= SIGN_EXTEND
;
5064 m
= GET_MODE (XEXP (t
, 0));
5066 else if (GET_CODE (t
) == ZERO_EXTEND
5067 && (GET_CODE (XEXP (t
, 0)) == PLUS
5068 || GET_CODE (XEXP (t
, 0)) == MINUS
5069 || GET_CODE (XEXP (t
, 0)) == IOR
5070 || GET_CODE (XEXP (t
, 0)) == XOR
5071 || GET_CODE (XEXP (t
, 0)) == ASHIFT
5072 || GET_CODE (XEXP (t
, 0)) == LSHIFTRT
5073 || GET_CODE (XEXP (t
, 0)) == ASHIFTRT
)
5074 && GET_CODE (XEXP (XEXP (t
, 0), 0)) == SUBREG
5075 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
5076 && subreg_lowpart_p (XEXP (XEXP (t
, 0), 0))
5077 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t
, 0), 0)), f
)
5078 && ((nonzero_bits (f
, GET_MODE (f
))
5079 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t
, 0), 0))))
5082 c1
= XEXP (XEXP (t
, 0), 1); z
= f
; op
= GET_CODE (XEXP (t
, 0));
5083 extend_op
= ZERO_EXTEND
;
5084 m
= GET_MODE (XEXP (t
, 0));
5086 else if (GET_CODE (t
) == ZERO_EXTEND
5087 && (GET_CODE (XEXP (t
, 0)) == PLUS
5088 || GET_CODE (XEXP (t
, 0)) == IOR
5089 || GET_CODE (XEXP (t
, 0)) == XOR
)
5090 && GET_CODE (XEXP (XEXP (t
, 0), 1)) == SUBREG
5091 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
5092 && subreg_lowpart_p (XEXP (XEXP (t
, 0), 1))
5093 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t
, 0), 1)), f
)
5094 && ((nonzero_bits (f
, GET_MODE (f
))
5095 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t
, 0), 1))))
5098 c1
= XEXP (XEXP (t
, 0), 0); z
= f
; op
= GET_CODE (XEXP (t
, 0));
5099 extend_op
= ZERO_EXTEND
;
5100 m
= GET_MODE (XEXP (t
, 0));
5105 temp
= subst (simplify_gen_relational (true_code
, m
, VOIDmode
,
5106 cond_op0
, cond_op1
),
5107 pc_rtx
, pc_rtx
, 0, 0);
5108 temp
= simplify_gen_binary (MULT
, m
, temp
,
5109 simplify_gen_binary (MULT
, m
, c1
,
5111 temp
= subst (temp
, pc_rtx
, pc_rtx
, 0, 0);
5112 temp
= simplify_gen_binary (op
, m
, gen_lowpart (m
, z
), temp
);
5114 if (extend_op
!= UNKNOWN
)
5115 temp
= simplify_gen_unary (extend_op
, mode
, temp
, m
);
5121 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5122 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5123 negation of a single bit, we can convert this operation to a shift. We
5124 can actually do this more generally, but it doesn't seem worth it. */
5126 if (true_code
== NE
&& XEXP (cond
, 1) == const0_rtx
5127 && false_rtx
== const0_rtx
&& GET_CODE (true_rtx
) == CONST_INT
5128 && ((1 == nonzero_bits (XEXP (cond
, 0), mode
)
5129 && (i
= exact_log2 (INTVAL (true_rtx
))) >= 0)
5130 || ((num_sign_bit_copies (XEXP (cond
, 0), mode
)
5131 == GET_MODE_BITSIZE (mode
))
5132 && (i
= exact_log2 (-INTVAL (true_rtx
))) >= 0)))
5134 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
,
5135 gen_lowpart (mode
, XEXP (cond
, 0)), i
);
5137 /* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8. */
5138 if (true_code
== NE
&& XEXP (cond
, 1) == const0_rtx
5139 && false_rtx
== const0_rtx
&& GET_CODE (true_rtx
) == CONST_INT
5140 && GET_MODE (XEXP (cond
, 0)) == mode
5141 && (INTVAL (true_rtx
) & GET_MODE_MASK (mode
))
5142 == nonzero_bits (XEXP (cond
, 0), mode
)
5143 && (i
= exact_log2 (INTVAL (true_rtx
) & GET_MODE_MASK (mode
))) >= 0)
5144 return XEXP (cond
, 0);
5149 /* Simplify X, a SET expression. Return the new expression. */
5152 simplify_set (rtx x
)
5154 rtx src
= SET_SRC (x
);
5155 rtx dest
= SET_DEST (x
);
5156 enum machine_mode mode
5157 = GET_MODE (src
) != VOIDmode
? GET_MODE (src
) : GET_MODE (dest
);
5161 /* (set (pc) (return)) gets written as (return). */
5162 if (GET_CODE (dest
) == PC
&& GET_CODE (src
) == RETURN
)
5165 /* Now that we know for sure which bits of SRC we are using, see if we can
5166 simplify the expression for the object knowing that we only need the
5169 if (GET_MODE_CLASS (mode
) == MODE_INT
5170 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
5172 src
= force_to_mode (src
, mode
, ~(HOST_WIDE_INT
) 0, 0);
5173 SUBST (SET_SRC (x
), src
);
5176 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5177 the comparison result and try to simplify it unless we already have used
5178 undobuf.other_insn. */
5179 if ((GET_MODE_CLASS (mode
) == MODE_CC
5180 || GET_CODE (src
) == COMPARE
5182 && (cc_use
= find_single_use (dest
, subst_insn
, &other_insn
)) != 0
5183 && (undobuf
.other_insn
== 0 || other_insn
== undobuf
.other_insn
)
5184 && COMPARISON_P (*cc_use
)
5185 && rtx_equal_p (XEXP (*cc_use
, 0), dest
))
5187 enum rtx_code old_code
= GET_CODE (*cc_use
);
5188 enum rtx_code new_code
;
5190 int other_changed
= 0;
5191 enum machine_mode compare_mode
= GET_MODE (dest
);
5193 if (GET_CODE (src
) == COMPARE
)
5194 op0
= XEXP (src
, 0), op1
= XEXP (src
, 1);
5196 op0
= src
, op1
= CONST0_RTX (GET_MODE (src
));
5198 tmp
= simplify_relational_operation (old_code
, compare_mode
, VOIDmode
,
5201 new_code
= old_code
;
5202 else if (!CONSTANT_P (tmp
))
5204 new_code
= GET_CODE (tmp
);
5205 op0
= XEXP (tmp
, 0);
5206 op1
= XEXP (tmp
, 1);
5210 rtx pat
= PATTERN (other_insn
);
5211 undobuf
.other_insn
= other_insn
;
5212 SUBST (*cc_use
, tmp
);
5214 /* Attempt to simplify CC user. */
5215 if (GET_CODE (pat
) == SET
)
5217 rtx
new = simplify_rtx (SET_SRC (pat
));
5218 if (new != NULL_RTX
)
5219 SUBST (SET_SRC (pat
), new);
5222 /* Convert X into a no-op move. */
5223 SUBST (SET_DEST (x
), pc_rtx
);
5224 SUBST (SET_SRC (x
), pc_rtx
);
5228 /* Simplify our comparison, if possible. */
5229 new_code
= simplify_comparison (new_code
, &op0
, &op1
);
5231 #ifdef SELECT_CC_MODE
5232 /* If this machine has CC modes other than CCmode, check to see if we
5233 need to use a different CC mode here. */
5234 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_CC
)
5235 compare_mode
= GET_MODE (op0
);
5237 compare_mode
= SELECT_CC_MODE (new_code
, op0
, op1
);
5240 /* If the mode changed, we have to change SET_DEST, the mode in the
5241 compare, and the mode in the place SET_DEST is used. If SET_DEST is
5242 a hard register, just build new versions with the proper mode. If it
5243 is a pseudo, we lose unless it is only time we set the pseudo, in
5244 which case we can safely change its mode. */
5245 if (compare_mode
!= GET_MODE (dest
))
5247 if (can_change_dest_mode (dest
, 0, compare_mode
))
5249 unsigned int regno
= REGNO (dest
);
5252 if (regno
< FIRST_PSEUDO_REGISTER
)
5253 new_dest
= gen_rtx_REG (compare_mode
, regno
);
5256 SUBST_MODE (regno_reg_rtx
[regno
], compare_mode
);
5257 new_dest
= regno_reg_rtx
[regno
];
5260 SUBST (SET_DEST (x
), new_dest
);
5261 SUBST (XEXP (*cc_use
, 0), new_dest
);
5268 #endif /* SELECT_CC_MODE */
5270 /* If the code changed, we have to build a new comparison in
5271 undobuf.other_insn. */
5272 if (new_code
!= old_code
)
5274 int other_changed_previously
= other_changed
;
5275 unsigned HOST_WIDE_INT mask
;
5277 SUBST (*cc_use
, gen_rtx_fmt_ee (new_code
, GET_MODE (*cc_use
),
5281 /* If the only change we made was to change an EQ into an NE or
5282 vice versa, OP0 has only one bit that might be nonzero, and OP1
5283 is zero, check if changing the user of the condition code will
5284 produce a valid insn. If it won't, we can keep the original code
5285 in that insn by surrounding our operation with an XOR. */
5287 if (((old_code
== NE
&& new_code
== EQ
)
5288 || (old_code
== EQ
&& new_code
== NE
))
5289 && ! other_changed_previously
&& op1
== const0_rtx
5290 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
5291 && exact_log2 (mask
= nonzero_bits (op0
, GET_MODE (op0
))) >= 0)
5293 rtx pat
= PATTERN (other_insn
), note
= 0;
5295 if ((recog_for_combine (&pat
, other_insn
, ¬e
) < 0
5296 && ! check_asm_operands (pat
)))
5298 PUT_CODE (*cc_use
, old_code
);
5301 op0
= simplify_gen_binary (XOR
, GET_MODE (op0
),
5302 op0
, GEN_INT (mask
));
5308 undobuf
.other_insn
= other_insn
;
5311 /* If we are now comparing against zero, change our source if
5312 needed. If we do not use cc0, we always have a COMPARE. */
5313 if (op1
== const0_rtx
&& dest
== cc0_rtx
)
5315 SUBST (SET_SRC (x
), op0
);
5321 /* Otherwise, if we didn't previously have a COMPARE in the
5322 correct mode, we need one. */
5323 if (GET_CODE (src
) != COMPARE
|| GET_MODE (src
) != compare_mode
)
5325 SUBST (SET_SRC (x
), gen_rtx_COMPARE (compare_mode
, op0
, op1
));
5328 else if (GET_MODE (op0
) == compare_mode
&& op1
== const0_rtx
)
5330 SUBST(SET_SRC (x
), op0
);
5335 /* Otherwise, update the COMPARE if needed. */
5336 SUBST (XEXP (src
, 0), op0
);
5337 SUBST (XEXP (src
, 1), op1
);
5342 /* Get SET_SRC in a form where we have placed back any
5343 compound expressions. Then do the checks below. */
5344 src
= make_compound_operation (src
, SET
);
5345 SUBST (SET_SRC (x
), src
);
5348 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5349 and X being a REG or (subreg (reg)), we may be able to convert this to
5350 (set (subreg:m2 x) (op)).
5352 We can always do this if M1 is narrower than M2 because that means that
5353 we only care about the low bits of the result.
5355 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5356 perform a narrower operation than requested since the high-order bits will
5357 be undefined. On machine where it is defined, this transformation is safe
5358 as long as M1 and M2 have the same number of words. */
5360 if (GET_CODE (src
) == SUBREG
&& subreg_lowpart_p (src
)
5361 && !OBJECT_P (SUBREG_REG (src
))
5362 && (((GET_MODE_SIZE (GET_MODE (src
)) + (UNITS_PER_WORD
- 1))
5364 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src
)))
5365 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))
5366 #ifndef WORD_REGISTER_OPERATIONS
5367 && (GET_MODE_SIZE (GET_MODE (src
))
5368 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src
))))
5370 #ifdef CANNOT_CHANGE_MODE_CLASS
5371 && ! (REG_P (dest
) && REGNO (dest
) < FIRST_PSEUDO_REGISTER
5372 && REG_CANNOT_CHANGE_MODE_P (REGNO (dest
),
5373 GET_MODE (SUBREG_REG (src
)),
5377 || (GET_CODE (dest
) == SUBREG
5378 && REG_P (SUBREG_REG (dest
)))))
5380 SUBST (SET_DEST (x
),
5381 gen_lowpart (GET_MODE (SUBREG_REG (src
)),
5383 SUBST (SET_SRC (x
), SUBREG_REG (src
));
5385 src
= SET_SRC (x
), dest
= SET_DEST (x
);
5389 /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5392 && GET_CODE (src
) == SUBREG
5393 && subreg_lowpart_p (src
)
5394 && (GET_MODE_BITSIZE (GET_MODE (src
))
5395 < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src
)))))
5397 rtx inner
= SUBREG_REG (src
);
5398 enum machine_mode inner_mode
= GET_MODE (inner
);
5400 /* Here we make sure that we don't have a sign bit on. */
5401 if (GET_MODE_BITSIZE (inner_mode
) <= HOST_BITS_PER_WIDE_INT
5402 && (nonzero_bits (inner
, inner_mode
)
5403 < ((unsigned HOST_WIDE_INT
) 1
5404 << (GET_MODE_BITSIZE (GET_MODE (src
)) - 1))))
5406 SUBST (SET_SRC (x
), inner
);
5412 #ifdef LOAD_EXTEND_OP
5413 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5414 would require a paradoxical subreg. Replace the subreg with a
5415 zero_extend to avoid the reload that would otherwise be required. */
5417 if (GET_CODE (src
) == SUBREG
&& subreg_lowpart_p (src
)
5418 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src
))) != UNKNOWN
5419 && SUBREG_BYTE (src
) == 0
5420 && (GET_MODE_SIZE (GET_MODE (src
))
5421 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src
))))
5422 && MEM_P (SUBREG_REG (src
)))
5425 gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src
))),
5426 GET_MODE (src
), SUBREG_REG (src
)));
5432 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5433 are comparing an item known to be 0 or -1 against 0, use a logical
5434 operation instead. Check for one of the arms being an IOR of the other
5435 arm with some value. We compute three terms to be IOR'ed together. In
5436 practice, at most two will be nonzero. Then we do the IOR's. */
5438 if (GET_CODE (dest
) != PC
5439 && GET_CODE (src
) == IF_THEN_ELSE
5440 && GET_MODE_CLASS (GET_MODE (src
)) == MODE_INT
5441 && (GET_CODE (XEXP (src
, 0)) == EQ
|| GET_CODE (XEXP (src
, 0)) == NE
)
5442 && XEXP (XEXP (src
, 0), 1) == const0_rtx
5443 && GET_MODE (src
) == GET_MODE (XEXP (XEXP (src
, 0), 0))
5444 #ifdef HAVE_conditional_move
5445 && ! can_conditionally_move_p (GET_MODE (src
))
5447 && (num_sign_bit_copies (XEXP (XEXP (src
, 0), 0),
5448 GET_MODE (XEXP (XEXP (src
, 0), 0)))
5449 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src
, 0), 0))))
5450 && ! side_effects_p (src
))
5452 rtx true_rtx
= (GET_CODE (XEXP (src
, 0)) == NE
5453 ? XEXP (src
, 1) : XEXP (src
, 2));
5454 rtx false_rtx
= (GET_CODE (XEXP (src
, 0)) == NE
5455 ? XEXP (src
, 2) : XEXP (src
, 1));
5456 rtx term1
= const0_rtx
, term2
, term3
;
5458 if (GET_CODE (true_rtx
) == IOR
5459 && rtx_equal_p (XEXP (true_rtx
, 0), false_rtx
))
5460 term1
= false_rtx
, true_rtx
= XEXP (true_rtx
, 1), false_rtx
= const0_rtx
;
5461 else if (GET_CODE (true_rtx
) == IOR
5462 && rtx_equal_p (XEXP (true_rtx
, 1), false_rtx
))
5463 term1
= false_rtx
, true_rtx
= XEXP (true_rtx
, 0), false_rtx
= const0_rtx
;
5464 else if (GET_CODE (false_rtx
) == IOR
5465 && rtx_equal_p (XEXP (false_rtx
, 0), true_rtx
))
5466 term1
= true_rtx
, false_rtx
= XEXP (false_rtx
, 1), true_rtx
= const0_rtx
;
5467 else if (GET_CODE (false_rtx
) == IOR
5468 && rtx_equal_p (XEXP (false_rtx
, 1), true_rtx
))
5469 term1
= true_rtx
, false_rtx
= XEXP (false_rtx
, 0), true_rtx
= const0_rtx
;
5471 term2
= simplify_gen_binary (AND
, GET_MODE (src
),
5472 XEXP (XEXP (src
, 0), 0), true_rtx
);
5473 term3
= simplify_gen_binary (AND
, GET_MODE (src
),
5474 simplify_gen_unary (NOT
, GET_MODE (src
),
5475 XEXP (XEXP (src
, 0), 0),
5480 simplify_gen_binary (IOR
, GET_MODE (src
),
5481 simplify_gen_binary (IOR
, GET_MODE (src
),
5488 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5489 whole thing fail. */
5490 if (GET_CODE (src
) == CLOBBER
&& XEXP (src
, 0) == const0_rtx
)
5492 else if (GET_CODE (dest
) == CLOBBER
&& XEXP (dest
, 0) == const0_rtx
)
5495 /* Convert this into a field assignment operation, if possible. */
5496 return make_field_assignment (x
);
5499 /* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
5503 simplify_logical (rtx x
)
5505 enum machine_mode mode
= GET_MODE (x
);
5506 rtx op0
= XEXP (x
, 0);
5507 rtx op1
= XEXP (x
, 1);
5509 switch (GET_CODE (x
))
5512 /* We can call simplify_and_const_int only if we don't lose
5513 any (sign) bits when converting INTVAL (op1) to
5514 "unsigned HOST_WIDE_INT". */
5515 if (GET_CODE (op1
) == CONST_INT
5516 && (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
5517 || INTVAL (op1
) > 0))
5519 x
= simplify_and_const_int (x
, mode
, op0
, INTVAL (op1
));
5520 if (GET_CODE (x
) != AND
)
5527 /* If we have any of (and (ior A B) C) or (and (xor A B) C),
5528 apply the distributive law and then the inverse distributive
5529 law to see if things simplify. */
5530 if (GET_CODE (op0
) == IOR
|| GET_CODE (op0
) == XOR
)
5532 rtx result
= distribute_and_simplify_rtx (x
, 0);
5536 if (GET_CODE (op1
) == IOR
|| GET_CODE (op1
) == XOR
)
5538 rtx result
= distribute_and_simplify_rtx (x
, 1);
5545 /* If we have (ior (and A B) C), apply the distributive law and then
5546 the inverse distributive law to see if things simplify. */
5548 if (GET_CODE (op0
) == AND
)
5550 rtx result
= distribute_and_simplify_rtx (x
, 0);
5555 if (GET_CODE (op1
) == AND
)
5557 rtx result
= distribute_and_simplify_rtx (x
, 1);
5570 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5571 operations" because they can be replaced with two more basic operations.
5572 ZERO_EXTEND is also considered "compound" because it can be replaced with
5573 an AND operation, which is simpler, though only one operation.
5575 The function expand_compound_operation is called with an rtx expression
5576 and will convert it to the appropriate shifts and AND operations,
5577 simplifying at each stage.
5579 The function make_compound_operation is called to convert an expression
5580 consisting of shifts and ANDs into the equivalent compound expression.
5581 It is the inverse of this function, loosely speaking. */
5584 expand_compound_operation (rtx x
)
5586 unsigned HOST_WIDE_INT pos
= 0, len
;
5588 unsigned int modewidth
;
5591 switch (GET_CODE (x
))
5596 /* We can't necessarily use a const_int for a multiword mode;
5597 it depends on implicitly extending the value.
5598 Since we don't know the right way to extend it,
5599 we can't tell whether the implicit way is right.
5601 Even for a mode that is no wider than a const_int,
5602 we can't win, because we need to sign extend one of its bits through
5603 the rest of it, and we don't know which bit. */
5604 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
5607 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5608 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5609 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5610 reloaded. If not for that, MEM's would very rarely be safe.
5612 Reject MODEs bigger than a word, because we might not be able
5613 to reference a two-register group starting with an arbitrary register
5614 (and currently gen_lowpart might crash for a SUBREG). */
5616 if (GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))) > UNITS_PER_WORD
)
5619 /* Reject MODEs that aren't scalar integers because turning vector
5620 or complex modes into shifts causes problems. */
5622 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x
, 0))))
5625 len
= GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)));
5626 /* If the inner object has VOIDmode (the only way this can happen
5627 is if it is an ASM_OPERANDS), we can't do anything since we don't
5628 know how much masking to do. */
5637 /* ... fall through ... */
5640 /* If the operand is a CLOBBER, just return it. */
5641 if (GET_CODE (XEXP (x
, 0)) == CLOBBER
)
5644 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
5645 || GET_CODE (XEXP (x
, 2)) != CONST_INT
5646 || GET_MODE (XEXP (x
, 0)) == VOIDmode
)
5649 /* Reject MODEs that aren't scalar integers because turning vector
5650 or complex modes into shifts causes problems. */
5652 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x
, 0))))
5655 len
= INTVAL (XEXP (x
, 1));
5656 pos
= INTVAL (XEXP (x
, 2));
5658 /* This should stay within the object being extracted, fail otherwise. */
5659 if (len
+ pos
> GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))))
5662 if (BITS_BIG_ENDIAN
)
5663 pos
= GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - len
- pos
;
5670 /* Convert sign extension to zero extension, if we know that the high
5671 bit is not set, as this is easier to optimize. It will be converted
5672 back to cheaper alternative in make_extraction. */
5673 if (GET_CODE (x
) == SIGN_EXTEND
5674 && (GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
5675 && ((nonzero_bits (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)))
5676 & ~(((unsigned HOST_WIDE_INT
)
5677 GET_MODE_MASK (GET_MODE (XEXP (x
, 0))))
5681 rtx temp
= gen_rtx_ZERO_EXTEND (GET_MODE (x
), XEXP (x
, 0));
5682 rtx temp2
= expand_compound_operation (temp
);
5684 /* Make sure this is a profitable operation. */
5685 if (rtx_cost (x
, SET
) > rtx_cost (temp2
, SET
))
5687 else if (rtx_cost (x
, SET
) > rtx_cost (temp
, SET
))
5693 /* We can optimize some special cases of ZERO_EXTEND. */
5694 if (GET_CODE (x
) == ZERO_EXTEND
)
5696 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5697 know that the last value didn't have any inappropriate bits
5699 if (GET_CODE (XEXP (x
, 0)) == TRUNCATE
5700 && GET_MODE (XEXP (XEXP (x
, 0), 0)) == GET_MODE (x
)
5701 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
5702 && (nonzero_bits (XEXP (XEXP (x
, 0), 0), GET_MODE (x
))
5703 & ~GET_MODE_MASK (GET_MODE (XEXP (x
, 0)))) == 0)
5704 return XEXP (XEXP (x
, 0), 0);
5706 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5707 if (GET_CODE (XEXP (x
, 0)) == SUBREG
5708 && GET_MODE (SUBREG_REG (XEXP (x
, 0))) == GET_MODE (x
)
5709 && subreg_lowpart_p (XEXP (x
, 0))
5710 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
5711 && (nonzero_bits (SUBREG_REG (XEXP (x
, 0)), GET_MODE (x
))
5712 & ~GET_MODE_MASK (GET_MODE (XEXP (x
, 0)))) == 0)
5713 return SUBREG_REG (XEXP (x
, 0));
5715 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5716 is a comparison and STORE_FLAG_VALUE permits. This is like
5717 the first case, but it works even when GET_MODE (x) is larger
5718 than HOST_WIDE_INT. */
5719 if (GET_CODE (XEXP (x
, 0)) == TRUNCATE
5720 && GET_MODE (XEXP (XEXP (x
, 0), 0)) == GET_MODE (x
)
5721 && COMPARISON_P (XEXP (XEXP (x
, 0), 0))
5722 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
5723 <= HOST_BITS_PER_WIDE_INT
)
5724 && ((HOST_WIDE_INT
) STORE_FLAG_VALUE
5725 & ~GET_MODE_MASK (GET_MODE (XEXP (x
, 0)))) == 0)
5726 return XEXP (XEXP (x
, 0), 0);
5728 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5729 if (GET_CODE (XEXP (x
, 0)) == SUBREG
5730 && GET_MODE (SUBREG_REG (XEXP (x
, 0))) == GET_MODE (x
)
5731 && subreg_lowpart_p (XEXP (x
, 0))
5732 && COMPARISON_P (SUBREG_REG (XEXP (x
, 0)))
5733 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
5734 <= HOST_BITS_PER_WIDE_INT
)
5735 && ((HOST_WIDE_INT
) STORE_FLAG_VALUE
5736 & ~GET_MODE_MASK (GET_MODE (XEXP (x
, 0)))) == 0)
5737 return SUBREG_REG (XEXP (x
, 0));
5741 /* If we reach here, we want to return a pair of shifts. The inner
5742 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5743 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5744 logical depending on the value of UNSIGNEDP.
5746 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5747 converted into an AND of a shift.
5749 We must check for the case where the left shift would have a negative
5750 count. This can happen in a case like (x >> 31) & 255 on machines
5751 that can't shift by a constant. On those machines, we would first
5752 combine the shift with the AND to produce a variable-position
5753 extraction. Then the constant of 31 would be substituted in to produce
5754 a such a position. */
5756 modewidth
= GET_MODE_BITSIZE (GET_MODE (x
));
5757 if (modewidth
+ len
>= pos
)
5759 enum machine_mode mode
= GET_MODE (x
);
5760 tem
= gen_lowpart (mode
, XEXP (x
, 0));
5761 if (!tem
|| GET_CODE (tem
) == CLOBBER
)
5763 tem
= simplify_shift_const (NULL_RTX
, ASHIFT
, mode
,
5764 tem
, modewidth
- pos
- len
);
5765 tem
= simplify_shift_const (NULL_RTX
, unsignedp
? LSHIFTRT
: ASHIFTRT
,
5766 mode
, tem
, modewidth
- len
);
5768 else if (unsignedp
&& len
< HOST_BITS_PER_WIDE_INT
)
5769 tem
= simplify_and_const_int (NULL_RTX
, GET_MODE (x
),
5770 simplify_shift_const (NULL_RTX
, LSHIFTRT
,
5773 ((HOST_WIDE_INT
) 1 << len
) - 1);
5775 /* Any other cases we can't handle. */
5778 /* If we couldn't do this for some reason, return the original
5780 if (GET_CODE (tem
) == CLOBBER
)
5786 /* X is a SET which contains an assignment of one object into
5787 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5788 or certain SUBREGS). If possible, convert it into a series of
5791 We half-heartedly support variable positions, but do not at all
5792 support variable lengths. */
5795 expand_field_assignment (rtx x
)
5798 rtx pos
; /* Always counts from low bit. */
5800 rtx mask
, cleared
, masked
;
5801 enum machine_mode compute_mode
;
5803 /* Loop until we find something we can't simplify. */
5806 if (GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
5807 && GET_CODE (XEXP (SET_DEST (x
), 0)) == SUBREG
)
5809 inner
= SUBREG_REG (XEXP (SET_DEST (x
), 0));
5810 len
= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0)));
5811 pos
= GEN_INT (subreg_lsb (XEXP (SET_DEST (x
), 0)));
5813 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
5814 && GET_CODE (XEXP (SET_DEST (x
), 1)) == CONST_INT
)
5816 inner
= XEXP (SET_DEST (x
), 0);
5817 len
= INTVAL (XEXP (SET_DEST (x
), 1));
5818 pos
= XEXP (SET_DEST (x
), 2);
5820 /* A constant position should stay within the width of INNER. */
5821 if (GET_CODE (pos
) == CONST_INT
5822 && INTVAL (pos
) + len
> GET_MODE_BITSIZE (GET_MODE (inner
)))
5825 if (BITS_BIG_ENDIAN
)
5827 if (GET_CODE (pos
) == CONST_INT
)
5828 pos
= GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner
)) - len
5830 else if (GET_CODE (pos
) == MINUS
5831 && GET_CODE (XEXP (pos
, 1)) == CONST_INT
5832 && (INTVAL (XEXP (pos
, 1))
5833 == GET_MODE_BITSIZE (GET_MODE (inner
)) - len
))
5834 /* If position is ADJUST - X, new position is X. */
5835 pos
= XEXP (pos
, 0);
5837 pos
= simplify_gen_binary (MINUS
, GET_MODE (pos
),
5838 GEN_INT (GET_MODE_BITSIZE (
5845 /* A SUBREG between two modes that occupy the same numbers of words
5846 can be done by moving the SUBREG to the source. */
5847 else if (GET_CODE (SET_DEST (x
)) == SUBREG
5848 /* We need SUBREGs to compute nonzero_bits properly. */
5849 && nonzero_sign_valid
5850 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
5851 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
5852 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
5853 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)))
5855 x
= gen_rtx_SET (VOIDmode
, SUBREG_REG (SET_DEST (x
)),
5857 (GET_MODE (SUBREG_REG (SET_DEST (x
))),
5864 while (GET_CODE (inner
) == SUBREG
&& subreg_lowpart_p (inner
))
5865 inner
= SUBREG_REG (inner
);
5867 compute_mode
= GET_MODE (inner
);
5869 /* Don't attempt bitwise arithmetic on non scalar integer modes. */
5870 if (! SCALAR_INT_MODE_P (compute_mode
))
5872 enum machine_mode imode
;
5874 /* Don't do anything for vector or complex integral types. */
5875 if (! FLOAT_MODE_P (compute_mode
))
5878 /* Try to find an integral mode to pun with. */
5879 imode
= mode_for_size (GET_MODE_BITSIZE (compute_mode
), MODE_INT
, 0);
5880 if (imode
== BLKmode
)
5883 compute_mode
= imode
;
5884 inner
= gen_lowpart (imode
, inner
);
5887 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5888 if (len
>= HOST_BITS_PER_WIDE_INT
)
5891 /* Now compute the equivalent expression. Make a copy of INNER
5892 for the SET_DEST in case it is a MEM into which we will substitute;
5893 we don't want shared RTL in that case. */
5894 mask
= GEN_INT (((HOST_WIDE_INT
) 1 << len
) - 1);
5895 cleared
= simplify_gen_binary (AND
, compute_mode
,
5896 simplify_gen_unary (NOT
, compute_mode
,
5897 simplify_gen_binary (ASHIFT
,
5902 masked
= simplify_gen_binary (ASHIFT
, compute_mode
,
5903 simplify_gen_binary (
5905 gen_lowpart (compute_mode
, SET_SRC (x
)),
5909 x
= gen_rtx_SET (VOIDmode
, copy_rtx (inner
),
5910 simplify_gen_binary (IOR
, compute_mode
,
5917 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5918 it is an RTX that represents a variable starting position; otherwise,
5919 POS is the (constant) starting bit position (counted from the LSB).
5921 UNSIGNEDP is nonzero for an unsigned reference and zero for a
5924 IN_DEST is nonzero if this is a reference in the destination of a
5925 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero,
5926 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5929 IN_COMPARE is nonzero if we are in a COMPARE. This means that a
5930 ZERO_EXTRACT should be built even for bits starting at bit 0.
5932 MODE is the desired mode of the result (if IN_DEST == 0).
5934 The result is an RTX for the extraction or NULL_RTX if the target
5938 make_extraction (enum machine_mode mode
, rtx inner
, HOST_WIDE_INT pos
,
5939 rtx pos_rtx
, unsigned HOST_WIDE_INT len
, int unsignedp
,
5940 int in_dest
, int in_compare
)
5942 /* This mode describes the size of the storage area
5943 to fetch the overall value from. Within that, we
5944 ignore the POS lowest bits, etc. */
5945 enum machine_mode is_mode
= GET_MODE (inner
);
5946 enum machine_mode inner_mode
;
5947 enum machine_mode wanted_inner_mode
;
5948 enum machine_mode wanted_inner_reg_mode
= word_mode
;
5949 enum machine_mode pos_mode
= word_mode
;
5950 enum machine_mode extraction_mode
= word_mode
;
5951 enum machine_mode tmode
= mode_for_size (len
, MODE_INT
, 1);
5953 rtx orig_pos_rtx
= pos_rtx
;
5954 HOST_WIDE_INT orig_pos
;
5956 if (GET_CODE (inner
) == SUBREG
&& subreg_lowpart_p (inner
))
5958 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5959 consider just the QI as the memory to extract from.
5960 The subreg adds or removes high bits; its mode is
5961 irrelevant to the meaning of this extraction,
5962 since POS and LEN count from the lsb. */
5963 if (MEM_P (SUBREG_REG (inner
)))
5964 is_mode
= GET_MODE (SUBREG_REG (inner
));
5965 inner
= SUBREG_REG (inner
);
5967 else if (GET_CODE (inner
) == ASHIFT
5968 && GET_CODE (XEXP (inner
, 1)) == CONST_INT
5969 && pos_rtx
== 0 && pos
== 0
5970 && len
> (unsigned HOST_WIDE_INT
) INTVAL (XEXP (inner
, 1)))
5972 /* We're extracting the least significant bits of an rtx
5973 (ashift X (const_int C)), where LEN > C. Extract the
5974 least significant (LEN - C) bits of X, giving an rtx
5975 whose mode is MODE, then shift it left C times. */
5976 new = make_extraction (mode
, XEXP (inner
, 0),
5977 0, 0, len
- INTVAL (XEXP (inner
, 1)),
5978 unsignedp
, in_dest
, in_compare
);
5980 return gen_rtx_ASHIFT (mode
, new, XEXP (inner
, 1));
5983 inner_mode
= GET_MODE (inner
);
5985 if (pos_rtx
&& GET_CODE (pos_rtx
) == CONST_INT
)
5986 pos
= INTVAL (pos_rtx
), pos_rtx
= 0;
5988 /* See if this can be done without an extraction. We never can if the
5989 width of the field is not the same as that of some integer mode. For
5990 registers, we can only avoid the extraction if the position is at the
5991 low-order bit and this is either not in the destination or we have the
5992 appropriate STRICT_LOW_PART operation available.
5994 For MEM, we can avoid an extract if the field starts on an appropriate
5995 boundary and we can change the mode of the memory reference. */
5997 if (tmode
!= BLKmode
5998 && ((pos_rtx
== 0 && (pos
% BITS_PER_WORD
) == 0
6000 && (inner_mode
== tmode
6002 || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode
),
6003 GET_MODE_BITSIZE (inner_mode
))
6004 || reg_truncated_to_mode (tmode
, inner
))
6007 && have_insn_for (STRICT_LOW_PART
, tmode
))))
6008 || (MEM_P (inner
) && pos_rtx
== 0
6010 % (STRICT_ALIGNMENT
? GET_MODE_ALIGNMENT (tmode
)
6011 : BITS_PER_UNIT
)) == 0
6012 /* We can't do this if we are widening INNER_MODE (it
6013 may not be aligned, for one thing). */
6014 && GET_MODE_BITSIZE (inner_mode
) >= GET_MODE_BITSIZE (tmode
)
6015 && (inner_mode
== tmode
6016 || (! mode_dependent_address_p (XEXP (inner
, 0))
6017 && ! MEM_VOLATILE_P (inner
))))))
6019 /* If INNER is a MEM, make a new MEM that encompasses just the desired
6020 field. If the original and current mode are the same, we need not
6021 adjust the offset. Otherwise, we do if bytes big endian.
6023 If INNER is not a MEM, get a piece consisting of just the field
6024 of interest (in this case POS % BITS_PER_WORD must be 0). */
6028 HOST_WIDE_INT offset
;
6030 /* POS counts from lsb, but make OFFSET count in memory order. */
6031 if (BYTES_BIG_ENDIAN
)
6032 offset
= (GET_MODE_BITSIZE (is_mode
) - len
- pos
) / BITS_PER_UNIT
;
6034 offset
= pos
/ BITS_PER_UNIT
;
6036 new = adjust_address_nv (inner
, tmode
, offset
);
6038 else if (REG_P (inner
))
6040 if (tmode
!= inner_mode
)
6042 /* We can't call gen_lowpart in a DEST since we
6043 always want a SUBREG (see below) and it would sometimes
6044 return a new hard register. */
6047 HOST_WIDE_INT final_word
= pos
/ BITS_PER_WORD
;
6049 if (WORDS_BIG_ENDIAN
6050 && GET_MODE_SIZE (inner_mode
) > UNITS_PER_WORD
)
6051 final_word
= ((GET_MODE_SIZE (inner_mode
)
6052 - GET_MODE_SIZE (tmode
))
6053 / UNITS_PER_WORD
) - final_word
;
6055 final_word
*= UNITS_PER_WORD
;
6056 if (BYTES_BIG_ENDIAN
&&
6057 GET_MODE_SIZE (inner_mode
) > GET_MODE_SIZE (tmode
))
6058 final_word
+= (GET_MODE_SIZE (inner_mode
)
6059 - GET_MODE_SIZE (tmode
)) % UNITS_PER_WORD
;
6061 /* Avoid creating invalid subregs, for example when
6062 simplifying (x>>32)&255. */
6063 if (!validate_subreg (tmode
, inner_mode
, inner
, final_word
))
6066 new = gen_rtx_SUBREG (tmode
, inner
, final_word
);
6069 new = gen_lowpart (tmode
, inner
);
6075 new = force_to_mode (inner
, tmode
,
6076 len
>= HOST_BITS_PER_WIDE_INT
6077 ? ~(unsigned HOST_WIDE_INT
) 0
6078 : ((unsigned HOST_WIDE_INT
) 1 << len
) - 1,
6081 /* If this extraction is going into the destination of a SET,
6082 make a STRICT_LOW_PART unless we made a MEM. */
6085 return (MEM_P (new) ? new
6086 : (GET_CODE (new) != SUBREG
6087 ? gen_rtx_CLOBBER (tmode
, const0_rtx
)
6088 : gen_rtx_STRICT_LOW_PART (VOIDmode
, new)));
6093 if (GET_CODE (new) == CONST_INT
)
6094 return gen_int_mode (INTVAL (new), mode
);
6096 /* If we know that no extraneous bits are set, and that the high
6097 bit is not set, convert the extraction to the cheaper of
6098 sign and zero extension, that are equivalent in these cases. */
6099 if (flag_expensive_optimizations
6100 && (GET_MODE_BITSIZE (tmode
) <= HOST_BITS_PER_WIDE_INT
6101 && ((nonzero_bits (new, tmode
)
6102 & ~(((unsigned HOST_WIDE_INT
)
6103 GET_MODE_MASK (tmode
))
6107 rtx temp
= gen_rtx_ZERO_EXTEND (mode
, new);
6108 rtx temp1
= gen_rtx_SIGN_EXTEND (mode
, new);
6110 /* Prefer ZERO_EXTENSION, since it gives more information to
6112 if (rtx_cost (temp
, SET
) <= rtx_cost (temp1
, SET
))
6117 /* Otherwise, sign- or zero-extend unless we already are in the
6120 return (gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
,
6124 /* Unless this is a COMPARE or we have a funny memory reference,
6125 don't do anything with zero-extending field extracts starting at
6126 the low-order bit since they are simple AND operations. */
6127 if (pos_rtx
== 0 && pos
== 0 && ! in_dest
6128 && ! in_compare
&& unsignedp
)
6131 /* Unless INNER is not MEM, reject this if we would be spanning bytes or
6132 if the position is not a constant and the length is not 1. In all
6133 other cases, we would only be going outside our object in cases when
6134 an original shift would have been undefined. */
6136 && ((pos_rtx
== 0 && pos
+ len
> GET_MODE_BITSIZE (is_mode
))
6137 || (pos_rtx
!= 0 && len
!= 1)))
6140 /* Get the mode to use should INNER not be a MEM, the mode for the position,
6141 and the mode for the result. */
6142 if (in_dest
&& mode_for_extraction (EP_insv
, -1) != MAX_MACHINE_MODE
)
6144 wanted_inner_reg_mode
= mode_for_extraction (EP_insv
, 0);
6145 pos_mode
= mode_for_extraction (EP_insv
, 2);
6146 extraction_mode
= mode_for_extraction (EP_insv
, 3);
6149 if (! in_dest
&& unsignedp
6150 && mode_for_extraction (EP_extzv
, -1) != MAX_MACHINE_MODE
)
6152 wanted_inner_reg_mode
= mode_for_extraction (EP_extzv
, 1);
6153 pos_mode
= mode_for_extraction (EP_extzv
, 3);
6154 extraction_mode
= mode_for_extraction (EP_extzv
, 0);
6157 if (! in_dest
&& ! unsignedp
6158 && mode_for_extraction (EP_extv
, -1) != MAX_MACHINE_MODE
)
6160 wanted_inner_reg_mode
= mode_for_extraction (EP_extv
, 1);
6161 pos_mode
= mode_for_extraction (EP_extv
, 3);
6162 extraction_mode
= mode_for_extraction (EP_extv
, 0);
6165 /* Never narrow an object, since that might not be safe. */
6167 if (mode
!= VOIDmode
6168 && GET_MODE_SIZE (extraction_mode
) < GET_MODE_SIZE (mode
))
6169 extraction_mode
= mode
;
6171 if (pos_rtx
&& GET_MODE (pos_rtx
) != VOIDmode
6172 && GET_MODE_SIZE (pos_mode
) < GET_MODE_SIZE (GET_MODE (pos_rtx
)))
6173 pos_mode
= GET_MODE (pos_rtx
);
6175 /* If this is not from memory, the desired mode is the preferred mode
6176 for an extraction pattern's first input operand, or word_mode if there
6179 wanted_inner_mode
= wanted_inner_reg_mode
;
6182 /* Be careful not to go beyond the extracted object and maintain the
6183 natural alignment of the memory. */
6184 wanted_inner_mode
= smallest_mode_for_size (len
, MODE_INT
);
6185 while (pos
% GET_MODE_BITSIZE (wanted_inner_mode
) + len
6186 > GET_MODE_BITSIZE (wanted_inner_mode
))
6188 wanted_inner_mode
= GET_MODE_WIDER_MODE (wanted_inner_mode
);
6189 gcc_assert (wanted_inner_mode
!= VOIDmode
);
6192 /* If we have to change the mode of memory and cannot, the desired mode
6193 is EXTRACTION_MODE. */
6194 if (inner_mode
!= wanted_inner_mode
6195 && (mode_dependent_address_p (XEXP (inner
, 0))
6196 || MEM_VOLATILE_P (inner
)
6198 wanted_inner_mode
= extraction_mode
;
6203 if (BITS_BIG_ENDIAN
)
6205 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6206 BITS_BIG_ENDIAN style. If position is constant, compute new
6207 position. Otherwise, build subtraction.
6208 Note that POS is relative to the mode of the original argument.
6209 If it's a MEM we need to recompute POS relative to that.
6210 However, if we're extracting from (or inserting into) a register,
6211 we want to recompute POS relative to wanted_inner_mode. */
6212 int width
= (MEM_P (inner
)
6213 ? GET_MODE_BITSIZE (is_mode
)
6214 : GET_MODE_BITSIZE (wanted_inner_mode
));
6217 pos
= width
- len
- pos
;
6220 = gen_rtx_MINUS (GET_MODE (pos_rtx
), GEN_INT (width
- len
), pos_rtx
);
6221 /* POS may be less than 0 now, but we check for that below.
6222 Note that it can only be less than 0 if !MEM_P (inner). */
6225 /* If INNER has a wider mode, and this is a constant extraction, try to
6226 make it smaller and adjust the byte to point to the byte containing
6228 if (wanted_inner_mode
!= VOIDmode
6229 && inner_mode
!= wanted_inner_mode
6231 && GET_MODE_SIZE (wanted_inner_mode
) < GET_MODE_SIZE (is_mode
)
6233 && ! mode_dependent_address_p (XEXP (inner
, 0))
6234 && ! MEM_VOLATILE_P (inner
))
6238 /* The computations below will be correct if the machine is big
6239 endian in both bits and bytes or little endian in bits and bytes.
6240 If it is mixed, we must adjust. */
6242 /* If bytes are big endian and we had a paradoxical SUBREG, we must
6243 adjust OFFSET to compensate. */
6244 if (BYTES_BIG_ENDIAN
6245 && GET_MODE_SIZE (inner_mode
) < GET_MODE_SIZE (is_mode
))
6246 offset
-= GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (inner_mode
);
6248 /* We can now move to the desired byte. */
6249 offset
+= (pos
/ GET_MODE_BITSIZE (wanted_inner_mode
))
6250 * GET_MODE_SIZE (wanted_inner_mode
);
6251 pos
%= GET_MODE_BITSIZE (wanted_inner_mode
);
6253 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
6254 && is_mode
!= wanted_inner_mode
)
6255 offset
= (GET_MODE_SIZE (is_mode
)
6256 - GET_MODE_SIZE (wanted_inner_mode
) - offset
);
6258 inner
= adjust_address_nv (inner
, wanted_inner_mode
, offset
);
6261 /* If INNER is not memory, we can always get it into the proper mode. If we
6262 are changing its mode, POS must be a constant and smaller than the size
6264 else if (!MEM_P (inner
))
6266 if (GET_MODE (inner
) != wanted_inner_mode
6268 || orig_pos
+ len
> GET_MODE_BITSIZE (wanted_inner_mode
)))
6274 inner
= force_to_mode (inner
, wanted_inner_mode
,
6276 || len
+ orig_pos
>= HOST_BITS_PER_WIDE_INT
6277 ? ~(unsigned HOST_WIDE_INT
) 0
6278 : ((((unsigned HOST_WIDE_INT
) 1 << len
) - 1)
6283 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
6284 have to zero extend. Otherwise, we can just use a SUBREG. */
6286 && GET_MODE_SIZE (pos_mode
) > GET_MODE_SIZE (GET_MODE (pos_rtx
)))
6288 rtx temp
= gen_rtx_ZERO_EXTEND (pos_mode
, pos_rtx
);
6290 /* If we know that no extraneous bits are set, and that the high
6291 bit is not set, convert extraction to cheaper one - either
6292 SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
6294 if (flag_expensive_optimizations
6295 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx
)) <= HOST_BITS_PER_WIDE_INT
6296 && ((nonzero_bits (pos_rtx
, GET_MODE (pos_rtx
))
6297 & ~(((unsigned HOST_WIDE_INT
)
6298 GET_MODE_MASK (GET_MODE (pos_rtx
)))
6302 rtx temp1
= gen_rtx_SIGN_EXTEND (pos_mode
, pos_rtx
);
6304 /* Prefer ZERO_EXTENSION, since it gives more information to
6306 if (rtx_cost (temp1
, SET
) < rtx_cost (temp
, SET
))
6311 else if (pos_rtx
!= 0
6312 && GET_MODE_SIZE (pos_mode
) < GET_MODE_SIZE (GET_MODE (pos_rtx
)))
6313 pos_rtx
= gen_lowpart (pos_mode
, pos_rtx
);
6315 /* Make POS_RTX unless we already have it and it is correct. If we don't
6316 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6318 if (pos_rtx
== 0 && orig_pos_rtx
!= 0 && INTVAL (orig_pos_rtx
) == pos
)
6319 pos_rtx
= orig_pos_rtx
;
6321 else if (pos_rtx
== 0)
6322 pos_rtx
= GEN_INT (pos
);
6324 /* Make the required operation. See if we can use existing rtx. */
6325 new = gen_rtx_fmt_eee (unsignedp
? ZERO_EXTRACT
: SIGN_EXTRACT
,
6326 extraction_mode
, inner
, GEN_INT (len
), pos_rtx
);
6328 new = gen_lowpart (mode
, new);
6333 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6334 with any other operations in X. Return X without that shift if so. */
6337 extract_left_shift (rtx x
, int count
)
6339 enum rtx_code code
= GET_CODE (x
);
6340 enum machine_mode mode
= GET_MODE (x
);
6346 /* This is the shift itself. If it is wide enough, we will return
6347 either the value being shifted if the shift count is equal to
6348 COUNT or a shift for the difference. */
6349 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
6350 && INTVAL (XEXP (x
, 1)) >= count
)
6351 return simplify_shift_const (NULL_RTX
, ASHIFT
, mode
, XEXP (x
, 0),
6352 INTVAL (XEXP (x
, 1)) - count
);
6356 if ((tem
= extract_left_shift (XEXP (x
, 0), count
)) != 0)
6357 return simplify_gen_unary (code
, mode
, tem
, mode
);
6361 case PLUS
: case IOR
: case XOR
: case AND
:
6362 /* If we can safely shift this constant and we find the inner shift,
6363 make a new operation. */
6364 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
6365 && (INTVAL (XEXP (x
, 1)) & ((((HOST_WIDE_INT
) 1 << count
)) - 1)) == 0
6366 && (tem
= extract_left_shift (XEXP (x
, 0), count
)) != 0)
6367 return simplify_gen_binary (code
, mode
, tem
,
6368 GEN_INT (INTVAL (XEXP (x
, 1)) >> count
));
6379 /* Look at the expression rooted at X.  Look for expressions
6380 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6381 Form these expressions.
6383 Return the new rtx, usually just X.
6385 Also, for machines like the VAX that don't have logical shift insns,
6386 try to convert logical to arithmetic shift operations in cases where
6387 they are equivalent.  This undoes the canonicalizations to logical
6388 shifts done elsewhere.
6390 We try, as much as possible, to re-use rtl expressions to save memory.
6392 IN_CODE says what kind of expression we are processing.  Normally, it is
6393 SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
6394 being kludges), it is MEM.  When processing the arguments of a comparison
6395 or a COMPARE against zero, it is COMPARE.  */
/* NOTE(review): this extracted text elides some original lines (the jumps
   in the embedded line numbers show where — blank lines, braces, and some
   statements are missing).  The code tokens below are kept byte-identical
   to the extraction; do not treat this copy as compilable.  */
6398 make_compound_operation (rtx x
, enum rtx_code in_code
)
6400 enum rtx_code code
= GET_CODE (x
);
6401 enum machine_mode mode
= GET_MODE (x
);
6402 int mode_width
= GET_MODE_BITSIZE (mode
);
6404 enum rtx_code next_code
;
6410 /* Select the code to be used in recursive calls.  Once we are inside an
6411 address, we stay there.  If we have a comparison, set to COMPARE,
6412 but once inside, go back to our default of SET.  */
6414 next_code
= (code
== MEM
|| code
== PLUS
|| code
== MINUS
? MEM
6415 : ((code
== COMPARE
|| COMPARISON_P (x
))
6416 && XEXP (x
, 1) == const0_rtx
) ? COMPARE
6417 : in_code
== COMPARE
? SET
: in_code
);
6419 /* Process depending on the code of this operation.  If NEW is set
6420 nonzero, it will be returned.  */
/* Inside a MEM address, (ashift X C) is rewritten as (mult X 2^C),
   the canonical address form, after recursing on the operand.  */
6425 /* Convert shifts by constants into multiplications if inside
6427 if (in_code
== MEM
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
6428 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
6429 && INTVAL (XEXP (x
, 1)) >= 0)
6431 new = make_compound_operation (XEXP (x
, 0), next_code
);
6432 new = gen_rtx_MULT (mode
, new,
6433 GEN_INT ((HOST_WIDE_INT
) 1
6434 << INTVAL (XEXP (x
, 1))));
6439 /* If the second operand is not a constant, we can't do anything
6441 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
/* The following else-if chain turns (and ... (2^n - 1)) combined with
   shifts/rotates into ZERO_EXTRACT/ZERO_EXTEND forms via make_extraction.  */
6444 /* If the constant is a power of two minus one and the first operand
6445 is a logical right shift, make an extraction.  */
6446 if (GET_CODE (XEXP (x
, 0)) == LSHIFTRT
6447 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
6449 new = make_compound_operation (XEXP (XEXP (x
, 0), 0), next_code
);
6450 new = make_extraction (mode
, new, 0, XEXP (XEXP (x
, 0), 1), i
, 1,
6451 0, in_code
== COMPARE
);
6454 /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
6455 else if (GET_CODE (XEXP (x
, 0)) == SUBREG
6456 && subreg_lowpart_p (XEXP (x
, 0))
6457 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == LSHIFTRT
6458 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
6460 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x
, 0)), 0),
6462 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x
, 0))), new, 0,
6463 XEXP (SUBREG_REG (XEXP (x
, 0)), 1), i
, 1,
6464 0, in_code
== COMPARE
);
6466 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
6467 else if ((GET_CODE (XEXP (x
, 0)) == XOR
6468 || GET_CODE (XEXP (x
, 0)) == IOR
)
6469 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == LSHIFTRT
6470 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == LSHIFTRT
6471 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
6473 /* Apply the distributive law, and then try to make extractions.  */
6474 new = gen_rtx_fmt_ee (GET_CODE (XEXP (x
, 0)), mode
,
6475 gen_rtx_AND (mode
, XEXP (XEXP (x
, 0), 0),
6477 gen_rtx_AND (mode
, XEXP (XEXP (x
, 0), 1),
6479 new = make_compound_operation (new, in_code
);
6482 /* If we are have (and (rotate X C) M) and C is larger than the number
6483 of bits in M, this is an extraction.  */
6485 else if (GET_CODE (XEXP (x
, 0)) == ROTATE
6486 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
6487 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0
6488 && i
<= INTVAL (XEXP (XEXP (x
, 0), 1)))
6490 new = make_compound_operation (XEXP (XEXP (x
, 0), 0), next_code
);
6491 new = make_extraction (mode
, new,
6492 (GET_MODE_BITSIZE (mode
)
6493 - INTVAL (XEXP (XEXP (x
, 0), 1))),
6494 NULL_RTX
, i
, 1, 0, in_code
== COMPARE
);
6497 /* On machines without logical shifts, if the operand of the AND is
6498 a logical shift and our mask turns off all the propagated sign
6499 bits, we can replace the logical shift with an arithmetic shift.  */
6500 else if (GET_CODE (XEXP (x
, 0)) == LSHIFTRT
6501 && !have_insn_for (LSHIFTRT
, mode
)
6502 && have_insn_for (ASHIFTRT
, mode
)
6503 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
6504 && INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0
6505 && INTVAL (XEXP (XEXP (x
, 0), 1)) < HOST_BITS_PER_WIDE_INT
6506 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
6508 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
6510 mask
>>= INTVAL (XEXP (XEXP (x
, 0), 1));
6511 if ((INTVAL (XEXP (x
, 1)) & ~mask
) == 0)
6513 gen_rtx_ASHIFTRT (mode
,
6514 make_compound_operation
6515 (XEXP (XEXP (x
, 0), 0), next_code
),
6516 XEXP (XEXP (x
, 0), 1)));
6519 /* If the constant is one less than a power of two, this might be
6520 representable by an extraction even if no shift is present.
6521 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6522 we are in a COMPARE.  */
6523 else if ((i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
6524 new = make_extraction (mode
,
6525 make_compound_operation (XEXP (x
, 0),
6527 0, NULL_RTX
, i
, 1, 0, in_code
== COMPARE
);
6529 /* If we are in a comparison and this is an AND with a power of two,
6530 convert this into the appropriate bit extract.  */
6531 else if (in_code
== COMPARE
6532 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0)
6533 new = make_extraction (mode
,
6534 make_compound_operation (XEXP (x
, 0),
6536 i
, NULL_RTX
, 1, 1, 0, 1);
6541 /* If the sign bit is known to be zero, replace this with an
6542 arithmetic shift.  */
6543 if (have_insn_for (ASHIFTRT
, mode
)
6544 && ! have_insn_for (LSHIFTRT
, mode
)
6545 && mode_width
<= HOST_BITS_PER_WIDE_INT
6546 && (nonzero_bits (XEXP (x
, 0), mode
) & (1 << (mode_width
- 1))) == 0)
6548 new = gen_rtx_ASHIFTRT (mode
,
6549 make_compound_operation (XEXP (x
, 0),
6555 /* ... fall through ... */
6561 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6562 this is a SIGN_EXTRACT.  */
6563 if (GET_CODE (rhs
) == CONST_INT
6564 && GET_CODE (lhs
) == ASHIFT
6565 && GET_CODE (XEXP (lhs
, 1)) == CONST_INT
6566 && INTVAL (rhs
) >= INTVAL (XEXP (lhs
, 1)))
6568 new = make_compound_operation (XEXP (lhs
, 0), next_code
);
6569 new = make_extraction (mode
, new,
6570 INTVAL (rhs
) - INTVAL (XEXP (lhs
, 1)),
6571 NULL_RTX
, mode_width
- INTVAL (rhs
),
6572 code
== LSHIFTRT
, 0, in_code
== COMPARE
);
6576 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6577 If so, try to merge the shifts into a SIGN_EXTEND.  We could
6578 also do this for some cases of SIGN_EXTRACT, but it doesn't
6579 seem worth the effort; the case checked for occurs on Alpha.  */
6582 && ! (GET_CODE (lhs
) == SUBREG
6583 && (OBJECT_P (SUBREG_REG (lhs
))))
6584 && GET_CODE (rhs
) == CONST_INT
6585 && INTVAL (rhs
) < HOST_BITS_PER_WIDE_INT
6586 && (new = extract_left_shift (lhs
, INTVAL (rhs
))) != 0)
6587 new = make_extraction (mode
, make_compound_operation (new, next_code
),
6588 0, NULL_RTX
, mode_width
- INTVAL (rhs
),
6589 code
== LSHIFTRT
, 0, in_code
== COMPARE
);
6594 /* Call ourselves recursively on the inner expression.  If we are
6595 narrowing the object and it has a different RTL code from
6596 what it originally did, do this SUBREG as a force_to_mode.  */
6598 tem
= make_compound_operation (SUBREG_REG (x
), in_code
);
6602 simplified
= simplify_subreg (GET_MODE (x
), tem
, GET_MODE (tem
),
6608 if (GET_CODE (tem
) != GET_CODE (SUBREG_REG (x
))
6609 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (tem
))
6610 && subreg_lowpart_p (x
))
6612 rtx newer
= force_to_mode (tem
, mode
, ~(HOST_WIDE_INT
) 0,
6615 /* If we have something other than a SUBREG, we might have
6616 done an expansion, so rerun ourselves.  */
6617 if (GET_CODE (newer
) != SUBREG
)
6618 newer
= make_compound_operation (newer
, in_code
);
6634 x
= gen_lowpart (mode
, new);
6635 code
= GET_CODE (x
);
6638 /* Now recursively process each operand of this operation.  */
6639 fmt
= GET_RTX_FORMAT (code
);
6640 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
6643 new = make_compound_operation (XEXP (x
, i
), next_code
);
6644 SUBST (XEXP (x
, i
), new);
6647 /* If this is a commutative operation, the changes to the operands
6648 may have made it noncanonical.  */
6649 if (COMMUTATIVE_ARITH_P (x
)
6650 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
6653 SUBST (XEXP (x
, 0), XEXP (x
, 1));
6654 SUBST (XEXP (x
, 1), tem
);
6660 /* Given M see if it is a value that would select a field of bits
6661 within an item, but not the entire word. Return -1 if not.
6662 Otherwise, return the starting position of the field, where 0 is the
6665 *PLEN is set to the length of the field. */
6668 get_pos_from_mask (unsigned HOST_WIDE_INT m
, unsigned HOST_WIDE_INT
*plen
)
6670 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6671 int pos
= exact_log2 (m
& -m
);
6675 /* Now shift off the low-order zero bits and see if we have a
6676 power of two minus 1. */
6677 len
= exact_log2 ((m
>> pos
) + 1);
6686 /* If X refers to a register that equals REG in value, replace these
6687 references with REG.  */
/* NOTE(review): this extracted text elides some original lines (the jumps
   in the embedded line numbers show where — declarations, braces, breaks,
   and the rebuild path of the generic fallback are missing).  The code
   tokens below are kept byte-identical to the extraction.  */
6689 canon_reg_for_combine (rtx x
, rtx reg
)
6696 enum rtx_code code
= GET_CODE (x
);
/* Dispatch on the RTX class: for unary/binary/comparison/ternary forms,
   canonicalize each operand recursively and rebuild the expression only
   if some operand actually changed (see the != XEXP (x, n) guards).  */
6697 switch (GET_RTX_CLASS (code
))
6700 op0
= canon_reg_for_combine (XEXP (x
, 0), reg
);
6701 if (op0
!= XEXP (x
, 0))
6702 return simplify_gen_unary (GET_CODE (x
), GET_MODE (x
), op0
,
6707 case RTX_COMM_ARITH
:
6708 op0
= canon_reg_for_combine (XEXP (x
, 0), reg
);
6709 op1
= canon_reg_for_combine (XEXP (x
, 1), reg
);
6710 if (op0
!= XEXP (x
, 0) || op1
!= XEXP (x
, 1))
6711 return simplify_gen_binary (GET_CODE (x
), GET_MODE (x
), op0
, op1
);
6715 case RTX_COMM_COMPARE
:
6716 op0
= canon_reg_for_combine (XEXP (x
, 0), reg
);
6717 op1
= canon_reg_for_combine (XEXP (x
, 1), reg
);
6718 if (op0
!= XEXP (x
, 0) || op1
!= XEXP (x
, 1))
6719 return simplify_gen_relational (GET_CODE (x
), GET_MODE (x
),
6720 GET_MODE (op0
), op0
, op1
);
6724 case RTX_BITFIELD_OPS
:
6725 op0
= canon_reg_for_combine (XEXP (x
, 0), reg
);
6726 op1
= canon_reg_for_combine (XEXP (x
, 1), reg
);
6727 op2
= canon_reg_for_combine (XEXP (x
, 2), reg
);
6728 if (op0
!= XEXP (x
, 0) || op1
!= XEXP (x
, 1) || op2
!= XEXP (x
, 2))
6729 return simplify_gen_ternary (GET_CODE (x
), GET_MODE (x
),
6730 GET_MODE (op0
), op0
, op1
, op2
);
/* A register whose last known value equals REG (or vice versa) is the
   replacement trigger — presumably returning REG here; the return itself
   was elided in the extraction.  */
6735 if (rtx_equal_p (get_last_value (reg
), x
)
6736 || rtx_equal_p (reg
, get_last_value (x
)))
/* Generic fallback: walk every operand slot per the rtx format string,
   recursing into 'e' operands and each element of 'E' vectors.  */
6745 fmt
= GET_RTX_FORMAT (code
);
6747 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
6750 rtx op
= canon_reg_for_combine (XEXP (x
, i
), reg
);
6751 if (op
!= XEXP (x
, i
))
6761 else if (fmt
[i
] == 'E')
6764 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
6766 rtx op
= canon_reg_for_combine (XVECEXP (x
, i
, j
), reg
);
6767 if (op
!= XVECEXP (x
, i
, j
))
6774 XVECEXP (x
, i
, j
) = op
;
6785 /* Return X converted to MODE. If the value is already truncated to
6786 MODE we can just return a subreg even though in the general case we
6787 would need an explicit truncation. */
6790 gen_lowpart_or_truncate (enum machine_mode mode
, rtx x
)
6792 if (GET_MODE_SIZE (GET_MODE (x
)) <= GET_MODE_SIZE (mode
)
6793 || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
6794 GET_MODE_BITSIZE (GET_MODE (x
)))
6795 || (REG_P (x
) && reg_truncated_to_mode (mode
, x
)))
6796 return gen_lowpart (mode
, x
);
6798 return simplify_gen_unary (TRUNCATE
, mode
, x
, GET_MODE (x
));
6801 /* See if X can be simplified knowing that we will only refer to it in
6802 MODE and will only refer to those bits that are nonzero in MASK.
6803 If other bits are being computed or if masking operations are done
6804 that select a superset of the bits in MASK, they can sometimes be
6807 Return a possibly simplified expression, but always convert X to
6808 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
6810 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6811 are all off in X. This is used when X will be complemented, by either
6812 NOT, NEG, or XOR. */
6815 force_to_mode (rtx x
, enum machine_mode mode
, unsigned HOST_WIDE_INT mask
,
6818 enum rtx_code code
= GET_CODE (x
);
6819 int next_select
= just_select
|| code
== XOR
|| code
== NOT
|| code
== NEG
;
6820 enum machine_mode op_mode
;
6821 unsigned HOST_WIDE_INT fuller_mask
, nonzero
;
6824 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6825 code below will do the wrong thing since the mode of such an
6826 expression is VOIDmode.
6828 Also do nothing if X is a CLOBBER; this can happen if X was
6829 the return value from a call to gen_lowpart. */
6830 if (code
== CALL
|| code
== ASM_OPERANDS
|| code
== CLOBBER
)
6833 /* We want to perform the operation is its present mode unless we know
6834 that the operation is valid in MODE, in which case we do the operation
6836 op_mode
= ((GET_MODE_CLASS (mode
) == GET_MODE_CLASS (GET_MODE (x
))
6837 && have_insn_for (code
, mode
))
6838 ? mode
: GET_MODE (x
));
6840 /* It is not valid to do a right-shift in a narrower mode
6841 than the one it came in with. */
6842 if ((code
== LSHIFTRT
|| code
== ASHIFTRT
)
6843 && GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (GET_MODE (x
)))
6844 op_mode
= GET_MODE (x
);
6846 /* Truncate MASK to fit OP_MODE. */
6848 mask
&= GET_MODE_MASK (op_mode
);
6850 /* When we have an arithmetic operation, or a shift whose count we
6851 do not know, we need to assume that all bits up to the highest-order
6852 bit in MASK will be needed. This is how we form such a mask. */
6853 if (mask
& ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1)))
6854 fuller_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6856 fuller_mask
= (((unsigned HOST_WIDE_INT
) 1 << (floor_log2 (mask
) + 1))
6859 /* Determine what bits of X are guaranteed to be (non)zero. */
6860 nonzero
= nonzero_bits (x
, mode
);
6862 /* If none of the bits in X are needed, return a zero. */
6863 if (! just_select
&& (nonzero
& mask
) == 0)
6866 /* If X is a CONST_INT, return a new one. Do this here since the
6867 test below will fail. */
6868 if (GET_CODE (x
) == CONST_INT
)
6870 if (SCALAR_INT_MODE_P (mode
))
6871 return gen_int_mode (INTVAL (x
) & mask
, mode
);
6874 x
= GEN_INT (INTVAL (x
) & mask
);
6875 return gen_lowpart_common (mode
, x
);
6879 /* If X is narrower than MODE and we want all the bits in X's mode, just
6880 get X in the proper mode. */
6881 if (GET_MODE_SIZE (GET_MODE (x
)) < GET_MODE_SIZE (mode
)
6882 && (GET_MODE_MASK (GET_MODE (x
)) & ~mask
) == 0)
6883 return gen_lowpart (mode
, x
);
6888 /* If X is a (clobber (const_int)), return it since we know we are
6889 generating something that won't match. */
6896 x
= expand_compound_operation (x
);
6897 if (GET_CODE (x
) != code
)
6898 return force_to_mode (x
, mode
, mask
, next_select
);
6902 if (subreg_lowpart_p (x
)
6903 /* We can ignore the effect of this SUBREG if it narrows the mode or
6904 if the constant masks to zero all the bits the mode doesn't
6906 && ((GET_MODE_SIZE (GET_MODE (x
))
6907 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
6909 & GET_MODE_MASK (GET_MODE (x
))
6910 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x
)))))))
6911 return force_to_mode (SUBREG_REG (x
), mode
, mask
, next_select
);
6915 /* If this is an AND with a constant, convert it into an AND
6916 whose constant is the AND of that constant with MASK. If it
6917 remains an AND of MASK, delete it since it is redundant. */
6919 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
6921 x
= simplify_and_const_int (x
, op_mode
, XEXP (x
, 0),
6922 mask
& INTVAL (XEXP (x
, 1)));
6924 /* If X is still an AND, see if it is an AND with a mask that
6925 is just some low-order bits. If so, and it is MASK, we don't
6928 if (GET_CODE (x
) == AND
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
6929 && ((INTVAL (XEXP (x
, 1)) & GET_MODE_MASK (GET_MODE (x
)))
6933 /* If it remains an AND, try making another AND with the bits
6934 in the mode mask that aren't in MASK turned on. If the
6935 constant in the AND is wide enough, this might make a
6936 cheaper constant. */
6938 if (GET_CODE (x
) == AND
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
6939 && GET_MODE_MASK (GET_MODE (x
)) != mask
6940 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
)
6942 HOST_WIDE_INT cval
= (INTVAL (XEXP (x
, 1))
6943 | (GET_MODE_MASK (GET_MODE (x
)) & ~mask
));
6944 int width
= GET_MODE_BITSIZE (GET_MODE (x
));
6947 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6948 number, sign extend it. */
6949 if (width
> 0 && width
< HOST_BITS_PER_WIDE_INT
6950 && (cval
& ((HOST_WIDE_INT
) 1 << (width
- 1))) != 0)
6951 cval
|= (HOST_WIDE_INT
) -1 << width
;
6953 y
= simplify_gen_binary (AND
, GET_MODE (x
),
6954 XEXP (x
, 0), GEN_INT (cval
));
6955 if (rtx_cost (y
, SET
) < rtx_cost (x
, SET
))
6965 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6966 low-order bits (as in an alignment operation) and FOO is already
6967 aligned to that boundary, mask C1 to that boundary as well.
6968 This may eliminate that PLUS and, later, the AND. */
6971 unsigned int width
= GET_MODE_BITSIZE (mode
);
6972 unsigned HOST_WIDE_INT smask
= mask
;
6974 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6975 number, sign extend it. */
6977 if (width
< HOST_BITS_PER_WIDE_INT
6978 && (smask
& ((HOST_WIDE_INT
) 1 << (width
- 1))) != 0)
6979 smask
|= (HOST_WIDE_INT
) -1 << width
;
6981 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
6982 && exact_log2 (- smask
) >= 0
6983 && (nonzero_bits (XEXP (x
, 0), mode
) & ~smask
) == 0
6984 && (INTVAL (XEXP (x
, 1)) & ~smask
) != 0)
6985 return force_to_mode (plus_constant (XEXP (x
, 0),
6986 (INTVAL (XEXP (x
, 1)) & smask
)),
6987 mode
, smask
, next_select
);
6990 /* ... fall through ... */
6993 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6994 most significant bit in MASK since carries from those bits will
6995 affect the bits we are interested in. */
7000 /* If X is (minus C Y) where C's least set bit is larger than any bit
7001 in the mask, then we may replace with (neg Y). */
7002 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
7003 && (((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 0))
7004 & -INTVAL (XEXP (x
, 0))))
7007 x
= simplify_gen_unary (NEG
, GET_MODE (x
), XEXP (x
, 1),
7009 return force_to_mode (x
, mode
, mask
, next_select
);
7012 /* Similarly, if C contains every bit in the fuller_mask, then we may
7013 replace with (not Y). */
7014 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
7015 && ((INTVAL (XEXP (x
, 0)) | (HOST_WIDE_INT
) fuller_mask
)
7016 == INTVAL (XEXP (x
, 0))))
7018 x
= simplify_gen_unary (NOT
, GET_MODE (x
),
7019 XEXP (x
, 1), GET_MODE (x
));
7020 return force_to_mode (x
, mode
, mask
, next_select
);
7028 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7029 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7030 operation which may be a bitfield extraction. Ensure that the
7031 constant we form is not wider than the mode of X. */
7033 if (GET_CODE (XEXP (x
, 0)) == LSHIFTRT
7034 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
7035 && INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0
7036 && INTVAL (XEXP (XEXP (x
, 0), 1)) < HOST_BITS_PER_WIDE_INT
7037 && GET_CODE (XEXP (x
, 1)) == CONST_INT
7038 && ((INTVAL (XEXP (XEXP (x
, 0), 1))
7039 + floor_log2 (INTVAL (XEXP (x
, 1))))
7040 < GET_MODE_BITSIZE (GET_MODE (x
)))
7041 && (INTVAL (XEXP (x
, 1))
7042 & ~nonzero_bits (XEXP (x
, 0), GET_MODE (x
))) == 0)
7044 temp
= GEN_INT ((INTVAL (XEXP (x
, 1)) & mask
)
7045 << INTVAL (XEXP (XEXP (x
, 0), 1)));
7046 temp
= simplify_gen_binary (GET_CODE (x
), GET_MODE (x
),
7047 XEXP (XEXP (x
, 0), 0), temp
);
7048 x
= simplify_gen_binary (LSHIFTRT
, GET_MODE (x
), temp
,
7049 XEXP (XEXP (x
, 0), 1));
7050 return force_to_mode (x
, mode
, mask
, next_select
);
7054 /* For most binary operations, just propagate into the operation and
7055 change the mode if we have an operation of that mode. */
7057 op0
= gen_lowpart_or_truncate (op_mode
,
7058 force_to_mode (XEXP (x
, 0), mode
, mask
,
7060 op1
= gen_lowpart_or_truncate (op_mode
,
7061 force_to_mode (XEXP (x
, 1), mode
, mask
,
7064 if (op_mode
!= GET_MODE (x
) || op0
!= XEXP (x
, 0) || op1
!= XEXP (x
, 1))
7065 x
= simplify_gen_binary (code
, op_mode
, op0
, op1
);
7069 /* For left shifts, do the same, but just for the first operand.
7070 However, we cannot do anything with shifts where we cannot
7071 guarantee that the counts are smaller than the size of the mode
7072 because such a count will have a different meaning in a
7075 if (! (GET_CODE (XEXP (x
, 1)) == CONST_INT
7076 && INTVAL (XEXP (x
, 1)) >= 0
7077 && INTVAL (XEXP (x
, 1)) < GET_MODE_BITSIZE (mode
))
7078 && ! (GET_MODE (XEXP (x
, 1)) != VOIDmode
7079 && (nonzero_bits (XEXP (x
, 1), GET_MODE (XEXP (x
, 1)))
7080 < (unsigned HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
))))
7083 /* If the shift count is a constant and we can do arithmetic in
7084 the mode of the shift, refine which bits we need. Otherwise, use the
7085 conservative form of the mask. */
7086 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
7087 && INTVAL (XEXP (x
, 1)) >= 0
7088 && INTVAL (XEXP (x
, 1)) < GET_MODE_BITSIZE (op_mode
)
7089 && GET_MODE_BITSIZE (op_mode
) <= HOST_BITS_PER_WIDE_INT
)
7090 mask
>>= INTVAL (XEXP (x
, 1));
7094 op0
= gen_lowpart_or_truncate (op_mode
,
7095 force_to_mode (XEXP (x
, 0), op_mode
,
7096 mask
, next_select
));
7098 if (op_mode
!= GET_MODE (x
) || op0
!= XEXP (x
, 0))
7099 x
= simplify_gen_binary (code
, op_mode
, op0
, XEXP (x
, 1));
7103 /* Here we can only do something if the shift count is a constant,
7104 this shift constant is valid for the host, and we can do arithmetic
7107 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
7108 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
7109 && GET_MODE_BITSIZE (op_mode
) <= HOST_BITS_PER_WIDE_INT
)
7111 rtx inner
= XEXP (x
, 0);
7112 unsigned HOST_WIDE_INT inner_mask
;
7114 /* Select the mask of the bits we need for the shift operand. */
7115 inner_mask
= mask
<< INTVAL (XEXP (x
, 1));
7117 /* We can only change the mode of the shift if we can do arithmetic
7118 in the mode of the shift and INNER_MASK is no wider than the
7119 width of X's mode. */
7120 if ((inner_mask
& ~GET_MODE_MASK (GET_MODE (x
))) != 0)
7121 op_mode
= GET_MODE (x
);
7123 inner
= force_to_mode (inner
, op_mode
, inner_mask
, next_select
);
7125 if (GET_MODE (x
) != op_mode
|| inner
!= XEXP (x
, 0))
7126 x
= simplify_gen_binary (LSHIFTRT
, op_mode
, inner
, XEXP (x
, 1));
7129 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7130 shift and AND produces only copies of the sign bit (C2 is one less
7131 than a power of two), we can do this with just a shift. */
7133 if (GET_CODE (x
) == LSHIFTRT
7134 && GET_CODE (XEXP (x
, 1)) == CONST_INT
7135 /* The shift puts one of the sign bit copies in the least significant
7137 && ((INTVAL (XEXP (x
, 1))
7138 + num_sign_bit_copies (XEXP (x
, 0), GET_MODE (XEXP (x
, 0))))
7139 >= GET_MODE_BITSIZE (GET_MODE (x
)))
7140 && exact_log2 (mask
+ 1) >= 0
7141 /* Number of bits left after the shift must be more than the mask
7143 && ((INTVAL (XEXP (x
, 1)) + exact_log2 (mask
+ 1))
7144 <= GET_MODE_BITSIZE (GET_MODE (x
)))
7145 /* Must be more sign bit copies than the mask needs. */
7146 && ((int) num_sign_bit_copies (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)))
7147 >= exact_log2 (mask
+ 1)))
7148 x
= simplify_gen_binary (LSHIFTRT
, GET_MODE (x
), XEXP (x
, 0),
7149 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x
))
7150 - exact_log2 (mask
+ 1)));
7155 /* If we are just looking for the sign bit, we don't need this shift at
7156 all, even if it has a variable count. */
7157 if (GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
7158 && (mask
== ((unsigned HOST_WIDE_INT
) 1
7159 << (GET_MODE_BITSIZE (GET_MODE (x
)) - 1))))
7160 return force_to_mode (XEXP (x
, 0), mode
, mask
, next_select
);
7162 /* If this is a shift by a constant, get a mask that contains those bits
7163 that are not copies of the sign bit. We then have two cases: If
7164 MASK only includes those bits, this can be a logical shift, which may
7165 allow simplifications. If MASK is a single-bit field not within
7166 those bits, we are requesting a copy of the sign bit and hence can
7167 shift the sign bit to the appropriate location. */
7169 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& INTVAL (XEXP (x
, 1)) >= 0
7170 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
)
7174 /* If the considered data is wider than HOST_WIDE_INT, we can't
7175 represent a mask for all its bits in a single scalar.
7176 But we only care about the lower bits, so calculate these. */
7178 if (GET_MODE_BITSIZE (GET_MODE (x
)) > HOST_BITS_PER_WIDE_INT
)
7180 nonzero
= ~(HOST_WIDE_INT
) 0;
7182 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7183 is the number of bits a full-width mask would have set.
7184 We need only shift if these are fewer than nonzero can
7185 hold. If not, we must keep all bits set in nonzero. */
7187 if (GET_MODE_BITSIZE (GET_MODE (x
)) - INTVAL (XEXP (x
, 1))
7188 < HOST_BITS_PER_WIDE_INT
)
7189 nonzero
>>= INTVAL (XEXP (x
, 1))
7190 + HOST_BITS_PER_WIDE_INT
7191 - GET_MODE_BITSIZE (GET_MODE (x
)) ;
7195 nonzero
= GET_MODE_MASK (GET_MODE (x
));
7196 nonzero
>>= INTVAL (XEXP (x
, 1));
7199 if ((mask
& ~nonzero
) == 0)
7201 x
= simplify_shift_const (NULL_RTX
, LSHIFTRT
, GET_MODE (x
),
7202 XEXP (x
, 0), INTVAL (XEXP (x
, 1)));
7203 if (GET_CODE (x
) != ASHIFTRT
)
7204 return force_to_mode (x
, mode
, mask
, next_select
);
7207 else if ((i
= exact_log2 (mask
)) >= 0)
7209 x
= simplify_shift_const
7210 (NULL_RTX
, LSHIFTRT
, GET_MODE (x
), XEXP (x
, 0),
7211 GET_MODE_BITSIZE (GET_MODE (x
)) - 1 - i
);
7213 if (GET_CODE (x
) != ASHIFTRT
)
7214 return force_to_mode (x
, mode
, mask
, next_select
);
7218 /* If MASK is 1, convert this to an LSHIFTRT. This can be done
7219 even if the shift count isn't a constant. */
7221 x
= simplify_gen_binary (LSHIFTRT
, GET_MODE (x
),
7222 XEXP (x
, 0), XEXP (x
, 1));
7226 /* If this is a zero- or sign-extension operation that just affects bits
7227 we don't care about, remove it. Be sure the call above returned
7228 something that is still a shift. */
7230 if ((GET_CODE (x
) == LSHIFTRT
|| GET_CODE (x
) == ASHIFTRT
)
7231 && GET_CODE (XEXP (x
, 1)) == CONST_INT
7232 && INTVAL (XEXP (x
, 1)) >= 0
7233 && (INTVAL (XEXP (x
, 1))
7234 <= GET_MODE_BITSIZE (GET_MODE (x
)) - (floor_log2 (mask
) + 1))
7235 && GET_CODE (XEXP (x
, 0)) == ASHIFT
7236 && XEXP (XEXP (x
, 0), 1) == XEXP (x
, 1))
7237 return force_to_mode (XEXP (XEXP (x
, 0), 0), mode
, mask
,
7244 /* If the shift count is constant and we can do computations
7245 in the mode of X, compute where the bits we care about are.
7246 Otherwise, we can't do anything. Don't change the mode of
7247 the shift or propagate MODE into the shift, though. */
7248 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
7249 && INTVAL (XEXP (x
, 1)) >= 0)
7251 temp
= simplify_binary_operation (code
== ROTATE
? ROTATERT
: ROTATE
,
7252 GET_MODE (x
), GEN_INT (mask
),
7254 if (temp
&& GET_CODE (temp
) == CONST_INT
)
7256 force_to_mode (XEXP (x
, 0), GET_MODE (x
),
7257 INTVAL (temp
), next_select
));
7262 /* If we just want the low-order bit, the NEG isn't needed since it
7263 won't change the low-order bit. */
7265 return force_to_mode (XEXP (x
, 0), mode
, mask
, just_select
);
7267 /* We need any bits less significant than the most significant bit in
7268 MASK since carries from those bits will affect the bits we are
7274 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7275 same as the XOR case above. Ensure that the constant we form is not
7276 wider than the mode of X. */
7278 if (GET_CODE (XEXP (x
, 0)) == LSHIFTRT
7279 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
7280 && INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0
7281 && (INTVAL (XEXP (XEXP (x
, 0), 1)) + floor_log2 (mask
)
7282 < GET_MODE_BITSIZE (GET_MODE (x
)))
7283 && INTVAL (XEXP (XEXP (x
, 0), 1)) < HOST_BITS_PER_WIDE_INT
)
7285 temp
= gen_int_mode (mask
<< INTVAL (XEXP (XEXP (x
, 0), 1)),
7287 temp
= simplify_gen_binary (XOR
, GET_MODE (x
),
7288 XEXP (XEXP (x
, 0), 0), temp
);
7289 x
= simplify_gen_binary (LSHIFTRT
, GET_MODE (x
),
7290 temp
, XEXP (XEXP (x
, 0), 1));
7292 return force_to_mode (x
, mode
, mask
, next_select
);
7295 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7296 use the full mask inside the NOT. */
7300 op0
= gen_lowpart_or_truncate (op_mode
,
7301 force_to_mode (XEXP (x
, 0), mode
, mask
,
7303 if (op_mode
!= GET_MODE (x
) || op0
!= XEXP (x
, 0))
7304 x
= simplify_gen_unary (code
, op_mode
, op0
, op_mode
);
7308 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7309 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7310 which is equal to STORE_FLAG_VALUE. */
7311 if ((mask
& ~STORE_FLAG_VALUE
) == 0 && XEXP (x
, 1) == const0_rtx
7312 && GET_MODE (XEXP (x
, 0)) == mode
7313 && exact_log2 (nonzero_bits (XEXP (x
, 0), mode
)) >= 0
7314 && (nonzero_bits (XEXP (x
, 0), mode
)
7315 == (unsigned HOST_WIDE_INT
) STORE_FLAG_VALUE
))
7316 return force_to_mode (XEXP (x
, 0), mode
, mask
, next_select
);
7321 /* We have no way of knowing if the IF_THEN_ELSE can itself be
7322 written in a narrower mode. We play it safe and do not do so. */
7325 gen_lowpart_or_truncate (GET_MODE (x
),
7326 force_to_mode (XEXP (x
, 1), mode
,
7327 mask
, next_select
)));
7329 gen_lowpart_or_truncate (GET_MODE (x
),
7330 force_to_mode (XEXP (x
, 2), mode
,
7331 mask
, next_select
)));
7338 /* Ensure we return a value of the proper mode. */
7339 return gen_lowpart_or_truncate (mode
, x
);
7342 /* Return nonzero if X is an expression that has one of two values depending on
7343 whether some other value is zero or nonzero. In that case, we return the
7344 value that is being tested, *PTRUE is set to the value if the rtx being
7345 returned has a nonzero value, and *PFALSE is set to the other alternative.
7347 If we return zero, we set *PTRUE and *PFALSE to X. */
7350 if_then_else_cond (rtx x
, rtx
*ptrue
, rtx
*pfalse
)
7352 enum machine_mode mode
= GET_MODE (x
);
7353 enum rtx_code code
= GET_CODE (x
);
7354 rtx cond0
, cond1
, true0
, true1
, false0
, false1
;
7355 unsigned HOST_WIDE_INT nz
;
7357 /* If we are comparing a value against zero, we are done. */
7358 if ((code
== NE
|| code
== EQ
)
7359 && XEXP (x
, 1) == const0_rtx
)
7361 *ptrue
= (code
== NE
) ? const_true_rtx
: const0_rtx
;
7362 *pfalse
= (code
== NE
) ? const0_rtx
: const_true_rtx
;
7366 /* If this is a unary operation whose operand has one of two values, apply
7367 our opcode to compute those values. */
7368 else if (UNARY_P (x
)
7369 && (cond0
= if_then_else_cond (XEXP (x
, 0), &true0
, &false0
)) != 0)
7371 *ptrue
= simplify_gen_unary (code
, mode
, true0
, GET_MODE (XEXP (x
, 0)));
7372 *pfalse
= simplify_gen_unary (code
, mode
, false0
,
7373 GET_MODE (XEXP (x
, 0)));
7377 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7378 make can't possibly match and would suppress other optimizations. */
7379 else if (code
== COMPARE
)
7382 /* If this is a binary operation, see if either side has only one of two
7383 values. If either one does or if both do and they are conditional on
7384 the same value, compute the new true and false values. */
7385 else if (BINARY_P (x
))
7387 cond0
= if_then_else_cond (XEXP (x
, 0), &true0
, &false0
);
7388 cond1
= if_then_else_cond (XEXP (x
, 1), &true1
, &false1
);
7390 if ((cond0
!= 0 || cond1
!= 0)
7391 && ! (cond0
!= 0 && cond1
!= 0 && ! rtx_equal_p (cond0
, cond1
)))
7393 /* If if_then_else_cond returned zero, then true/false are the
7394 same rtl. We must copy one of them to prevent invalid rtl
7397 true0
= copy_rtx (true0
);
7398 else if (cond1
== 0)
7399 true1
= copy_rtx (true1
);
7401 if (COMPARISON_P (x
))
7403 *ptrue
= simplify_gen_relational (code
, mode
, VOIDmode
,
7405 *pfalse
= simplify_gen_relational (code
, mode
, VOIDmode
,
7410 *ptrue
= simplify_gen_binary (code
, mode
, true0
, true1
);
7411 *pfalse
= simplify_gen_binary (code
, mode
, false0
, false1
);
7414 return cond0
? cond0
: cond1
;
7417 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7418 operands is zero when the other is nonzero, and vice-versa,
7419 and STORE_FLAG_VALUE is 1 or -1. */
7421 if ((STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
7422 && (code
== PLUS
|| code
== IOR
|| code
== XOR
|| code
== MINUS
7424 && GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == MULT
)
7426 rtx op0
= XEXP (XEXP (x
, 0), 1);
7427 rtx op1
= XEXP (XEXP (x
, 1), 1);
7429 cond0
= XEXP (XEXP (x
, 0), 0);
7430 cond1
= XEXP (XEXP (x
, 1), 0);
7432 if (COMPARISON_P (cond0
)
7433 && COMPARISON_P (cond1
)
7434 && ((GET_CODE (cond0
) == reversed_comparison_code (cond1
, NULL
)
7435 && rtx_equal_p (XEXP (cond0
, 0), XEXP (cond1
, 0))
7436 && rtx_equal_p (XEXP (cond0
, 1), XEXP (cond1
, 1)))
7437 || ((swap_condition (GET_CODE (cond0
))
7438 == reversed_comparison_code (cond1
, NULL
))
7439 && rtx_equal_p (XEXP (cond0
, 0), XEXP (cond1
, 1))
7440 && rtx_equal_p (XEXP (cond0
, 1), XEXP (cond1
, 0))))
7441 && ! side_effects_p (x
))
7443 *ptrue
= simplify_gen_binary (MULT
, mode
, op0
, const_true_rtx
);
7444 *pfalse
= simplify_gen_binary (MULT
, mode
,
7446 ? simplify_gen_unary (NEG
, mode
,
7454 /* Similarly for MULT, AND and UMIN, except that for these the result
7456 if ((STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
7457 && (code
== MULT
|| code
== AND
|| code
== UMIN
)
7458 && GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == MULT
)
7460 cond0
= XEXP (XEXP (x
, 0), 0);
7461 cond1
= XEXP (XEXP (x
, 1), 0);
7463 if (COMPARISON_P (cond0
)
7464 && COMPARISON_P (cond1
)
7465 && ((GET_CODE (cond0
) == reversed_comparison_code (cond1
, NULL
)
7466 && rtx_equal_p (XEXP (cond0
, 0), XEXP (cond1
, 0))
7467 && rtx_equal_p (XEXP (cond0
, 1), XEXP (cond1
, 1)))
7468 || ((swap_condition (GET_CODE (cond0
))
7469 == reversed_comparison_code (cond1
, NULL
))
7470 && rtx_equal_p (XEXP (cond0
, 0), XEXP (cond1
, 1))
7471 && rtx_equal_p (XEXP (cond0
, 1), XEXP (cond1
, 0))))
7472 && ! side_effects_p (x
))
7474 *ptrue
= *pfalse
= const0_rtx
;
7480 else if (code
== IF_THEN_ELSE
)
7482 /* If we have IF_THEN_ELSE already, extract the condition and
7483 canonicalize it if it is NE or EQ. */
7484 cond0
= XEXP (x
, 0);
7485 *ptrue
= XEXP (x
, 1), *pfalse
= XEXP (x
, 2);
7486 if (GET_CODE (cond0
) == NE
&& XEXP (cond0
, 1) == const0_rtx
)
7487 return XEXP (cond0
, 0);
7488 else if (GET_CODE (cond0
) == EQ
&& XEXP (cond0
, 1) == const0_rtx
)
7490 *ptrue
= XEXP (x
, 2), *pfalse
= XEXP (x
, 1);
7491 return XEXP (cond0
, 0);
7497 /* If X is a SUBREG, we can narrow both the true and false values
7498 if the inner expression, if there is a condition. */
7499 else if (code
== SUBREG
7500 && 0 != (cond0
= if_then_else_cond (SUBREG_REG (x
),
7503 true0
= simplify_gen_subreg (mode
, true0
,
7504 GET_MODE (SUBREG_REG (x
)), SUBREG_BYTE (x
));
7505 false0
= simplify_gen_subreg (mode
, false0
,
7506 GET_MODE (SUBREG_REG (x
)), SUBREG_BYTE (x
));
7507 if (true0
&& false0
)
7515 /* If X is a constant, this isn't special and will cause confusions
7516 if we treat it as such. Likewise if it is equivalent to a constant. */
7517 else if (CONSTANT_P (x
)
7518 || ((cond0
= get_last_value (x
)) != 0 && CONSTANT_P (cond0
)))
7521 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7522 will be least confusing to the rest of the compiler. */
7523 else if (mode
== BImode
)
7525 *ptrue
= GEN_INT (STORE_FLAG_VALUE
), *pfalse
= const0_rtx
;
7529 /* If X is known to be either 0 or -1, those are the true and
7530 false values when testing X. */
7531 else if (x
== constm1_rtx
|| x
== const0_rtx
7532 || (mode
!= VOIDmode
7533 && num_sign_bit_copies (x
, mode
) == GET_MODE_BITSIZE (mode
)))
7535 *ptrue
= constm1_rtx
, *pfalse
= const0_rtx
;
7539 /* Likewise for 0 or a single bit. */
7540 else if (SCALAR_INT_MODE_P (mode
)
7541 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7542 && exact_log2 (nz
= nonzero_bits (x
, mode
)) >= 0)
7544 *ptrue
= gen_int_mode (nz
, mode
), *pfalse
= const0_rtx
;
7548 /* Otherwise fail; show no condition with true and false values the same. */
7549 *ptrue
= *pfalse
= x
;
7553 /* Return the value of expression X given the fact that condition COND
7554 is known to be true when applied to REG as its first operand and VAL
7555 as its second. X is known to not be shared and so can be modified in
7558 We only handle the simplest cases, and specifically those cases that
7559 arise with IF_THEN_ELSE expressions. */
7562 known_cond (rtx x
, enum rtx_code cond
, rtx reg
, rtx val
)
7564 enum rtx_code code
= GET_CODE (x
);
7569 if (side_effects_p (x
))
7572 /* If either operand of the condition is a floating point value,
7573 then we have to avoid collapsing an EQ comparison. */
7575 && rtx_equal_p (x
, reg
)
7576 && ! FLOAT_MODE_P (GET_MODE (x
))
7577 && ! FLOAT_MODE_P (GET_MODE (val
)))
7580 if (cond
== UNEQ
&& rtx_equal_p (x
, reg
))
7583 /* If X is (abs REG) and we know something about REG's relationship
7584 with zero, we may be able to simplify this. */
7586 if (code
== ABS
&& rtx_equal_p (XEXP (x
, 0), reg
) && val
== const0_rtx
)
7589 case GE
: case GT
: case EQ
:
7592 return simplify_gen_unary (NEG
, GET_MODE (XEXP (x
, 0)),
7594 GET_MODE (XEXP (x
, 0)));
7599 /* The only other cases we handle are MIN, MAX, and comparisons if the
7600 operands are the same as REG and VAL. */
7602 else if (COMPARISON_P (x
) || COMMUTATIVE_ARITH_P (x
))
7604 if (rtx_equal_p (XEXP (x
, 0), val
))
7605 cond
= swap_condition (cond
), temp
= val
, val
= reg
, reg
= temp
;
7607 if (rtx_equal_p (XEXP (x
, 0), reg
) && rtx_equal_p (XEXP (x
, 1), val
))
7609 if (COMPARISON_P (x
))
7611 if (comparison_dominates_p (cond
, code
))
7612 return const_true_rtx
;
7614 code
= reversed_comparison_code (x
, NULL
);
7616 && comparison_dominates_p (cond
, code
))
7621 else if (code
== SMAX
|| code
== SMIN
7622 || code
== UMIN
|| code
== UMAX
)
7624 int unsignedp
= (code
== UMIN
|| code
== UMAX
);
7626 /* Do not reverse the condition when it is NE or EQ.
7627 This is because we cannot conclude anything about
7628 the value of 'SMAX (x, y)' when x is not equal to y,
7629 but we can when x equals y. */
7630 if ((code
== SMAX
|| code
== UMAX
)
7631 && ! (cond
== EQ
|| cond
== NE
))
7632 cond
= reverse_condition (cond
);
7637 return unsignedp
? x
: XEXP (x
, 1);
7639 return unsignedp
? x
: XEXP (x
, 0);
7641 return unsignedp
? XEXP (x
, 1) : x
;
7643 return unsignedp
? XEXP (x
, 0) : x
;
7650 else if (code
== SUBREG
)
7652 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (x
));
7653 rtx
new, r
= known_cond (SUBREG_REG (x
), cond
, reg
, val
);
7655 if (SUBREG_REG (x
) != r
)
7657 /* We must simplify subreg here, before we lose track of the
7658 original inner_mode. */
7659 new = simplify_subreg (GET_MODE (x
), r
,
7660 inner_mode
, SUBREG_BYTE (x
));
7664 SUBST (SUBREG_REG (x
), r
);
7669 /* We don't have to handle SIGN_EXTEND here, because even in the
7670 case of replacing something with a modeless CONST_INT, a
7671 CONST_INT is already (supposed to be) a valid sign extension for
7672 its narrower mode, which implies it's already properly
7673 sign-extended for the wider mode. Now, for ZERO_EXTEND, the
7674 story is different. */
7675 else if (code
== ZERO_EXTEND
)
7677 enum machine_mode inner_mode
= GET_MODE (XEXP (x
, 0));
7678 rtx
new, r
= known_cond (XEXP (x
, 0), cond
, reg
, val
);
7680 if (XEXP (x
, 0) != r
)
7682 /* We must simplify the zero_extend here, before we lose
7683 track of the original inner_mode. */
7684 new = simplify_unary_operation (ZERO_EXTEND
, GET_MODE (x
),
7689 SUBST (XEXP (x
, 0), r
);
7695 fmt
= GET_RTX_FORMAT (code
);
7696 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
7699 SUBST (XEXP (x
, i
), known_cond (XEXP (x
, i
), cond
, reg
, val
));
7700 else if (fmt
[i
] == 'E')
7701 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
7702 SUBST (XVECEXP (x
, i
, j
), known_cond (XVECEXP (x
, i
, j
),
7709 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
7710 assignment as a field assignment. */
7713 rtx_equal_for_field_assignment_p (rtx x
, rtx y
)
7715 if (x
== y
|| rtx_equal_p (x
, y
))
7718 if (x
== 0 || y
== 0 || GET_MODE (x
) != GET_MODE (y
))
7721 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7722 Note that all SUBREGs of MEM are paradoxical; otherwise they
7723 would have been rewritten. */
7724 if (MEM_P (x
) && GET_CODE (y
) == SUBREG
7725 && MEM_P (SUBREG_REG (y
))
7726 && rtx_equal_p (SUBREG_REG (y
),
7727 gen_lowpart (GET_MODE (SUBREG_REG (y
)), x
)))
7730 if (MEM_P (y
) && GET_CODE (x
) == SUBREG
7731 && MEM_P (SUBREG_REG (x
))
7732 && rtx_equal_p (SUBREG_REG (x
),
7733 gen_lowpart (GET_MODE (SUBREG_REG (x
)), y
)))
7736 /* We used to see if get_last_value of X and Y were the same but that's
7737 not correct. In one direction, we'll cause the assignment to have
7738 the wrong destination and in the case, we'll import a register into this
7739 insn that might have already have been dead. So fail if none of the
7740 above cases are true. */
7744 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
7745 Return that assignment if so.
7747 We only handle the most common cases. */
7750 make_field_assignment (rtx x
)
7752 rtx dest
= SET_DEST (x
);
7753 rtx src
= SET_SRC (x
);
7758 unsigned HOST_WIDE_INT len
;
7760 enum machine_mode mode
;
7762 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7763 a clear of a one-bit field. We will have changed it to
7764 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7767 if (GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 0)) == ROTATE
7768 && GET_CODE (XEXP (XEXP (src
, 0), 0)) == CONST_INT
7769 && INTVAL (XEXP (XEXP (src
, 0), 0)) == -2
7770 && rtx_equal_for_field_assignment_p (dest
, XEXP (src
, 1)))
7772 assign
= make_extraction (VOIDmode
, dest
, 0, XEXP (XEXP (src
, 0), 1),
7775 return gen_rtx_SET (VOIDmode
, assign
, const0_rtx
);
7779 if (GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 0)) == SUBREG
7780 && subreg_lowpart_p (XEXP (src
, 0))
7781 && (GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
7782 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src
, 0)))))
7783 && GET_CODE (SUBREG_REG (XEXP (src
, 0))) == ROTATE
7784 && GET_CODE (XEXP (SUBREG_REG (XEXP (src
, 0)), 0)) == CONST_INT
7785 && INTVAL (XEXP (SUBREG_REG (XEXP (src
, 0)), 0)) == -2
7786 && rtx_equal_for_field_assignment_p (dest
, XEXP (src
, 1)))
7788 assign
= make_extraction (VOIDmode
, dest
, 0,
7789 XEXP (SUBREG_REG (XEXP (src
, 0)), 1),
7792 return gen_rtx_SET (VOIDmode
, assign
, const0_rtx
);
7796 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7798 if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 0)) == ASHIFT
7799 && XEXP (XEXP (src
, 0), 0) == const1_rtx
7800 && rtx_equal_for_field_assignment_p (dest
, XEXP (src
, 1)))
7802 assign
= make_extraction (VOIDmode
, dest
, 0, XEXP (XEXP (src
, 0), 1),
7805 return gen_rtx_SET (VOIDmode
, assign
, const1_rtx
);
7809 /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
7810 SRC is an AND with all bits of that field set, then we can discard
7812 if (GET_CODE (dest
) == ZERO_EXTRACT
7813 && GET_CODE (XEXP (dest
, 1)) == CONST_INT
7814 && GET_CODE (src
) == AND
7815 && GET_CODE (XEXP (src
, 1)) == CONST_INT
)
7817 HOST_WIDE_INT width
= INTVAL (XEXP (dest
, 1));
7818 unsigned HOST_WIDE_INT and_mask
= INTVAL (XEXP (src
, 1));
7819 unsigned HOST_WIDE_INT ze_mask
;
7821 if (width
>= HOST_BITS_PER_WIDE_INT
)
7824 ze_mask
= ((unsigned HOST_WIDE_INT
)1 << width
) - 1;
7826 /* Complete overlap. We can remove the source AND. */
7827 if ((and_mask
& ze_mask
) == ze_mask
)
7828 return gen_rtx_SET (VOIDmode
, dest
, XEXP (src
, 0));
7830 /* Partial overlap. We can reduce the source AND. */
7831 if ((and_mask
& ze_mask
) != and_mask
)
7833 mode
= GET_MODE (src
);
7834 src
= gen_rtx_AND (mode
, XEXP (src
, 0),
7835 gen_int_mode (and_mask
& ze_mask
, mode
));
7836 return gen_rtx_SET (VOIDmode
, dest
, src
);
7840 /* The other case we handle is assignments into a constant-position
7841 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
7842 a mask that has all one bits except for a group of zero bits and
7843 OTHER is known to have zeros where C1 has ones, this is such an
7844 assignment. Compute the position and length from C1. Shift OTHER
7845 to the appropriate position, force it to the required mode, and
7846 make the extraction. Check for the AND in both operands. */
7848 if (GET_CODE (src
) != IOR
&& GET_CODE (src
) != XOR
)
7851 rhs
= expand_compound_operation (XEXP (src
, 0));
7852 lhs
= expand_compound_operation (XEXP (src
, 1));
7854 if (GET_CODE (rhs
) == AND
7855 && GET_CODE (XEXP (rhs
, 1)) == CONST_INT
7856 && rtx_equal_for_field_assignment_p (XEXP (rhs
, 0), dest
))
7857 c1
= INTVAL (XEXP (rhs
, 1)), other
= lhs
;
7858 else if (GET_CODE (lhs
) == AND
7859 && GET_CODE (XEXP (lhs
, 1)) == CONST_INT
7860 && rtx_equal_for_field_assignment_p (XEXP (lhs
, 0), dest
))
7861 c1
= INTVAL (XEXP (lhs
, 1)), other
= rhs
;
7865 pos
= get_pos_from_mask ((~c1
) & GET_MODE_MASK (GET_MODE (dest
)), &len
);
7866 if (pos
< 0 || pos
+ len
> GET_MODE_BITSIZE (GET_MODE (dest
))
7867 || GET_MODE_BITSIZE (GET_MODE (dest
)) > HOST_BITS_PER_WIDE_INT
7868 || (c1
& nonzero_bits (other
, GET_MODE (dest
))) != 0)
7871 assign
= make_extraction (VOIDmode
, dest
, pos
, NULL_RTX
, len
, 1, 1, 0);
7875 /* The mode to use for the source is the mode of the assignment, or of
7876 what is inside a possible STRICT_LOW_PART. */
7877 mode
= (GET_CODE (assign
) == STRICT_LOW_PART
7878 ? GET_MODE (XEXP (assign
, 0)) : GET_MODE (assign
));
7880 /* Shift OTHER right POS places and make it the source, restricting it
7881 to the proper length and mode. */
7883 src
= canon_reg_for_combine (simplify_shift_const (NULL_RTX
, LSHIFTRT
,
7887 src
= force_to_mode (src
, mode
,
7888 GET_MODE_BITSIZE (mode
) >= HOST_BITS_PER_WIDE_INT
7889 ? ~(unsigned HOST_WIDE_INT
) 0
7890 : ((unsigned HOST_WIDE_INT
) 1 << len
) - 1,
7893 /* If SRC is masked by an AND that does not make a difference in
7894 the value being stored, strip it. */
7895 if (GET_CODE (assign
) == ZERO_EXTRACT
7896 && GET_CODE (XEXP (assign
, 1)) == CONST_INT
7897 && INTVAL (XEXP (assign
, 1)) < HOST_BITS_PER_WIDE_INT
7898 && GET_CODE (src
) == AND
7899 && GET_CODE (XEXP (src
, 1)) == CONST_INT
7900 && ((unsigned HOST_WIDE_INT
) INTVAL (XEXP (src
, 1))
7901 == ((unsigned HOST_WIDE_INT
) 1 << INTVAL (XEXP (assign
, 1))) - 1))
7902 src
= XEXP (src
, 0);
7904 return gen_rtx_SET (VOIDmode
, assign
, src
);
7907 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7911 apply_distributive_law (rtx x
)
7913 enum rtx_code code
= GET_CODE (x
);
7914 enum rtx_code inner_code
;
7915 rtx lhs
, rhs
, other
;
7918 /* Distributivity is not true for floating point as it can change the
7919 value. So we don't do it unless -funsafe-math-optimizations. */
7920 if (FLOAT_MODE_P (GET_MODE (x
))
7921 && ! flag_unsafe_math_optimizations
)
7924 /* The outer operation can only be one of the following: */
7925 if (code
!= IOR
&& code
!= AND
&& code
!= XOR
7926 && code
!= PLUS
&& code
!= MINUS
)
7932 /* If either operand is a primitive we can't do anything, so get out
7934 if (OBJECT_P (lhs
) || OBJECT_P (rhs
))
7937 lhs
= expand_compound_operation (lhs
);
7938 rhs
= expand_compound_operation (rhs
);
7939 inner_code
= GET_CODE (lhs
);
7940 if (inner_code
!= GET_CODE (rhs
))
7943 /* See if the inner and outer operations distribute. */
7950 /* These all distribute except over PLUS. */
7951 if (code
== PLUS
|| code
== MINUS
)
7956 if (code
!= PLUS
&& code
!= MINUS
)
7961 /* This is also a multiply, so it distributes over everything. */
7965 /* Non-paradoxical SUBREGs distributes over all operations,
7966 provided the inner modes and byte offsets are the same, this
7967 is an extraction of a low-order part, we don't convert an fp
7968 operation to int or vice versa, this is not a vector mode,
7969 and we would not be converting a single-word operation into a
7970 multi-word operation. The latter test is not required, but
7971 it prevents generating unneeded multi-word operations. Some
7972 of the previous tests are redundant given the latter test,
7973 but are retained because they are required for correctness.
7975 We produce the result slightly differently in this case. */
7977 if (GET_MODE (SUBREG_REG (lhs
)) != GET_MODE (SUBREG_REG (rhs
))
7978 || SUBREG_BYTE (lhs
) != SUBREG_BYTE (rhs
)
7979 || ! subreg_lowpart_p (lhs
)
7980 || (GET_MODE_CLASS (GET_MODE (lhs
))
7981 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs
))))
7982 || (GET_MODE_SIZE (GET_MODE (lhs
))
7983 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs
))))
7984 || VECTOR_MODE_P (GET_MODE (lhs
))
7985 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs
))) > UNITS_PER_WORD
7986 /* Result might need to be truncated. Don't change mode if
7987 explicit truncation is needed. */
7988 || !TRULY_NOOP_TRUNCATION
7989 (GET_MODE_BITSIZE (GET_MODE (x
)),
7990 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (lhs
)))))
7993 tem
= simplify_gen_binary (code
, GET_MODE (SUBREG_REG (lhs
)),
7994 SUBREG_REG (lhs
), SUBREG_REG (rhs
));
7995 return gen_lowpart (GET_MODE (x
), tem
);
8001 /* Set LHS and RHS to the inner operands (A and B in the example
8002 above) and set OTHER to the common operand (C in the example).
8003 There is only one way to do this unless the inner operation is
8005 if (COMMUTATIVE_ARITH_P (lhs
)
8006 && rtx_equal_p (XEXP (lhs
, 0), XEXP (rhs
, 0)))
8007 other
= XEXP (lhs
, 0), lhs
= XEXP (lhs
, 1), rhs
= XEXP (rhs
, 1);
8008 else if (COMMUTATIVE_ARITH_P (lhs
)
8009 && rtx_equal_p (XEXP (lhs
, 0), XEXP (rhs
, 1)))
8010 other
= XEXP (lhs
, 0), lhs
= XEXP (lhs
, 1), rhs
= XEXP (rhs
, 0);
8011 else if (COMMUTATIVE_ARITH_P (lhs
)
8012 && rtx_equal_p (XEXP (lhs
, 1), XEXP (rhs
, 0)))
8013 other
= XEXP (lhs
, 1), lhs
= XEXP (lhs
, 0), rhs
= XEXP (rhs
, 1);
8014 else if (rtx_equal_p (XEXP (lhs
, 1), XEXP (rhs
, 1)))
8015 other
= XEXP (lhs
, 1), lhs
= XEXP (lhs
, 0), rhs
= XEXP (rhs
, 0);
8019 /* Form the new inner operation, seeing if it simplifies first. */
8020 tem
= simplify_gen_binary (code
, GET_MODE (x
), lhs
, rhs
);
8022 /* There is one exception to the general way of distributing:
8023 (a | c) ^ (b | c) -> (a ^ b) & ~c */
8024 if (code
== XOR
&& inner_code
== IOR
)
8027 other
= simplify_gen_unary (NOT
, GET_MODE (x
), other
, GET_MODE (x
));
8030 /* We may be able to continuing distributing the result, so call
8031 ourselves recursively on the inner operation before forming the
8032 outer operation, which we return. */
8033 return simplify_gen_binary (inner_code
, GET_MODE (x
),
8034 apply_distributive_law (tem
), other
);
8037 /* See if X is of the form (* (+ A B) C), and if so convert to
8038 (+ (* A C) (* B C)) and try to simplify.
8040 Most of the time, this results in no change. However, if some of
8041 the operands are the same or inverses of each other, simplifications
8044 For example, (and (ior A B) (not B)) can occur as the result of
8045 expanding a bit field assignment. When we apply the distributive
8046 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
8047 which then simplifies to (and (A (not B))).
8049 Note that no checks happen on the validity of applying the inverse
8050 distributive law. This is pointless since we can do it in the
8051 few places where this routine is called.
8053 N is the index of the term that is decomposed (the arithmetic operation,
8054 i.e. (+ A B) in the first example above). !N is the index of the term that
8055 is distributed, i.e. of C in the first example above. */
8057 distribute_and_simplify_rtx (rtx x
, int n
)
8059 enum machine_mode mode
;
8060 enum rtx_code outer_code
, inner_code
;
8061 rtx decomposed
, distributed
, inner_op0
, inner_op1
, new_op0
, new_op1
, tmp
;
8063 decomposed
= XEXP (x
, n
);
8064 if (!ARITHMETIC_P (decomposed
))
8067 mode
= GET_MODE (x
);
8068 outer_code
= GET_CODE (x
);
8069 distributed
= XEXP (x
, !n
);
8071 inner_code
= GET_CODE (decomposed
);
8072 inner_op0
= XEXP (decomposed
, 0);
8073 inner_op1
= XEXP (decomposed
, 1);
8075 /* Special case (and (xor B C) (not A)), which is equivalent to
8076 (xor (ior A B) (ior A C)) */
8077 if (outer_code
== AND
&& inner_code
== XOR
&& GET_CODE (distributed
) == NOT
)
8079 distributed
= XEXP (distributed
, 0);
8085 /* Distribute the second term. */
8086 new_op0
= simplify_gen_binary (outer_code
, mode
, inner_op0
, distributed
);
8087 new_op1
= simplify_gen_binary (outer_code
, mode
, inner_op1
, distributed
);
8091 /* Distribute the first term. */
8092 new_op0
= simplify_gen_binary (outer_code
, mode
, distributed
, inner_op0
);
8093 new_op1
= simplify_gen_binary (outer_code
, mode
, distributed
, inner_op1
);
8096 tmp
= apply_distributive_law (simplify_gen_binary (inner_code
, mode
,
8098 if (GET_CODE (tmp
) != outer_code
8099 && rtx_cost (tmp
, SET
) < rtx_cost (x
, SET
))
8105 /* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
8106 in MODE. Return an equivalent form, if different from (and VAROP
8107 (const_int CONSTOP)). Otherwise, return NULL_RTX. */
8110 simplify_and_const_int_1 (enum machine_mode mode
, rtx varop
,
8111 unsigned HOST_WIDE_INT constop
)
8113 unsigned HOST_WIDE_INT nonzero
;
8114 unsigned HOST_WIDE_INT orig_constop
;
8119 orig_constop
= constop
;
8120 if (GET_CODE (varop
) == CLOBBER
)
8123 /* Simplify VAROP knowing that we will be only looking at some of the
8126 Note by passing in CONSTOP, we guarantee that the bits not set in
8127 CONSTOP are not significant and will never be examined. We must
8128 ensure that is the case by explicitly masking out those bits
8129 before returning. */
8130 varop
= force_to_mode (varop
, mode
, constop
, 0);
8132 /* If VAROP is a CLOBBER, we will fail so return it. */
8133 if (GET_CODE (varop
) == CLOBBER
)
8136 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8137 to VAROP and return the new constant. */
8138 if (GET_CODE (varop
) == CONST_INT
)
8139 return gen_int_mode (INTVAL (varop
) & constop
, mode
);
8141 /* See what bits may be nonzero in VAROP. Unlike the general case of
8142 a call to nonzero_bits, here we don't care about bits outside
8145 nonzero
= nonzero_bits (varop
, mode
) & GET_MODE_MASK (mode
);
8147 /* Turn off all bits in the constant that are known to already be zero.
8148 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8149 which is tested below. */
8153 /* If we don't have any bits left, return zero. */
8157 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8158 a power of two, we can replace this with an ASHIFT. */
8159 if (GET_CODE (varop
) == NEG
&& nonzero_bits (XEXP (varop
, 0), mode
) == 1
8160 && (i
= exact_log2 (constop
)) >= 0)
8161 return simplify_shift_const (NULL_RTX
, ASHIFT
, mode
, XEXP (varop
, 0), i
);
8163 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8164 or XOR, then try to apply the distributive law. This may eliminate
8165 operations if either branch can be simplified because of the AND.
8166 It may also make some cases more complex, but those cases probably
8167 won't match a pattern either with or without this. */
8169 if (GET_CODE (varop
) == IOR
|| GET_CODE (varop
) == XOR
)
8173 apply_distributive_law
8174 (simplify_gen_binary (GET_CODE (varop
), GET_MODE (varop
),
8175 simplify_and_const_int (NULL_RTX
,
8179 simplify_and_const_int (NULL_RTX
,
8184 /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8185 the AND and see if one of the operands simplifies to zero. If so, we
8186 may eliminate it. */
8188 if (GET_CODE (varop
) == PLUS
8189 && exact_log2 (constop
+ 1) >= 0)
8193 o0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (varop
, 0), constop
);
8194 o1
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (varop
, 1), constop
);
8195 if (o0
== const0_rtx
)
8197 if (o1
== const0_rtx
)
8201 /* Make a SUBREG if necessary. If we can't make it, fail. */
8202 varop
= gen_lowpart (mode
, varop
);
8203 if (varop
== NULL_RTX
|| GET_CODE (varop
) == CLOBBER
)
8206 /* If we are only masking insignificant bits, return VAROP. */
8207 if (constop
== nonzero
)
8210 if (varop
== orig_varop
&& constop
== orig_constop
)
8213 /* Otherwise, return an AND. */
8214 return simplify_gen_binary (AND
, mode
, varop
, gen_int_mode (constop
, mode
));
8218 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8221 Return an equivalent form, if different from X. Otherwise, return X. If
8222 X is zero, we are to always construct the equivalent form. */
8225 simplify_and_const_int (rtx x
, enum machine_mode mode
, rtx varop
,
8226 unsigned HOST_WIDE_INT constop
)
8228 rtx tem
= simplify_and_const_int_1 (mode
, varop
, constop
);
8233 x
= simplify_gen_binary (AND
, GET_MODE (varop
), varop
,
8234 gen_int_mode (constop
, mode
));
8235 if (GET_MODE (x
) != mode
)
8236 x
= gen_lowpart (mode
, x
);
8240 /* Given a REG, X, compute which bits in X can be nonzero.
8241 We don't care about bits outside of those defined in MODE.
8243 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
8244 a shift, AND, or zero_extract, we can do better. */
8247 reg_nonzero_bits_for_combine (rtx x
, enum machine_mode mode
,
8248 rtx known_x ATTRIBUTE_UNUSED
,
8249 enum machine_mode known_mode ATTRIBUTE_UNUSED
,
8250 unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED
,
8251 unsigned HOST_WIDE_INT
*nonzero
)
8255 /* If X is a register whose nonzero bits value is current, use it.
8256 Otherwise, if X is a register whose value we can find, use that
8257 value. Otherwise, use the previously-computed global nonzero bits
8258 for this register. */
8260 if (reg_stat
[REGNO (x
)].last_set_value
!= 0
8261 && (reg_stat
[REGNO (x
)].last_set_mode
== mode
8262 || (GET_MODE_CLASS (reg_stat
[REGNO (x
)].last_set_mode
) == MODE_INT
8263 && GET_MODE_CLASS (mode
) == MODE_INT
))
8264 && (reg_stat
[REGNO (x
)].last_set_label
== label_tick
8265 || (REGNO (x
) >= FIRST_PSEUDO_REGISTER
8266 && REG_N_SETS (REGNO (x
)) == 1
8267 && ! REGNO_REG_SET_P
8268 (ENTRY_BLOCK_PTR
->next_bb
->il
.rtl
->global_live_at_start
,
8270 && INSN_CUID (reg_stat
[REGNO (x
)].last_set
) < subst_low_cuid
)
8272 *nonzero
&= reg_stat
[REGNO (x
)].last_set_nonzero_bits
;
8276 tem
= get_last_value (x
);
8280 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8281 /* If X is narrower than MODE and TEM is a non-negative
8282 constant that would appear negative in the mode of X,
8283 sign-extend it for use in reg_nonzero_bits because some
8284 machines (maybe most) will actually do the sign-extension
8285 and this is the conservative approach.
8287 ??? For 2.5, try to tighten up the MD files in this regard
8288 instead of this kludge. */
8290 if (GET_MODE_BITSIZE (GET_MODE (x
)) < GET_MODE_BITSIZE (mode
)
8291 && GET_CODE (tem
) == CONST_INT
8293 && 0 != (INTVAL (tem
)
8294 & ((HOST_WIDE_INT
) 1
8295 << (GET_MODE_BITSIZE (GET_MODE (x
)) - 1))))
8296 tem
= GEN_INT (INTVAL (tem
)
8297 | ((HOST_WIDE_INT
) (-1)
8298 << GET_MODE_BITSIZE (GET_MODE (x
))));
8302 else if (nonzero_sign_valid
&& reg_stat
[REGNO (x
)].nonzero_bits
)
8304 unsigned HOST_WIDE_INT mask
= reg_stat
[REGNO (x
)].nonzero_bits
;
8306 if (GET_MODE_BITSIZE (GET_MODE (x
)) < GET_MODE_BITSIZE (mode
))
8307 /* We don't know anything about the upper bits. */
8308 mask
|= GET_MODE_MASK (mode
) ^ GET_MODE_MASK (GET_MODE (x
));
8315 /* Return the number of bits at the high-order end of X that are known to
8316 be equal to the sign bit. X will be used in mode MODE; if MODE is
8317 VOIDmode, X will be used in its own mode. The returned value will always
8318 be between 1 and the number of bits in MODE. */
8321 reg_num_sign_bit_copies_for_combine (rtx x
, enum machine_mode mode
,
8322 rtx known_x ATTRIBUTE_UNUSED
,
8323 enum machine_mode known_mode
8325 unsigned int known_ret ATTRIBUTE_UNUSED
,
8326 unsigned int *result
)
8330 if (reg_stat
[REGNO (x
)].last_set_value
!= 0
8331 && reg_stat
[REGNO (x
)].last_set_mode
== mode
8332 && (reg_stat
[REGNO (x
)].last_set_label
== label_tick
8333 || (REGNO (x
) >= FIRST_PSEUDO_REGISTER
8334 && REG_N_SETS (REGNO (x
)) == 1
8335 && ! REGNO_REG_SET_P
8336 (ENTRY_BLOCK_PTR
->next_bb
->il
.rtl
->global_live_at_start
,
8338 && INSN_CUID (reg_stat
[REGNO (x
)].last_set
) < subst_low_cuid
)
8340 *result
= reg_stat
[REGNO (x
)].last_set_sign_bit_copies
;
8344 tem
= get_last_value (x
);
8348 if (nonzero_sign_valid
&& reg_stat
[REGNO (x
)].sign_bit_copies
!= 0
8349 && GET_MODE_BITSIZE (GET_MODE (x
)) == GET_MODE_BITSIZE (mode
))
8350 *result
= reg_stat
[REGNO (x
)].sign_bit_copies
;
8355 /* Return the number of "extended" bits there are in X, when interpreted
8356 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8357 unsigned quantities, this is the number of high-order zero bits.
8358 For signed quantities, this is the number of copies of the sign bit
8359 minus 1. In both case, this function returns the number of "spare"
8360 bits. For example, if two quantities for which this function returns
8361 at least 1 are added, the addition is known not to overflow.
8363 This function will always return 0 unless called during combine, which
8364 implies that it must be called from a define_split. */
8367 extended_count (rtx x
, enum machine_mode mode
, int unsignedp
)
8369 if (nonzero_sign_valid
== 0)
8373 ? (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
8374 ? (unsigned int) (GET_MODE_BITSIZE (mode
) - 1
8375 - floor_log2 (nonzero_bits (x
, mode
)))
8377 : num_sign_bit_copies (x
, mode
) - 1);
8380 /* This function is called from `simplify_shift_const' to merge two
8381 outer operations. Specifically, we have already found that we need
8382 to perform operation *POP0 with constant *PCONST0 at the outermost
8383 position. We would now like to also perform OP1 with constant CONST1
8384 (with *POP0 being done last).
8386 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8387 the resulting operation. *PCOMP_P is set to 1 if we would need to
8388 complement the innermost operand, otherwise it is unchanged.
8390 MODE is the mode in which the operation will be done. No bits outside
8391 the width of this mode matter. It is assumed that the width of this mode
8392 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8394 If *POP0 or OP1 are UNKNOWN, it means no operation is required. Only NEG, PLUS,
8395 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8396 result is simply *PCONST0.
8398 If the resulting operation cannot be expressed as one operation, we
8399 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8402 merge_outer_ops (enum rtx_code
*pop0
, HOST_WIDE_INT
*pconst0
, enum rtx_code op1
, HOST_WIDE_INT const1
, enum machine_mode mode
, int *pcomp_p
)
8404 enum rtx_code op0
= *pop0
;
8405 HOST_WIDE_INT const0
= *pconst0
;
8407 const0
&= GET_MODE_MASK (mode
);
8408 const1
&= GET_MODE_MASK (mode
);
8410 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8414 /* If OP0 or OP1 is UNKNOWN, this is easy. Similarly if they are the same or
8417 if (op1
== UNKNOWN
|| op0
== SET
)
8420 else if (op0
== UNKNOWN
)
8421 op0
= op1
, const0
= const1
;
8423 else if (op0
== op1
)
8447 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8448 else if (op0
== PLUS
|| op1
== PLUS
|| op0
== NEG
|| op1
== NEG
)
8451 /* If the two constants aren't the same, we can't do anything. The
8452 remaining six cases can all be done. */
8453 else if (const0
!= const1
)
8461 /* (a & b) | b == b */
8463 else /* op1 == XOR */
8464 /* (a ^ b) | b == a | b */
8470 /* (a & b) ^ b == (~a) & b */
8471 op0
= AND
, *pcomp_p
= 1;
8472 else /* op1 == IOR */
8473 /* (a | b) ^ b == a & ~b */
8474 op0
= AND
, const0
= ~const0
;
8479 /* (a | b) & b == b */
8481 else /* op1 == XOR */
8482 /* (a ^ b) & b) == (~a) & b */
8489 /* Check for NO-OP cases. */
8490 const0
&= GET_MODE_MASK (mode
);
8492 && (op0
== IOR
|| op0
== XOR
|| op0
== PLUS
))
8494 else if (const0
== 0 && op0
== AND
)
8496 else if ((unsigned HOST_WIDE_INT
) const0
== GET_MODE_MASK (mode
)
8500 /* ??? Slightly redundant with the above mask, but not entirely.
8501 Moving this above means we'd have to sign-extend the mode mask
8502 for the final test. */
8503 const0
= trunc_int_for_mode (const0
, mode
);
8511 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8512 The result of the shift is RESULT_MODE. Return NULL_RTX if we cannot
8513 simplify it. Otherwise, return a simplified value.
8515 The shift is normally computed in the widest mode we find in VAROP, as
8516 long as it isn't a different number of words than RESULT_MODE. Exceptions
8517 are ASHIFTRT and ROTATE, which are always done in their original mode. */
8520 simplify_shift_const_1 (enum rtx_code code
, enum machine_mode result_mode
,
8521 rtx varop
, int orig_count
)
8523 enum rtx_code orig_code
= code
;
8524 rtx orig_varop
= varop
;
8526 enum machine_mode mode
= result_mode
;
8527 enum machine_mode shift_mode
, tmode
;
8528 unsigned int mode_words
8529 = (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
8530 /* We form (outer_op (code varop count) (outer_const)). */
8531 enum rtx_code outer_op
= UNKNOWN
;
8532 HOST_WIDE_INT outer_const
= 0;
8533 int complement_p
= 0;
8536 /* Make sure and truncate the "natural" shift on the way in. We don't
8537 want to do this inside the loop as it makes it more difficult to
8539 if (SHIFT_COUNT_TRUNCATED
)
8540 orig_count
&= GET_MODE_BITSIZE (mode
) - 1;
8542 /* If we were given an invalid count, don't do anything except exactly
8543 what was requested. */
8545 if (orig_count
< 0 || orig_count
>= (int) GET_MODE_BITSIZE (mode
))
8550 /* Unless one of the branches of the `if' in this loop does a `continue',
8551 we will `break' the loop after the `if'. */
8555 /* If we have an operand of (clobber (const_int 0)), fail. */
8556 if (GET_CODE (varop
) == CLOBBER
)
8559 /* If we discovered we had to complement VAROP, leave. Making a NOT
8560 here would cause an infinite loop. */
8564 /* Convert ROTATERT to ROTATE. */
8565 if (code
== ROTATERT
)
8567 unsigned int bitsize
= GET_MODE_BITSIZE (result_mode
);;
8569 if (VECTOR_MODE_P (result_mode
))
8570 count
= bitsize
/ GET_MODE_NUNITS (result_mode
) - count
;
8572 count
= bitsize
- count
;
8575 /* We need to determine what mode we will do the shift in. If the
8576 shift is a right shift or a ROTATE, we must always do it in the mode
8577 it was originally done in. Otherwise, we can do it in MODE, the
8578 widest mode encountered. */
8580 = (code
== ASHIFTRT
|| code
== LSHIFTRT
|| code
== ROTATE
8581 ? result_mode
: mode
);
8583 /* Handle cases where the count is greater than the size of the mode
8584 minus 1. For ASHIFT, use the size minus one as the count (this can
8585 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8586 take the count modulo the size. For other shifts, the result is
8589 Since these shifts are being produced by the compiler by combining
8590 multiple operations, each of which are defined, we know what the
8591 result is supposed to be. */
8593 if (count
> (GET_MODE_BITSIZE (shift_mode
) - 1))
8595 if (code
== ASHIFTRT
)
8596 count
= GET_MODE_BITSIZE (shift_mode
) - 1;
8597 else if (code
== ROTATE
|| code
== ROTATERT
)
8598 count
%= GET_MODE_BITSIZE (shift_mode
);
8601 /* We can't simply return zero because there may be an
8609 /* An arithmetic right shift of a quantity known to be -1 or 0
8611 if (code
== ASHIFTRT
8612 && (num_sign_bit_copies (varop
, shift_mode
)
8613 == GET_MODE_BITSIZE (shift_mode
)))
8619 /* If we are doing an arithmetic right shift and discarding all but
8620 the sign bit copies, this is equivalent to doing a shift by the
8621 bitsize minus one. Convert it into that shift because it will often
8622 allow other simplifications. */
8624 if (code
== ASHIFTRT
8625 && (count
+ num_sign_bit_copies (varop
, shift_mode
)
8626 >= GET_MODE_BITSIZE (shift_mode
)))
8627 count
= GET_MODE_BITSIZE (shift_mode
) - 1;
8629 /* We simplify the tests below and elsewhere by converting
8630 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8631 `make_compound_operation' will convert it to an ASHIFTRT for
8632 those machines (such as VAX) that don't have an LSHIFTRT. */
8633 if (GET_MODE_BITSIZE (shift_mode
) <= HOST_BITS_PER_WIDE_INT
8635 && ((nonzero_bits (varop
, shift_mode
)
8636 & ((HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (shift_mode
) - 1)))
8640 if (code
== LSHIFTRT
8641 && GET_MODE_BITSIZE (shift_mode
) <= HOST_BITS_PER_WIDE_INT
8642 && !(nonzero_bits (varop
, shift_mode
) >> count
))
8645 && GET_MODE_BITSIZE (shift_mode
) <= HOST_BITS_PER_WIDE_INT
8646 && !((nonzero_bits (varop
, shift_mode
) << count
)
8647 & GET_MODE_MASK (shift_mode
)))
8650 switch (GET_CODE (varop
))
8656 new = expand_compound_operation (varop
);
8665 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8666 minus the width of a smaller mode, we can do this with a
8667 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8668 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
8669 && ! mode_dependent_address_p (XEXP (varop
, 0))
8670 && ! MEM_VOLATILE_P (varop
)
8671 && (tmode
= mode_for_size (GET_MODE_BITSIZE (mode
) - count
,
8672 MODE_INT
, 1)) != BLKmode
)
8674 new = adjust_address_nv (varop
, tmode
,
8675 BYTES_BIG_ENDIAN
? 0
8676 : count
/ BITS_PER_UNIT
);
8678 varop
= gen_rtx_fmt_e (code
== ASHIFTRT
? SIGN_EXTEND
8679 : ZERO_EXTEND
, mode
, new);
8686 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8687 the same number of words as what we've seen so far. Then store
8688 the widest mode in MODE. */
8689 if (subreg_lowpart_p (varop
)
8690 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop
)))
8691 > GET_MODE_SIZE (GET_MODE (varop
)))
8692 && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop
)))
8693 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
8696 varop
= SUBREG_REG (varop
);
8697 if (GET_MODE_SIZE (GET_MODE (varop
)) > GET_MODE_SIZE (mode
))
8698 mode
= GET_MODE (varop
);
8704 /* Some machines use MULT instead of ASHIFT because MULT
8705 is cheaper. But it is still better on those machines to
8706 merge two shifts into one. */
8707 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
8708 && exact_log2 (INTVAL (XEXP (varop
, 1))) >= 0)
8711 = simplify_gen_binary (ASHIFT
, GET_MODE (varop
),
8713 GEN_INT (exact_log2 (
8714 INTVAL (XEXP (varop
, 1)))));
8720 /* Similar, for when divides are cheaper. */
8721 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
8722 && exact_log2 (INTVAL (XEXP (varop
, 1))) >= 0)
8725 = simplify_gen_binary (LSHIFTRT
, GET_MODE (varop
),
8727 GEN_INT (exact_log2 (
8728 INTVAL (XEXP (varop
, 1)))));
8734 /* If we are extracting just the sign bit of an arithmetic
8735 right shift, that shift is not needed. However, the sign
8736 bit of a wider mode may be different from what would be
8737 interpreted as the sign bit in a narrower mode, so, if
8738 the result is narrower, don't discard the shift. */
8739 if (code
== LSHIFTRT
8740 && count
== (GET_MODE_BITSIZE (result_mode
) - 1)
8741 && (GET_MODE_BITSIZE (result_mode
)
8742 >= GET_MODE_BITSIZE (GET_MODE (varop
))))
8744 varop
= XEXP (varop
, 0);
8748 /* ... fall through ... */
8753 /* Here we have two nested shifts. The result is usually the
8754 AND of a new shift with a mask. We compute the result below. */
8755 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
8756 && INTVAL (XEXP (varop
, 1)) >= 0
8757 && INTVAL (XEXP (varop
, 1)) < GET_MODE_BITSIZE (GET_MODE (varop
))
8758 && GET_MODE_BITSIZE (result_mode
) <= HOST_BITS_PER_WIDE_INT
8759 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
8760 && !VECTOR_MODE_P (result_mode
))
8762 enum rtx_code first_code
= GET_CODE (varop
);
8763 unsigned int first_count
= INTVAL (XEXP (varop
, 1));
8764 unsigned HOST_WIDE_INT mask
;
8767 /* We have one common special case. We can't do any merging if
8768 the inner code is an ASHIFTRT of a smaller mode. However, if
8769 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8770 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8771 we can convert it to
8772 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
8773 This simplifies certain SIGN_EXTEND operations. */
8774 if (code
== ASHIFT
&& first_code
== ASHIFTRT
8775 && count
== (GET_MODE_BITSIZE (result_mode
)
8776 - GET_MODE_BITSIZE (GET_MODE (varop
))))
8778 /* C3 has the low-order C1 bits zero. */
8780 mask
= (GET_MODE_MASK (mode
)
8781 & ~(((HOST_WIDE_INT
) 1 << first_count
) - 1));
8783 varop
= simplify_and_const_int (NULL_RTX
, result_mode
,
8784 XEXP (varop
, 0), mask
);
8785 varop
= simplify_shift_const (NULL_RTX
, ASHIFT
, result_mode
,
8787 count
= first_count
;
8792 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8793 than C1 high-order bits equal to the sign bit, we can convert
8794 this to either an ASHIFT or an ASHIFTRT depending on the
8797 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8799 if (code
== ASHIFTRT
&& first_code
== ASHIFT
8800 && GET_MODE (varop
) == shift_mode
8801 && (num_sign_bit_copies (XEXP (varop
, 0), shift_mode
)
8804 varop
= XEXP (varop
, 0);
8805 count
-= first_count
;
8815 /* There are some cases we can't do. If CODE is ASHIFTRT,
8816 we can only do this if FIRST_CODE is also ASHIFTRT.
8818 We can't do the case when CODE is ROTATE and FIRST_CODE is
8821 If the mode of this shift is not the mode of the outer shift,
8822 we can't do this if either shift is a right shift or ROTATE.
8824 Finally, we can't do any of these if the mode is too wide
8825 unless the codes are the same.
8827 Handle the case where the shift codes are the same
8830 if (code
== first_code
)
8832 if (GET_MODE (varop
) != result_mode
8833 && (code
== ASHIFTRT
|| code
== LSHIFTRT
8837 count
+= first_count
;
8838 varop
= XEXP (varop
, 0);
8842 if (code
== ASHIFTRT
8843 || (code
== ROTATE
&& first_code
== ASHIFTRT
)
8844 || GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
8845 || (GET_MODE (varop
) != result_mode
8846 && (first_code
== ASHIFTRT
|| first_code
== LSHIFTRT
8847 || first_code
== ROTATE
8848 || code
== ROTATE
)))
8851 /* To compute the mask to apply after the shift, shift the
8852 nonzero bits of the inner shift the same way the
8853 outer shift will. */
8855 mask_rtx
= GEN_INT (nonzero_bits (varop
, GET_MODE (varop
)));
8858 = simplify_const_binary_operation (code
, result_mode
, mask_rtx
,
8861 /* Give up if we can't compute an outer operation to use. */
8863 || GET_CODE (mask_rtx
) != CONST_INT
8864 || ! merge_outer_ops (&outer_op
, &outer_const
, AND
,
8866 result_mode
, &complement_p
))
8869 /* If the shifts are in the same direction, we add the
8870 counts. Otherwise, we subtract them. */
8871 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
8872 == (first_code
== ASHIFTRT
|| first_code
== LSHIFTRT
))
8873 count
+= first_count
;
8875 count
-= first_count
;
8877 /* If COUNT is positive, the new shift is usually CODE,
8878 except for the two exceptions below, in which case it is
8879 FIRST_CODE. If the count is negative, FIRST_CODE should
8882 && ((first_code
== ROTATE
&& code
== ASHIFT
)
8883 || (first_code
== ASHIFTRT
&& code
== LSHIFTRT
)))
8886 code
= first_code
, count
= -count
;
8888 varop
= XEXP (varop
, 0);
8892 /* If we have (A << B << C) for any shift, we can convert this to
8893 (A << C << B). This wins if A is a constant. Only try this if
8894 B is not a constant. */
8896 else if (GET_CODE (varop
) == code
8897 && GET_CODE (XEXP (varop
, 0)) == CONST_INT
8898 && GET_CODE (XEXP (varop
, 1)) != CONST_INT
)
8900 rtx
new = simplify_const_binary_operation (code
, mode
,
8903 varop
= gen_rtx_fmt_ee (code
, mode
, new, XEXP (varop
, 1));
8910 /* Make this fit the case below. */
8911 varop
= gen_rtx_XOR (mode
, XEXP (varop
, 0),
8912 GEN_INT (GET_MODE_MASK (mode
)));
8918 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8919 with C the size of VAROP - 1 and the shift is logical if
8920 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8921 we have an (le X 0) operation. If we have an arithmetic shift
8922 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8923 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8925 if (GET_CODE (varop
) == IOR
&& GET_CODE (XEXP (varop
, 0)) == PLUS
8926 && XEXP (XEXP (varop
, 0), 1) == constm1_rtx
8927 && (STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
8928 && (code
== LSHIFTRT
|| code
== ASHIFTRT
)
8929 && count
== (GET_MODE_BITSIZE (GET_MODE (varop
)) - 1)
8930 && rtx_equal_p (XEXP (XEXP (varop
, 0), 0), XEXP (varop
, 1)))
8933 varop
= gen_rtx_LE (GET_MODE (varop
), XEXP (varop
, 1),
8936 if (STORE_FLAG_VALUE
== 1 ? code
== ASHIFTRT
: code
== LSHIFTRT
)
8937 varop
= gen_rtx_NEG (GET_MODE (varop
), varop
);
8942 /* If we have (shift (logical)), move the logical to the outside
8943 to allow it to possibly combine with another logical and the
8944 shift to combine with another shift. This also canonicalizes to
8945 what a ZERO_EXTRACT looks like. Also, some machines have
8946 (and (shift)) insns. */
8948 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
8949 /* We can't do this if we have (ashiftrt (xor)) and the
8950 constant has its sign bit set in shift_mode. */
8951 && !(code
== ASHIFTRT
&& GET_CODE (varop
) == XOR
8952 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop
, 1)),
8954 && (new = simplify_const_binary_operation (code
, result_mode
,
8956 GEN_INT (count
))) != 0
8957 && GET_CODE (new) == CONST_INT
8958 && merge_outer_ops (&outer_op
, &outer_const
, GET_CODE (varop
),
8959 INTVAL (new), result_mode
, &complement_p
))
8961 varop
= XEXP (varop
, 0);
8965 /* If we can't do that, try to simplify the shift in each arm of the
8966 logical expression, make a new logical expression, and apply
8967 the inverse distributive law. This also can't be done
8968 for some (ashiftrt (xor)). */
8969 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
8970 && !(code
== ASHIFTRT
&& GET_CODE (varop
) == XOR
8971 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop
, 1)),
8974 rtx lhs
= simplify_shift_const (NULL_RTX
, code
, shift_mode
,
8975 XEXP (varop
, 0), count
);
8976 rtx rhs
= simplify_shift_const (NULL_RTX
, code
, shift_mode
,
8977 XEXP (varop
, 1), count
);
8979 varop
= simplify_gen_binary (GET_CODE (varop
), shift_mode
,
8981 varop
= apply_distributive_law (varop
);
8989 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
8990 says that the sign bit can be tested, FOO has mode MODE, C is
8991 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8992 that may be nonzero. */
8993 if (code
== LSHIFTRT
8994 && XEXP (varop
, 1) == const0_rtx
8995 && GET_MODE (XEXP (varop
, 0)) == result_mode
8996 && count
== (GET_MODE_BITSIZE (result_mode
) - 1)
8997 && GET_MODE_BITSIZE (result_mode
) <= HOST_BITS_PER_WIDE_INT
8998 && STORE_FLAG_VALUE
== -1
8999 && nonzero_bits (XEXP (varop
, 0), result_mode
) == 1
9000 && merge_outer_ops (&outer_op
, &outer_const
, XOR
,
9001 (HOST_WIDE_INT
) 1, result_mode
,
9004 varop
= XEXP (varop
, 0);
9011 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9012 than the number of bits in the mode is equivalent to A. */
9013 if (code
== LSHIFTRT
9014 && count
== (GET_MODE_BITSIZE (result_mode
) - 1)
9015 && nonzero_bits (XEXP (varop
, 0), result_mode
) == 1)
9017 varop
= XEXP (varop
, 0);
9022 /* NEG commutes with ASHIFT since it is multiplication. Move the
9023 NEG outside to allow shifts to combine. */
9025 && merge_outer_ops (&outer_op
, &outer_const
, NEG
,
9026 (HOST_WIDE_INT
) 0, result_mode
,
9029 varop
= XEXP (varop
, 0);
9035 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9036 is one less than the number of bits in the mode is
9037 equivalent to (xor A 1). */
9038 if (code
== LSHIFTRT
9039 && count
== (GET_MODE_BITSIZE (result_mode
) - 1)
9040 && XEXP (varop
, 1) == constm1_rtx
9041 && nonzero_bits (XEXP (varop
, 0), result_mode
) == 1
9042 && merge_outer_ops (&outer_op
, &outer_const
, XOR
,
9043 (HOST_WIDE_INT
) 1, result_mode
,
9047 varop
= XEXP (varop
, 0);
9051 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9052 that might be nonzero in BAR are those being shifted out and those
9053 bits are known zero in FOO, we can replace the PLUS with FOO.
9054 Similarly in the other operand order. This code occurs when
9055 we are computing the size of a variable-size array. */
9057 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
9058 && count
< HOST_BITS_PER_WIDE_INT
9059 && nonzero_bits (XEXP (varop
, 1), result_mode
) >> count
== 0
9060 && (nonzero_bits (XEXP (varop
, 1), result_mode
)
9061 & nonzero_bits (XEXP (varop
, 0), result_mode
)) == 0)
9063 varop
= XEXP (varop
, 0);
9066 else if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
9067 && count
< HOST_BITS_PER_WIDE_INT
9068 && GET_MODE_BITSIZE (result_mode
) <= HOST_BITS_PER_WIDE_INT
9069 && 0 == (nonzero_bits (XEXP (varop
, 0), result_mode
)
9071 && 0 == (nonzero_bits (XEXP (varop
, 0), result_mode
)
9072 & nonzero_bits (XEXP (varop
, 1),
9075 varop
= XEXP (varop
, 1);
9079 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9081 && GET_CODE (XEXP (varop
, 1)) == CONST_INT
9082 && (new = simplify_const_binary_operation (ASHIFT
, result_mode
,
9084 GEN_INT (count
))) != 0
9085 && GET_CODE (new) == CONST_INT
9086 && merge_outer_ops (&outer_op
, &outer_const
, PLUS
,
9087 INTVAL (new), result_mode
, &complement_p
))
9089 varop
= XEXP (varop
, 0);
9093 /* Check for 'PLUS signbit', which is the canonical form of 'XOR
9094 signbit', and attempt to change the PLUS to an XOR and move it to
9095 the outer operation as is done above in the AND/IOR/XOR case
9096 leg for shift(logical). See details in logical handling above
9097 for reasoning in doing so. */
9098 if (code
== LSHIFTRT
9099 && GET_CODE (XEXP (varop
, 1)) == CONST_INT
9100 && mode_signbit_p (result_mode
, XEXP (varop
, 1))
9101 && (new = simplify_const_binary_operation (code
, result_mode
,
9103 GEN_INT (count
))) != 0
9104 && GET_CODE (new) == CONST_INT
9105 && merge_outer_ops (&outer_op
, &outer_const
, XOR
,
9106 INTVAL (new), result_mode
, &complement_p
))
9108 varop
= XEXP (varop
, 0);
9115 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
9116 with C the size of VAROP - 1 and the shift is logical if
9117 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9118 we have a (gt X 0) operation. If the shift is arithmetic with
9119 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9120 we have a (neg (gt X 0)) operation. */
9122 if ((STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
9123 && GET_CODE (XEXP (varop
, 0)) == ASHIFTRT
9124 && count
== (GET_MODE_BITSIZE (GET_MODE (varop
)) - 1)
9125 && (code
== LSHIFTRT
|| code
== ASHIFTRT
)
9126 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
9127 && INTVAL (XEXP (XEXP (varop
, 0), 1)) == count
9128 && rtx_equal_p (XEXP (XEXP (varop
, 0), 0), XEXP (varop
, 1)))
9131 varop
= gen_rtx_GT (GET_MODE (varop
), XEXP (varop
, 1),
9134 if (STORE_FLAG_VALUE
== 1 ? code
== ASHIFTRT
: code
== LSHIFTRT
)
9135 varop
= gen_rtx_NEG (GET_MODE (varop
), varop
);
9142 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9143 if the truncate does not affect the value. */
9144 if (code
== LSHIFTRT
9145 && GET_CODE (XEXP (varop
, 0)) == LSHIFTRT
9146 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
9147 && (INTVAL (XEXP (XEXP (varop
, 0), 1))
9148 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop
, 0)))
9149 - GET_MODE_BITSIZE (GET_MODE (varop
)))))
9151 rtx varop_inner
= XEXP (varop
, 0);
9154 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner
),
9155 XEXP (varop_inner
, 0),
9157 (count
+ INTVAL (XEXP (varop_inner
, 1))));
9158 varop
= gen_rtx_TRUNCATE (GET_MODE (varop
), varop_inner
);
9171 /* We need to determine what mode to do the shift in. If the shift is
9172 a right shift or ROTATE, we must always do it in the mode it was
9173 originally done in. Otherwise, we can do it in MODE, the widest mode
9174 encountered. The code we care about is that of the shift that will
9175 actually be done, not the shift that was originally requested. */
9177 = (code
== ASHIFTRT
|| code
== LSHIFTRT
|| code
== ROTATE
9178 ? result_mode
: mode
);
9180 /* We have now finished analyzing the shift. The result should be
9181 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9182 OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
9183 to the result of the shift. OUTER_CONST is the relevant constant,
9184 but we must turn off all bits turned off in the shift. */
9186 if (outer_op
== UNKNOWN
9187 && orig_code
== code
&& orig_count
== count
9188 && varop
== orig_varop
9189 && shift_mode
== GET_MODE (varop
))
9192 /* Make a SUBREG if necessary. If we can't make it, fail. */
9193 varop
= gen_lowpart (shift_mode
, varop
);
9194 if (varop
== NULL_RTX
|| GET_CODE (varop
) == CLOBBER
)
9197 /* If we have an outer operation and we just made a shift, it is
9198 possible that we could have simplified the shift were it not
9199 for the outer operation. So try to do the simplification
9202 if (outer_op
!= UNKNOWN
)
9203 x
= simplify_shift_const_1 (code
, shift_mode
, varop
, count
);
9208 x
= simplify_gen_binary (code
, shift_mode
, varop
, GEN_INT (count
));
9210 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9211 turn off all the bits that the shift would have turned off. */
9212 if (orig_code
== LSHIFTRT
&& result_mode
!= shift_mode
)
9213 x
= simplify_and_const_int (NULL_RTX
, shift_mode
, x
,
9214 GET_MODE_MASK (result_mode
) >> orig_count
);
9216 /* Do the remainder of the processing in RESULT_MODE. */
9217 x
= gen_lowpart_or_truncate (result_mode
, x
);
9219 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9222 x
= simplify_gen_unary (NOT
, result_mode
, x
, result_mode
);
9224 if (outer_op
!= UNKNOWN
)
9226 if (GET_MODE_BITSIZE (result_mode
) < HOST_BITS_PER_WIDE_INT
)
9227 outer_const
= trunc_int_for_mode (outer_const
, result_mode
);
9229 if (outer_op
== AND
)
9230 x
= simplify_and_const_int (NULL_RTX
, result_mode
, x
, outer_const
);
9231 else if (outer_op
== SET
)
9232 /* This means that we have determined that the result is
9233 equivalent to a constant. This should be rare. */
9234 x
= GEN_INT (outer_const
);
9235 else if (GET_RTX_CLASS (outer_op
) == RTX_UNARY
)
9236 x
= simplify_gen_unary (outer_op
, result_mode
, x
, result_mode
);
9238 x
= simplify_gen_binary (outer_op
, result_mode
, x
,
9239 GEN_INT (outer_const
));
9245 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
9246 The result of the shift is RESULT_MODE. If we cannot simplify it,
9247 return X or, if it is NULL, synthesize the expression with
9248 simplify_gen_binary. Otherwise, return a simplified value.
9250 The shift is normally computed in the widest mode we find in VAROP, as
9251 long as it isn't a different number of words than RESULT_MODE. Exceptions
9252 are ASHIFTRT and ROTATE, which are always done in their original mode. */
9255 simplify_shift_const (rtx x
, enum rtx_code code
, enum machine_mode result_mode
,
9256 rtx varop
, int count
)
9258 rtx tem
= simplify_shift_const_1 (code
, result_mode
, varop
, count
);
9263 x
= simplify_gen_binary (code
, GET_MODE (varop
), varop
, GEN_INT (count
));
9264 if (GET_MODE (x
) != result_mode
)
9265 x
= gen_lowpart (result_mode
, x
);
/* NOTE(review): this file is a lossy, line-split extraction of GCC's
   combine.c.  Original line numbers are fused into the text and several
   original source lines (braces, the declaration "rtx pat = *pnewpat;",
   loop-local declarations, and some returns/stores) appear to have been
   dropped by the extraction.  Comments below annotate only the surviving
   fragments; do not treat the code as compilable as-is.  */
9270 /* Like recog, but we receive the address of a pointer to a new pattern.
9271 We try to match the rtx that the pointer points to.
9272 If that fails, we may try to modify or replace the pattern,
9273 storing the replacement into the same pointer object.
9275 Modifications include deletion or addition of CLOBBERs.
9277 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9278 the CLOBBERs are placed.
9280 The value is the final insn code from the pattern ultimately matched,
/* Signature: takes the address of the candidate pattern (PNEWPAT), the
   insn being combined into (INSN), and an out-parameter for REG_UNUSED
   notes (PNOTES).  The return-type line from the original source is not
   visible in this extraction.  */
9284 recog_for_combine (rtx
*pnewpat
, rtx insn
, rtx
*pnotes
)
/* Locals: the recognized insn code, the count of CLOBBERs recog asks us
   to add, and saved copies of INSN's pattern/notes so they can be
   restored after the trial recognition below.  */
9287 int insn_code_number
;
9288 int num_clobbers_to_add
= 0;
9291 rtx old_notes
, old_pat
;
9293 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9294 we use to indicate that something didn't match. If we find such a
9295 thing, force rejection. */
/* Scan the PARALLEL for the (clobber (const_int 0)) rejection marker.
   The early "return -1" that follows this test in upstream combine.c is
   not visible in this extraction — presumably dropped; verify against
   the original file.  */
9296 if (GET_CODE (pat
) == PARALLEL
)
9297 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; i
--)
9298 if (GET_CODE (XVECEXP (pat
, 0, i
)) == CLOBBER
9299 && XEXP (XVECEXP (pat
, 0, i
), 0) == const0_rtx
)
/* Temporarily install the candidate pattern on INSN (clearing its notes)
   so recog can evaluate it in context; the originals are saved first and
   restored below at the 9335/9336 fragments.  */
9302 old_pat
= PATTERN (insn
);
9303 old_notes
= REG_NOTES (insn
);
9304 PATTERN (insn
) = pat
;
9305 REG_NOTES (insn
) = 0;
/* Trial recognition; recog may request extra CLOBBERs via
   num_clobbers_to_add.  */
9307 insn_code_number
= recog (pat
, insn
, &num_clobbers_to_add
);
9309 /* If it isn't, there is the possibility that we previously had an insn
9310 that clobbered some register as a side effect, but the combined
9311 insn doesn't need to do that. So try once more without the clobbers
9312 unless this represents an ASM insn. */
9314 if (insn_code_number
< 0 && ! check_asm_operands (pat
)
9315 && GET_CODE (pat
) == PARALLEL
)
/* Compact the PARALLEL: shift every non-CLOBBER element down over the
   CLOBBERs.  The loop braces and the pos++ bookkeeping present upstream
   are missing from this extraction.  */
9319 for (pos
= 0, i
= 0; i
< XVECLEN (pat
, 0); i
++)
9320 if (GET_CODE (XVECEXP (pat
, 0, i
)) != CLOBBER
)
9323 SUBST (XVECEXP (pat
, 0, pos
), XVECEXP (pat
, 0, i
));
/* Shrink the vector length to the number of kept elements (recorded via
   SUBST_INT so the change can be undone by combine's undo machinery).  */
9327 SUBST_INT (XVECLEN (pat
, 0), pos
);
/* If only one element remains, unwrap the PARALLEL to that element.
   The guarding "if (pos == 1)" from upstream is not visible here.  */
9330 pat
= XVECEXP (pat
, 0, 0);
/* Re-try recognition with the stripped pattern.  */
9332 PATTERN (insn
) = pat
;
9333 insn_code_number
= recog (pat
, insn
, &num_clobbers_to_add
);
/* Restore INSN's original pattern and notes saved at 9302/9303.  */
9335 PATTERN (insn
) = old_pat
;
9336 REG_NOTES (insn
) = old_notes
;
9338 /* Recognize all noop sets, these will be killed by followup pass. */
9339 if (insn_code_number
< 0 && GET_CODE (pat
) == SET
&& set_noop_p (pat
))
9340 insn_code_number
= NOOP_MOVE_INSN_CODE
, num_clobbers_to_add
= 0;
9342 /* If we had any clobbers to add, make a new pattern than contains
9343 them. Then check to make sure that all of them are dead. */
9344 if (num_clobbers_to_add
)
/* Build a PARALLEL big enough for the existing element(s) plus the
   requested clobbers; the "? (XVECLEN (pat, 0)" arm of the conditional
   is missing between the 9347 and 9349 fragments.  */
9346 rtx newpat
= gen_rtx_PARALLEL (VOIDmode
,
9347 rtvec_alloc (GET_CODE (pat
) == PARALLEL
9349 + num_clobbers_to_add
)
9350 : num_clobbers_to_add
+ 1));
/* Copy the old pattern's elements into the new PARALLEL (all of them if
   it was a PARALLEL, otherwise the single pattern into slot 0; the
   "else" between 9354 and 9356 is not visible here).  */
9352 if (GET_CODE (pat
) == PARALLEL
)
9353 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
9354 XVECEXP (newpat
, 0, i
) = XVECEXP (pat
, 0, i
);
9356 XVECEXP (newpat
, 0, 0) = pat
;
/* Let the target fill in the machine-required clobbers for this insn
   code.  */
9358 add_clobbers (newpat
, insn_code_number
);
/* Walk only the newly added clobbers (the trailing num_clobbers_to_add
   elements).  Each clobbered REG must be dead at INSN; the failure
   "return -1" upstream is not visible in this extraction.  */
9360 for (i
= XVECLEN (newpat
, 0) - num_clobbers_to_add
;
9361 i
< XVECLEN (newpat
, 0); i
++)
9363 if (REG_P (XEXP (XVECEXP (newpat
, 0, i
), 0))
9364 && ! reg_dead_at_p (XEXP (XVECEXP (newpat
, 0, i
), 0), insn
))
/* Record a REG_UNUSED note for each added clobber, chained onto NOTES
   (which is ultimately handed back through *pnotes upstream — that
   store is not visible here; confirm against the original file).  */
9366 notes
= gen_rtx_EXPR_LIST (REG_UNUSED
,
9367 XEXP (XVECEXP (newpat
, 0, i
), 0), notes
);
/* Hand back the final insn code (possibly NOOP_MOVE_INSN_CODE or a
   negative "no match").  The upstream stores to *pnewpat/*pnotes before
   this return are not visible in this extraction.  */
9375 return insn_code_number
;
/* NOTE(review): as with the rest of this file, this function is a lossy
   line-split extraction of GCC's combine.c.  Several original lines
   (return type, guard conditionals such as the MEM_P test, returns, and
   the #endif matching the #ifdef below) are missing.  Comments annotate
   the surviving fragments only.  */
9378 /* Like gen_lowpart_general but for use by combine. In combine it
9379 is not possible to create any new pseudoregs. However, it is
9380 safe to create invalid memory addresses, because combine will
9381 try to recognize them and all they will do is make the combine
9384 If for some reason this cannot do its job, an rtx
9385 (clobber (const_int 0)) is returned.
9386 An insn containing that will not be recognized. */
/* OMODE is the requested (output) mode; X is the value whose low part
   we want.  The return-type line is not visible in this extraction.  */
9389 gen_lowpart_for_combine (enum machine_mode omode
, rtx x
)
/* Cache X's mode and the byte sizes of both modes for the endianness
   offset arithmetic further below.  */
9391 enum machine_mode imode
= GET_MODE (x
);
9392 unsigned int osize
= GET_MODE_SIZE (omode
);
9393 unsigned int isize
= GET_MODE_SIZE (imode
);
9399 /* Return identity if this is a CONST or symbolic reference. */
/* The leading condition and the "return x;" body of this test are not
   visible; only the symbolic-reference checks survive.  */
9401 && (GET_CODE (x
) == CONST
9402 || GET_CODE (x
) == SYMBOL_REF
9403 || GET_CODE (x
) == LABEL_REF
))
9406 /* We can only support MODE being wider than a word if X is a
9407 constant integer or has a mode the same size. */
/* Wider-than-word output modes are only handled for VOIDmode integer
   constants (the "same size" alternative and the failure goto are not
   visible in this extraction).  */
9408 if (GET_MODE_SIZE (omode
) > UNITS_PER_WORD
9409 && ! ((imode
== VOIDmode
9410 && (GET_CODE (x
) == CONST_INT
9411 || GET_CODE (x
) == CONST_DOUBLE
))
9415 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9416 won't know what to do. So we will strip off the SUBREG here and
9417 process normally. */
/* Strip a (subreg (mem)); the body line that actually replaces X with
   SUBREG_REG (x) is not visible between 9418 and 9422.  */
9418 if (GET_CODE (x
) == SUBREG
&& MEM_P (SUBREG_REG (x
)))
9422 /* For use in case we fall down into the address adjustments
9423 further below, we need to adjust the known mode and size of
9424 x; imode and isize, since we just adjusted x. */
9425 imode
= GET_MODE (x
);
9430 isize
= GET_MODE_SIZE (imode
);
/* First try the shared helper; a non-null result is presumably returned
   directly upstream (that return is not visible here).  */
9433 result
= gen_lowpart_common (omode
, x
);
/* Target hook: remember SUBREGs whose mode change may be invalid for
   some register classes.  The matching #endif is not visible in this
   extraction.  */
9435 #ifdef CANNOT_CHANGE_MODE_CLASS
9436 if (result
!= 0 && GET_CODE (result
) == SUBREG
)
9437 record_subregs_of_mode (result
);
/* Memory-operand path.  The enclosing "if (MEM_P (x))" present upstream
   is not visible; the fragments below assume X is a MEM.  */
9447 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9449 if (MEM_VOLATILE_P (x
) || mode_dependent_address_p (XEXP (x
, 0)))
9452 /* If we want to refer to something bigger than the original memref,
9453 generate a paradoxical subreg instead. That will force a reload
9454 of the original memref X. */
/* The size comparison guarding this paradoxical-subreg return (isize vs
   osize upstream) is not visible here.  */
9456 return gen_rtx_SUBREG (omode
, x
, 0);
/* Compute the byte offset of the low part inside the MEM, honoring word
   and byte endianness.  The "offset = 0" initialization is not visible
   in this extraction.  */
9458 if (WORDS_BIG_ENDIAN
)
9459 offset
= MAX (isize
, UNITS_PER_WORD
) - MAX (osize
, UNITS_PER_WORD
);
9461 /* Adjust the address so that the address-after-the-data is
9463 if (BYTES_BIG_ENDIAN
)
9464 offset
-= MIN (UNITS_PER_WORD
, osize
) - MIN (UNITS_PER_WORD
, isize
);
/* Re-address the MEM in OMODE at the computed offset without validating
   the new address (combine tolerates invalid addresses).  */
9466 return adjust_address_nv (x
, omode
, offset
);
9469 /* If X is a comparison operator, rewrite it in a new mode. This
9470 probably won't match, but may allow further simplifications. */
9471 else if (COMPARISON_P (x
))
9472 return gen_rtx_fmt_ee (GET_CODE (x
), omode
, XEXP (x
, 0), XEXP (x
, 1));
9474 /* If we couldn't simplify X any other way, just enclose it in a
9475 SUBREG. Normally, this SUBREG won't match, but some patterns may
9476 include an explicit SUBREG or we may simplify it further in combine. */
9482 offset
= subreg_lowpart_offset (omode
, imode
);
/* VOIDmode X (a bare constant): pick an integer mode of OMODE's size so
   a SUBREG can be formed; bail out if the helper fails (the failure
   checks/gotos around 9485-9490 are not visible here).  */
9483 if (imode
== VOIDmode
)
9485 imode
= int_mode_for_mode (omode
);
9486 x
= gen_lowpart_common (imode
, x
);
9490 res
= simplify_gen_subreg (omode
, x
, imode
, offset
);
/* Failure marker: (clobber (const_int 0)), which recog_for_combine
   treats as "force rejection".  */
9496 return gen_rtx_CLOBBER (imode
, const0_rtx
);
9499 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
9500 comparison code that will be tested.
9502 The result is a possibly different comparison code to use. *POP0 and
9503 *POP1 may be updated.
9505 It is possible that we might detect that a comparison is either always
9506 true or always false. However, we do not perform general constant
9507 folding in combine, so this knowledge isn't useful. Such tautologies
9508 should have been detected earlier. Hence we ignore all such cases. */
9510 static enum rtx_code
9511 simplify_comparison (enum rtx_code code
, rtx
*pop0
, rtx
*pop1
)
9517 enum machine_mode mode
, tmode
;
9519 /* Try a few ways of applying the same transformation to both operands. */
9522 #ifndef WORD_REGISTER_OPERATIONS
9523 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9524 so check specially. */
9525 if (code
!= GTU
&& code
!= GEU
&& code
!= LTU
&& code
!= LEU
9526 && GET_CODE (op0
) == ASHIFTRT
&& GET_CODE (op1
) == ASHIFTRT
9527 && GET_CODE (XEXP (op0
, 0)) == ASHIFT
9528 && GET_CODE (XEXP (op1
, 0)) == ASHIFT
9529 && GET_CODE (XEXP (XEXP (op0
, 0), 0)) == SUBREG
9530 && GET_CODE (XEXP (XEXP (op1
, 0), 0)) == SUBREG
9531 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0
, 0), 0)))
9532 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1
, 0), 0))))
9533 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
9534 && XEXP (op0
, 1) == XEXP (op1
, 1)
9535 && XEXP (op0
, 1) == XEXP (XEXP (op0
, 0), 1)
9536 && XEXP (op0
, 1) == XEXP (XEXP (op1
, 0), 1)
9537 && (INTVAL (XEXP (op0
, 1))
9538 == (GET_MODE_BITSIZE (GET_MODE (op0
))
9540 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0
, 0), 0))))))))
9542 op0
= SUBREG_REG (XEXP (XEXP (op0
, 0), 0));
9543 op1
= SUBREG_REG (XEXP (XEXP (op1
, 0), 0));
9547 /* If both operands are the same constant shift, see if we can ignore the
9548 shift. We can if the shift is a rotate or if the bits shifted out of
9549 this shift are known to be zero for both inputs and if the type of
9550 comparison is compatible with the shift. */
9551 if (GET_CODE (op0
) == GET_CODE (op1
)
9552 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
9553 && ((GET_CODE (op0
) == ROTATE
&& (code
== NE
|| code
== EQ
))
9554 || ((GET_CODE (op0
) == LSHIFTRT
|| GET_CODE (op0
) == ASHIFT
)
9555 && (code
!= GT
&& code
!= LT
&& code
!= GE
&& code
!= LE
))
9556 || (GET_CODE (op0
) == ASHIFTRT
9557 && (code
!= GTU
&& code
!= LTU
9558 && code
!= GEU
&& code
!= LEU
)))
9559 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
9560 && INTVAL (XEXP (op0
, 1)) >= 0
9561 && INTVAL (XEXP (op0
, 1)) < HOST_BITS_PER_WIDE_INT
9562 && XEXP (op0
, 1) == XEXP (op1
, 1))
9564 enum machine_mode mode
= GET_MODE (op0
);
9565 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
9566 int shift_count
= INTVAL (XEXP (op0
, 1));
9568 if (GET_CODE (op0
) == LSHIFTRT
|| GET_CODE (op0
) == ASHIFTRT
)
9569 mask
&= (mask
>> shift_count
) << shift_count
;
9570 else if (GET_CODE (op0
) == ASHIFT
)
9571 mask
= (mask
& (mask
<< shift_count
)) >> shift_count
;
9573 if ((nonzero_bits (XEXP (op0
, 0), mode
) & ~mask
) == 0
9574 && (nonzero_bits (XEXP (op1
, 0), mode
) & ~mask
) == 0)
9575 op0
= XEXP (op0
, 0), op1
= XEXP (op1
, 0);
9580 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9581 SUBREGs are of the same mode, and, in both cases, the AND would
9582 be redundant if the comparison was done in the narrower mode,
9583 do the comparison in the narrower mode (e.g., we are AND'ing with 1
9584 and the operand's possibly nonzero bits are 0xffffff01; in that case
9585 if we only care about QImode, we don't need the AND). This case
9586 occurs if the output mode of an scc insn is not SImode and
9587 STORE_FLAG_VALUE == 1 (e.g., the 386).
9589 Similarly, check for a case where the AND's are ZERO_EXTEND
9590 operations from some narrower mode even though a SUBREG is not
9593 else if (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == AND
9594 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
9595 && GET_CODE (XEXP (op1
, 1)) == CONST_INT
)
9597 rtx inner_op0
= XEXP (op0
, 0);
9598 rtx inner_op1
= XEXP (op1
, 0);
9599 HOST_WIDE_INT c0
= INTVAL (XEXP (op0
, 1));
9600 HOST_WIDE_INT c1
= INTVAL (XEXP (op1
, 1));
9603 if (GET_CODE (inner_op0
) == SUBREG
&& GET_CODE (inner_op1
) == SUBREG
9604 && (GET_MODE_SIZE (GET_MODE (inner_op0
))
9605 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0
))))
9606 && (GET_MODE (SUBREG_REG (inner_op0
))
9607 == GET_MODE (SUBREG_REG (inner_op1
)))
9608 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0
)))
9609 <= HOST_BITS_PER_WIDE_INT
)
9610 && (0 == ((~c0
) & nonzero_bits (SUBREG_REG (inner_op0
),
9611 GET_MODE (SUBREG_REG (inner_op0
)))))
9612 && (0 == ((~c1
) & nonzero_bits (SUBREG_REG (inner_op1
),
9613 GET_MODE (SUBREG_REG (inner_op1
))))))
9615 op0
= SUBREG_REG (inner_op0
);
9616 op1
= SUBREG_REG (inner_op1
);
9618 /* The resulting comparison is always unsigned since we masked
9619 off the original sign bit. */
9620 code
= unsigned_condition (code
);
9626 for (tmode
= GET_CLASS_NARROWEST_MODE
9627 (GET_MODE_CLASS (GET_MODE (op0
)));
9628 tmode
!= GET_MODE (op0
); tmode
= GET_MODE_WIDER_MODE (tmode
))
9629 if ((unsigned HOST_WIDE_INT
) c0
== GET_MODE_MASK (tmode
))
9631 op0
= gen_lowpart (tmode
, inner_op0
);
9632 op1
= gen_lowpart (tmode
, inner_op1
);
9633 code
= unsigned_condition (code
);
9642 /* If both operands are NOT, we can strip off the outer operation
9643 and adjust the comparison code for swapped operands; similarly for
9644 NEG, except that this must be an equality comparison. */
9645 else if ((GET_CODE (op0
) == NOT
&& GET_CODE (op1
) == NOT
)
9646 || (GET_CODE (op0
) == NEG
&& GET_CODE (op1
) == NEG
9647 && (code
== EQ
|| code
== NE
)))
9648 op0
= XEXP (op0
, 0), op1
= XEXP (op1
, 0), code
= swap_condition (code
);
9654 /* If the first operand is a constant, swap the operands and adjust the
9655 comparison code appropriately, but don't do this if the second operand
9656 is already a constant integer. */
9657 if (swap_commutative_operands_p (op0
, op1
))
9659 tem
= op0
, op0
= op1
, op1
= tem
;
9660 code
= swap_condition (code
);
9663 /* We now enter a loop during which we will try to simplify the comparison.
9664 For the most part, we only are concerned with comparisons with zero,
9665 but some things may really be comparisons with zero but not start
9666 out looking that way. */
9668 while (GET_CODE (op1
) == CONST_INT
)
9670 enum machine_mode mode
= GET_MODE (op0
);
9671 unsigned int mode_width
= GET_MODE_BITSIZE (mode
);
9672 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
9673 int equality_comparison_p
;
9674 int sign_bit_comparison_p
;
9675 int unsigned_comparison_p
;
9676 HOST_WIDE_INT const_op
;
9678 /* We only want to handle integral modes. This catches VOIDmode,
9679 CCmode, and the floating-point modes. An exception is that we
9680 can handle VOIDmode if OP0 is a COMPARE or a comparison
9683 if (GET_MODE_CLASS (mode
) != MODE_INT
9684 && ! (mode
== VOIDmode
9685 && (GET_CODE (op0
) == COMPARE
|| COMPARISON_P (op0
))))
9688 /* Get the constant we are comparing against and turn off all bits
9689 not on in our mode. */
9690 const_op
= INTVAL (op1
);
9691 if (mode
!= VOIDmode
)
9692 const_op
= trunc_int_for_mode (const_op
, mode
);
9693 op1
= GEN_INT (const_op
);
9695 /* If we are comparing against a constant power of two and the value
9696 being compared can only have that single bit nonzero (e.g., it was
9697 `and'ed with that bit), we can replace this with a comparison
9700 && (code
== EQ
|| code
== NE
|| code
== GE
|| code
== GEU
9701 || code
== LT
|| code
== LTU
)
9702 && mode_width
<= HOST_BITS_PER_WIDE_INT
9703 && exact_log2 (const_op
) >= 0
9704 && nonzero_bits (op0
, mode
) == (unsigned HOST_WIDE_INT
) const_op
)
9706 code
= (code
== EQ
|| code
== GE
|| code
== GEU
? NE
: EQ
);
9707 op1
= const0_rtx
, const_op
= 0;
9710 /* Similarly, if we are comparing a value known to be either -1 or
9711 0 with -1, change it to the opposite comparison against zero. */
9714 && (code
== EQ
|| code
== NE
|| code
== GT
|| code
== LE
9715 || code
== GEU
|| code
== LTU
)
9716 && num_sign_bit_copies (op0
, mode
) == mode_width
)
9718 code
= (code
== EQ
|| code
== LE
|| code
== GEU
? NE
: EQ
);
9719 op1
= const0_rtx
, const_op
= 0;
9722 /* Do some canonicalizations based on the comparison code. We prefer
9723 comparisons against zero and then prefer equality comparisons.
9724 If we can reduce the size of a constant, we will do that too. */
9729 /* < C is equivalent to <= (C - 1) */
9733 op1
= GEN_INT (const_op
);
9735 /* ... fall through to LE case below. */
9741 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9745 op1
= GEN_INT (const_op
);
9749 /* If we are doing a <= 0 comparison on a value known to have
9750 a zero sign bit, we can replace this with == 0. */
9751 else if (const_op
== 0
9752 && mode_width
<= HOST_BITS_PER_WIDE_INT
9753 && (nonzero_bits (op0
, mode
)
9754 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)
9759 /* >= C is equivalent to > (C - 1). */
9763 op1
= GEN_INT (const_op
);
9765 /* ... fall through to GT below. */
9771 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
9775 op1
= GEN_INT (const_op
);
9779 /* If we are doing a > 0 comparison on a value known to have
9780 a zero sign bit, we can replace this with != 0. */
9781 else if (const_op
== 0
9782 && mode_width
<= HOST_BITS_PER_WIDE_INT
9783 && (nonzero_bits (op0
, mode
)
9784 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)
9789 /* < C is equivalent to <= (C - 1). */
9793 op1
= GEN_INT (const_op
);
9795 /* ... fall through ... */
9798 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
9799 else if ((mode_width
<= HOST_BITS_PER_WIDE_INT
)
9800 && (const_op
== (HOST_WIDE_INT
) 1 << (mode_width
- 1)))
9802 const_op
= 0, op1
= const0_rtx
;
9810 /* unsigned <= 0 is equivalent to == 0 */
9814 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
9815 else if ((mode_width
<= HOST_BITS_PER_WIDE_INT
)
9816 && (const_op
== ((HOST_WIDE_INT
) 1 << (mode_width
- 1)) - 1))
9818 const_op
= 0, op1
= const0_rtx
;
9824 /* >= C is equivalent to > (C - 1). */
9828 op1
= GEN_INT (const_op
);
9830 /* ... fall through ... */
9833 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
9834 else if ((mode_width
<= HOST_BITS_PER_WIDE_INT
)
9835 && (const_op
== (HOST_WIDE_INT
) 1 << (mode_width
- 1)))
9837 const_op
= 0, op1
= const0_rtx
;
9845 /* unsigned > 0 is equivalent to != 0 */
9849 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
9850 else if ((mode_width
<= HOST_BITS_PER_WIDE_INT
)
9851 && (const_op
== ((HOST_WIDE_INT
) 1 << (mode_width
- 1)) - 1))
9853 const_op
= 0, op1
= const0_rtx
;
9862 /* Compute some predicates to simplify code below. */
9864 equality_comparison_p
= (code
== EQ
|| code
== NE
);
9865 sign_bit_comparison_p
= ((code
== LT
|| code
== GE
) && const_op
== 0);
9866 unsigned_comparison_p
= (code
== LTU
|| code
== LEU
|| code
== GTU
9869 /* If this is a sign bit comparison and we can do arithmetic in
9870 MODE, say that we will only be needing the sign bit of OP0. */
9871 if (sign_bit_comparison_p
9872 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
9873 op0
= force_to_mode (op0
, mode
,
9875 << (GET_MODE_BITSIZE (mode
) - 1)),
9878 /* Now try cases based on the opcode of OP0. If none of the cases
9879 does a "continue", we exit this loop immediately after the
9882 switch (GET_CODE (op0
))
9885 /* If we are extracting a single bit from a variable position in
9886 a constant that has only a single bit set and are comparing it
9887 with zero, we can convert this into an equality comparison
9888 between the position and the location of the single bit. */
9889 /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
9890 have already reduced the shift count modulo the word size. */
9891 if (!SHIFT_COUNT_TRUNCATED
9892 && GET_CODE (XEXP (op0
, 0)) == CONST_INT
9893 && XEXP (op0
, 1) == const1_rtx
9894 && equality_comparison_p
&& const_op
== 0
9895 && (i
= exact_log2 (INTVAL (XEXP (op0
, 0)))) >= 0)
9897 if (BITS_BIG_ENDIAN
)
9899 enum machine_mode new_mode
9900 = mode_for_extraction (EP_extzv
, 1);
9901 if (new_mode
== MAX_MACHINE_MODE
)
9902 i
= BITS_PER_WORD
- 1 - i
;
9906 i
= (GET_MODE_BITSIZE (mode
) - 1 - i
);
9910 op0
= XEXP (op0
, 2);
9914 /* Result is nonzero iff shift count is equal to I. */
9915 code
= reverse_condition (code
);
9919 /* ... fall through ... */
9922 tem
= expand_compound_operation (op0
);
9931 /* If testing for equality, we can take the NOT of the constant. */
9932 if (equality_comparison_p
9933 && (tem
= simplify_unary_operation (NOT
, mode
, op1
, mode
)) != 0)
9935 op0
= XEXP (op0
, 0);
9940 /* If just looking at the sign bit, reverse the sense of the
9942 if (sign_bit_comparison_p
)
9944 op0
= XEXP (op0
, 0);
9945 code
= (code
== GE
? LT
: GE
);
9951 /* If testing for equality, we can take the NEG of the constant. */
9952 if (equality_comparison_p
9953 && (tem
= simplify_unary_operation (NEG
, mode
, op1
, mode
)) != 0)
9955 op0
= XEXP (op0
, 0);
9960 /* The remaining cases only apply to comparisons with zero. */
9964 /* When X is ABS or is known positive,
9965 (neg X) is < 0 if and only if X != 0. */
9967 if (sign_bit_comparison_p
9968 && (GET_CODE (XEXP (op0
, 0)) == ABS
9969 || (mode_width
<= HOST_BITS_PER_WIDE_INT
9970 && (nonzero_bits (XEXP (op0
, 0), mode
)
9971 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)))
9973 op0
= XEXP (op0
, 0);
9974 code
= (code
== LT
? NE
: EQ
);
9978 /* If we have NEG of something whose two high-order bits are the
9979 same, we know that "(-a) < 0" is equivalent to "a > 0". */
9980 if (num_sign_bit_copies (op0
, mode
) >= 2)
9982 op0
= XEXP (op0
, 0);
9983 code
= swap_condition (code
);
9989 /* If we are testing equality and our count is a constant, we
9990 can perform the inverse operation on our RHS. */
9991 if (equality_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
9992 && (tem
= simplify_binary_operation (ROTATERT
, mode
,
9993 op1
, XEXP (op0
, 1))) != 0)
9995 op0
= XEXP (op0
, 0);
10000 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10001 a particular bit. Convert it to an AND of a constant of that
10002 bit. This will be converted into a ZERO_EXTRACT. */
10003 if (const_op
== 0 && sign_bit_comparison_p
10004 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10005 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
10007 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
10010 - INTVAL (XEXP (op0
, 1)))));
10011 code
= (code
== LT
? NE
: EQ
);
10015 /* Fall through. */
10018 /* ABS is ignorable inside an equality comparison with zero. */
10019 if (const_op
== 0 && equality_comparison_p
)
10021 op0
= XEXP (op0
, 0);
10027 /* Can simplify (compare (zero/sign_extend FOO) CONST) to
10028 (compare FOO CONST) if CONST fits in FOO's mode and we
10029 are either testing inequality or have an unsigned
10030 comparison with ZERO_EXTEND or a signed comparison with
10031 SIGN_EXTEND. But don't do it if we don't have a compare
10032 insn of the given mode, since we'd have to revert it
10033 later on, and then we wouldn't know whether to sign- or
10035 mode
= GET_MODE (XEXP (op0
, 0));
10036 if (mode
!= VOIDmode
&& GET_MODE_CLASS (mode
) == MODE_INT
10037 && ! unsigned_comparison_p
10038 && (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
10039 && ((unsigned HOST_WIDE_INT
) const_op
10040 < (((unsigned HOST_WIDE_INT
) 1
10041 << (GET_MODE_BITSIZE (mode
) - 1))))
10042 && cmp_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
10044 op0
= XEXP (op0
, 0);
10050 /* Check for the case where we are comparing A - C1 with C2, that is
10052 (subreg:MODE (plus (A) (-C1))) op (C2)
10054 with C1 a constant, and try to lift the SUBREG, i.e. to do the
10055 comparison in the wider mode. One of the following two conditions
10056 must be true in order for this to be valid:
10058 1. The mode extension results in the same bit pattern being added
10059 on both sides and the comparison is equality or unsigned. As
10060 C2 has been truncated to fit in MODE, the pattern can only be
10063 2. The mode extension results in the sign bit being copied on
10066 The difficulty here is that we have predicates for A but not for
10067 (A - C1) so we need to check that C1 is within proper bounds so
10068 as to perturbate A as little as possible. */
10070 if (mode_width
<= HOST_BITS_PER_WIDE_INT
10071 && subreg_lowpart_p (op0
)
10072 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0
))) > mode_width
10073 && GET_CODE (SUBREG_REG (op0
)) == PLUS
10074 && GET_CODE (XEXP (SUBREG_REG (op0
), 1)) == CONST_INT
)
10076 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (op0
));
10077 rtx a
= XEXP (SUBREG_REG (op0
), 0);
10078 HOST_WIDE_INT c1
= -INTVAL (XEXP (SUBREG_REG (op0
), 1));
10081 && (unsigned HOST_WIDE_INT
) c1
10082 < (unsigned HOST_WIDE_INT
) 1 << (mode_width
- 1)
10083 && (equality_comparison_p
|| unsigned_comparison_p
)
10084 /* (A - C1) zero-extends if it is positive and sign-extends
10085 if it is negative, C2 both zero- and sign-extends. */
10086 && ((0 == (nonzero_bits (a
, inner_mode
)
10087 & ~GET_MODE_MASK (mode
))
10089 /* (A - C1) sign-extends if it is positive and 1-extends
10090 if it is negative, C2 both sign- and 1-extends. */
10091 || (num_sign_bit_copies (a
, inner_mode
)
10092 > (unsigned int) (GET_MODE_BITSIZE (inner_mode
)
10095 || ((unsigned HOST_WIDE_INT
) c1
10096 < (unsigned HOST_WIDE_INT
) 1 << (mode_width
- 2)
10097 /* (A - C1) always sign-extends, like C2. */
10098 && num_sign_bit_copies (a
, inner_mode
)
10099 > (unsigned int) (GET_MODE_BITSIZE (inner_mode
)
10100 - (mode_width
- 1))))
10102 op0
= SUBREG_REG (op0
);
10107 /* If the inner mode is narrower and we are extracting the low part,
10108 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10109 if (subreg_lowpart_p (op0
)
10110 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0
))) < mode_width
)
10111 /* Fall through */ ;
10115 /* ... fall through ... */
10118 mode
= GET_MODE (XEXP (op0
, 0));
10119 if (mode
!= VOIDmode
&& GET_MODE_CLASS (mode
) == MODE_INT
10120 && (unsigned_comparison_p
|| equality_comparison_p
)
10121 && (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
10122 && ((unsigned HOST_WIDE_INT
) const_op
< GET_MODE_MASK (mode
))
10123 && cmp_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
10125 op0
= XEXP (op0
, 0);
10131 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10132 this for equality comparisons due to pathological cases involving
10134 if (equality_comparison_p
10135 && 0 != (tem
= simplify_binary_operation (MINUS
, mode
,
10136 op1
, XEXP (op0
, 1))))
10138 op0
= XEXP (op0
, 0);
10143 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10144 if (const_op
== 0 && XEXP (op0
, 1) == constm1_rtx
10145 && GET_CODE (XEXP (op0
, 0)) == ABS
&& sign_bit_comparison_p
)
10147 op0
= XEXP (XEXP (op0
, 0), 0);
10148 code
= (code
== LT
? EQ
: NE
);
10154 /* We used to optimize signed comparisons against zero, but that
10155 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10156 arrive here as equality comparisons, or (GEU, LTU) are
10157 optimized away. No need to special-case them. */
10159 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10160 (eq B (minus A C)), whichever simplifies. We can only do
10161 this for equality comparisons due to pathological cases involving
10163 if (equality_comparison_p
10164 && 0 != (tem
= simplify_binary_operation (PLUS
, mode
,
10165 XEXP (op0
, 1), op1
)))
10167 op0
= XEXP (op0
, 0);
10172 if (equality_comparison_p
10173 && 0 != (tem
= simplify_binary_operation (MINUS
, mode
,
10174 XEXP (op0
, 0), op1
)))
10176 op0
= XEXP (op0
, 1);
10181 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10182 of bits in X minus 1, is one iff X > 0. */
10183 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 0)) == ASHIFTRT
10184 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
10185 && (unsigned HOST_WIDE_INT
) INTVAL (XEXP (XEXP (op0
, 0), 1))
10187 && rtx_equal_p (XEXP (XEXP (op0
, 0), 0), XEXP (op0
, 1)))
10189 op0
= XEXP (op0
, 1);
10190 code
= (code
== GE
? LE
: GT
);
10196 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10197 if C is zero or B is a constant. */
10198 if (equality_comparison_p
10199 && 0 != (tem
= simplify_binary_operation (XOR
, mode
,
10200 XEXP (op0
, 1), op1
)))
10202 op0
= XEXP (op0
, 0);
10209 case UNEQ
: case LTGT
:
10210 case LT
: case LTU
: case UNLT
: case LE
: case LEU
: case UNLE
:
10211 case GT
: case GTU
: case UNGT
: case GE
: case GEU
: case UNGE
:
10212 case UNORDERED
: case ORDERED
:
10213 /* We can't do anything if OP0 is a condition code value, rather
10214 than an actual data value. */
10216 || CC0_P (XEXP (op0
, 0))
10217 || GET_MODE_CLASS (GET_MODE (XEXP (op0
, 0))) == MODE_CC
)
10220 /* Get the two operands being compared. */
10221 if (GET_CODE (XEXP (op0
, 0)) == COMPARE
)
10222 tem
= XEXP (XEXP (op0
, 0), 0), tem1
= XEXP (XEXP (op0
, 0), 1);
10224 tem
= XEXP (op0
, 0), tem1
= XEXP (op0
, 1);
10226 /* Check for the cases where we simply want the result of the
10227 earlier test or the opposite of that result. */
10228 if (code
== NE
|| code
== EQ
10229 || (GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
10230 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
10231 && (STORE_FLAG_VALUE
10232 & (((HOST_WIDE_INT
) 1
10233 << (GET_MODE_BITSIZE (GET_MODE (op0
)) - 1))))
10234 && (code
== LT
|| code
== GE
)))
10236 enum rtx_code new_code
;
10237 if (code
== LT
|| code
== NE
)
10238 new_code
= GET_CODE (op0
);
10240 new_code
= reversed_comparison_code (op0
, NULL
);
10242 if (new_code
!= UNKNOWN
)
10253 /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
10255 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 0)) == PLUS
10256 && XEXP (XEXP (op0
, 0), 1) == constm1_rtx
10257 && rtx_equal_p (XEXP (XEXP (op0
, 0), 0), XEXP (op0
, 1)))
10259 op0
= XEXP (op0
, 1);
10260 code
= (code
== GE
? GT
: LE
);
10266 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10267 will be converted to a ZERO_EXTRACT later. */
10268 if (const_op
== 0 && equality_comparison_p
10269 && GET_CODE (XEXP (op0
, 0)) == ASHIFT
10270 && XEXP (XEXP (op0
, 0), 0) == const1_rtx
)
10272 op0
= simplify_and_const_int
10273 (NULL_RTX
, mode
, gen_rtx_LSHIFTRT (mode
,
10275 XEXP (XEXP (op0
, 0), 1)),
10276 (HOST_WIDE_INT
) 1);
10280 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10281 zero and X is a comparison and C1 and C2 describe only bits set
10282 in STORE_FLAG_VALUE, we can compare with X. */
10283 if (const_op
== 0 && equality_comparison_p
10284 && mode_width
<= HOST_BITS_PER_WIDE_INT
10285 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10286 && GET_CODE (XEXP (op0
, 0)) == LSHIFTRT
10287 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
10288 && INTVAL (XEXP (XEXP (op0
, 0), 1)) >= 0
10289 && INTVAL (XEXP (XEXP (op0
, 0), 1)) < HOST_BITS_PER_WIDE_INT
)
10291 mask
= ((INTVAL (XEXP (op0
, 1)) & GET_MODE_MASK (mode
))
10292 << INTVAL (XEXP (XEXP (op0
, 0), 1)));
10293 if ((~STORE_FLAG_VALUE
& mask
) == 0
10294 && (COMPARISON_P (XEXP (XEXP (op0
, 0), 0))
10295 || ((tem
= get_last_value (XEXP (XEXP (op0
, 0), 0))) != 0
10296 && COMPARISON_P (tem
))))
10298 op0
= XEXP (XEXP (op0
, 0), 0);
10303 /* If we are doing an equality comparison of an AND of a bit equal
10304 to the sign bit, replace this with a LT or GE comparison of
10305 the underlying value. */
10306 if (equality_comparison_p
10308 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10309 && mode_width
<= HOST_BITS_PER_WIDE_INT
10310 && ((INTVAL (XEXP (op0
, 1)) & GET_MODE_MASK (mode
))
10311 == (unsigned HOST_WIDE_INT
) 1 << (mode_width
- 1)))
10313 op0
= XEXP (op0
, 0);
10314 code
= (code
== EQ
? GE
: LT
);
10318 /* If this AND operation is really a ZERO_EXTEND from a narrower
10319 mode, the constant fits within that mode, and this is either an
10320 equality or unsigned comparison, try to do this comparison in
10325 (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
10326 -> (ne:DI (reg:SI 4) (const_int 0))
10328 unless TRULY_NOOP_TRUNCATION allows it or the register is
10329 known to hold a value of the required mode the
10330 transformation is invalid. */
10331 if ((equality_comparison_p
|| unsigned_comparison_p
)
10332 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10333 && (i
= exact_log2 ((INTVAL (XEXP (op0
, 1))
10334 & GET_MODE_MASK (mode
))
10336 && const_op
>> i
== 0
10337 && (tmode
= mode_for_size (i
, MODE_INT
, 1)) != BLKmode
10338 && (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode
),
10339 GET_MODE_BITSIZE (GET_MODE (op0
)))
10340 || (REG_P (XEXP (op0
, 0))
10341 && reg_truncated_to_mode (tmode
, XEXP (op0
, 0)))))
10343 op0
= gen_lowpart (tmode
, XEXP (op0
, 0));
10347 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
10348 fits in both M1 and M2 and the SUBREG is either paradoxical
10349 or represents the low part, permute the SUBREG and the AND
10351 if (GET_CODE (XEXP (op0
, 0)) == SUBREG
)
10353 unsigned HOST_WIDE_INT c1
;
10354 tmode
= GET_MODE (SUBREG_REG (XEXP (op0
, 0)));
10355 /* Require an integral mode, to avoid creating something like
10357 if (SCALAR_INT_MODE_P (tmode
)
10358 /* It is unsafe to commute the AND into the SUBREG if the
10359 SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
10360 not defined. As originally written the upper bits
10361 have a defined value due to the AND operation.
10362 However, if we commute the AND inside the SUBREG then
10363 they no longer have defined values and the meaning of
10364 the code has been changed. */
10366 #ifdef WORD_REGISTER_OPERATIONS
10367 || (mode_width
> GET_MODE_BITSIZE (tmode
)
10368 && mode_width
<= BITS_PER_WORD
)
10370 || (mode_width
<= GET_MODE_BITSIZE (tmode
)
10371 && subreg_lowpart_p (XEXP (op0
, 0))))
10372 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10373 && mode_width
<= HOST_BITS_PER_WIDE_INT
10374 && GET_MODE_BITSIZE (tmode
) <= HOST_BITS_PER_WIDE_INT
10375 && ((c1
= INTVAL (XEXP (op0
, 1))) & ~mask
) == 0
10376 && (c1
& ~GET_MODE_MASK (tmode
)) == 0
10378 && c1
!= GET_MODE_MASK (tmode
))
10380 op0
= simplify_gen_binary (AND
, tmode
,
10381 SUBREG_REG (XEXP (op0
, 0)),
10382 gen_int_mode (c1
, tmode
));
10383 op0
= gen_lowpart (mode
, op0
);
10388 /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */
10389 if (const_op
== 0 && equality_comparison_p
10390 && XEXP (op0
, 1) == const1_rtx
10391 && GET_CODE (XEXP (op0
, 0)) == NOT
)
10393 op0
= simplify_and_const_int
10394 (NULL_RTX
, mode
, XEXP (XEXP (op0
, 0), 0), (HOST_WIDE_INT
) 1);
10395 code
= (code
== NE
? EQ
: NE
);
10399 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10400 (eq (and (lshiftrt X) 1) 0).
10401 Also handle the case where (not X) is expressed using xor. */
10402 if (const_op
== 0 && equality_comparison_p
10403 && XEXP (op0
, 1) == const1_rtx
10404 && GET_CODE (XEXP (op0
, 0)) == LSHIFTRT
)
10406 rtx shift_op
= XEXP (XEXP (op0
, 0), 0);
10407 rtx shift_count
= XEXP (XEXP (op0
, 0), 1);
10409 if (GET_CODE (shift_op
) == NOT
10410 || (GET_CODE (shift_op
) == XOR
10411 && GET_CODE (XEXP (shift_op
, 1)) == CONST_INT
10412 && GET_CODE (shift_count
) == CONST_INT
10413 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
10414 && (INTVAL (XEXP (shift_op
, 1))
10415 == (HOST_WIDE_INT
) 1 << INTVAL (shift_count
))))
10417 op0
= simplify_and_const_int
10419 gen_rtx_LSHIFTRT (mode
, XEXP (shift_op
, 0), shift_count
),
10420 (HOST_WIDE_INT
) 1);
10421 code
= (code
== NE
? EQ
: NE
);
10428 /* If we have (compare (ashift FOO N) (const_int C)) and
10429 the high order N bits of FOO (N+1 if an inequality comparison)
10430 are known to be zero, we can do this by comparing FOO with C
10431 shifted right N bits so long as the low-order N bits of C are
10433 if (GET_CODE (XEXP (op0
, 1)) == CONST_INT
10434 && INTVAL (XEXP (op0
, 1)) >= 0
10435 && ((INTVAL (XEXP (op0
, 1)) + ! equality_comparison_p
)
10436 < HOST_BITS_PER_WIDE_INT
)
10438 & (((HOST_WIDE_INT
) 1 << INTVAL (XEXP (op0
, 1))) - 1)) == 0)
10439 && mode_width
<= HOST_BITS_PER_WIDE_INT
10440 && (nonzero_bits (XEXP (op0
, 0), mode
)
10441 & ~(mask
>> (INTVAL (XEXP (op0
, 1))
10442 + ! equality_comparison_p
))) == 0)
10444 /* We must perform a logical shift, not an arithmetic one,
10445 as we want the top N bits of C to be zero. */
10446 unsigned HOST_WIDE_INT temp
= const_op
& GET_MODE_MASK (mode
);
10448 temp
>>= INTVAL (XEXP (op0
, 1));
10449 op1
= gen_int_mode (temp
, mode
);
10450 op0
= XEXP (op0
, 0);
10454 /* If we are doing a sign bit comparison, it means we are testing
10455 a particular bit. Convert it to the appropriate AND. */
10456 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
10457 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
10459 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
10462 - INTVAL (XEXP (op0
, 1)))));
10463 code
= (code
== LT
? NE
: EQ
);
10467 /* If this an equality comparison with zero and we are shifting
10468 the low bit to the sign bit, we can convert this to an AND of the
10470 if (const_op
== 0 && equality_comparison_p
10471 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10472 && (unsigned HOST_WIDE_INT
) INTVAL (XEXP (op0
, 1))
10475 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
10476 (HOST_WIDE_INT
) 1);
10482 /* If this is an equality comparison with zero, we can do this
10483 as a logical shift, which might be much simpler. */
10484 if (equality_comparison_p
&& const_op
== 0
10485 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
10487 op0
= simplify_shift_const (NULL_RTX
, LSHIFTRT
, mode
,
10489 INTVAL (XEXP (op0
, 1)));
10493 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10494 do the comparison in a narrower mode. */
10495 if (! unsigned_comparison_p
10496 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10497 && GET_CODE (XEXP (op0
, 0)) == ASHIFT
10498 && XEXP (op0
, 1) == XEXP (XEXP (op0
, 0), 1)
10499 && (tmode
= mode_for_size (mode_width
- INTVAL (XEXP (op0
, 1)),
10500 MODE_INT
, 1)) != BLKmode
10501 && (((unsigned HOST_WIDE_INT
) const_op
10502 + (GET_MODE_MASK (tmode
) >> 1) + 1)
10503 <= GET_MODE_MASK (tmode
)))
10505 op0
= gen_lowpart (tmode
, XEXP (XEXP (op0
, 0), 0));
10509 /* Likewise if OP0 is a PLUS of a sign extension with a
10510 constant, which is usually represented with the PLUS
10511 between the shifts. */
10512 if (! unsigned_comparison_p
10513 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10514 && GET_CODE (XEXP (op0
, 0)) == PLUS
10515 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
10516 && GET_CODE (XEXP (XEXP (op0
, 0), 0)) == ASHIFT
10517 && XEXP (op0
, 1) == XEXP (XEXP (XEXP (op0
, 0), 0), 1)
10518 && (tmode
= mode_for_size (mode_width
- INTVAL (XEXP (op0
, 1)),
10519 MODE_INT
, 1)) != BLKmode
10520 && (((unsigned HOST_WIDE_INT
) const_op
10521 + (GET_MODE_MASK (tmode
) >> 1) + 1)
10522 <= GET_MODE_MASK (tmode
)))
10524 rtx inner
= XEXP (XEXP (XEXP (op0
, 0), 0), 0);
10525 rtx add_const
= XEXP (XEXP (op0
, 0), 1);
10526 rtx new_const
= simplify_gen_binary (ASHIFTRT
, GET_MODE (op0
),
10527 add_const
, XEXP (op0
, 1));
10529 op0
= simplify_gen_binary (PLUS
, tmode
,
10530 gen_lowpart (tmode
, inner
),
10535 /* ... fall through ... */
10537 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10538 the low order N bits of FOO are known to be zero, we can do this
10539 by comparing FOO with C shifted left N bits so long as no
10540 overflow occurs. */
10541 if (GET_CODE (XEXP (op0
, 1)) == CONST_INT
10542 && INTVAL (XEXP (op0
, 1)) >= 0
10543 && INTVAL (XEXP (op0
, 1)) < HOST_BITS_PER_WIDE_INT
10544 && mode_width
<= HOST_BITS_PER_WIDE_INT
10545 && (nonzero_bits (XEXP (op0
, 0), mode
)
10546 & (((HOST_WIDE_INT
) 1 << INTVAL (XEXP (op0
, 1))) - 1)) == 0
10547 && (((unsigned HOST_WIDE_INT
) const_op
10548 + (GET_CODE (op0
) != LSHIFTRT
10549 ? ((GET_MODE_MASK (mode
) >> INTVAL (XEXP (op0
, 1)) >> 1)
10552 <= GET_MODE_MASK (mode
) >> INTVAL (XEXP (op0
, 1))))
10554 /* If the shift was logical, then we must make the condition
10556 if (GET_CODE (op0
) == LSHIFTRT
)
10557 code
= unsigned_condition (code
);
10559 const_op
<<= INTVAL (XEXP (op0
, 1));
10560 op1
= GEN_INT (const_op
);
10561 op0
= XEXP (op0
, 0);
10565 /* If we are using this shift to extract just the sign bit, we
10566 can replace this with an LT or GE comparison. */
10568 && (equality_comparison_p
|| sign_bit_comparison_p
)
10569 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
10570 && (unsigned HOST_WIDE_INT
) INTVAL (XEXP (op0
, 1))
10573 op0
= XEXP (op0
, 0);
10574 code
= (code
== NE
|| code
== GT
? LT
: GE
);
10586 /* Now make any compound operations involved in this comparison. Then,
10587 check for an outermost SUBREG on OP0 that is not doing anything or is
10588 paradoxical. The latter transformation must only be performed when
10589 it is known that the "extra" bits will be the same in op0 and op1 or
10590 that they don't matter. There are three cases to consider:
10592 1. SUBREG_REG (op0) is a register. In this case the bits are don't
10593 care bits and we can assume they have any convenient value. So
10594 making the transformation is safe.
10596 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
10597 In this case the upper bits of op0 are undefined. We should not make
10598 the simplification in that case as we do not know the contents of
10601 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
10602 UNKNOWN. In that case we know those bits are zeros or ones. We must
10603 also be sure that they are the same as the upper bits of op1.
10605 We can never remove a SUBREG for a non-equality comparison because
10606 the sign bit is in a different place in the underlying object. */
10608 op0
= make_compound_operation (op0
, op1
== const0_rtx
? COMPARE
: SET
);
10609 op1
= make_compound_operation (op1
, SET
);
10611 if (GET_CODE (op0
) == SUBREG
&& subreg_lowpart_p (op0
)
10612 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
10613 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0
))) == MODE_INT
10614 && (code
== NE
|| code
== EQ
))
10616 if (GET_MODE_SIZE (GET_MODE (op0
))
10617 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0
))))
10619 /* For paradoxical subregs, allow case 1 as above. Case 3 isn't
10621 if (REG_P (SUBREG_REG (op0
)))
10623 op0
= SUBREG_REG (op0
);
10624 op1
= gen_lowpart (GET_MODE (op0
), op1
);
10627 else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0
)))
10628 <= HOST_BITS_PER_WIDE_INT
)
10629 && (nonzero_bits (SUBREG_REG (op0
),
10630 GET_MODE (SUBREG_REG (op0
)))
10631 & ~GET_MODE_MASK (GET_MODE (op0
))) == 0)
10633 tem
= gen_lowpart (GET_MODE (SUBREG_REG (op0
)), op1
);
10635 if ((nonzero_bits (tem
, GET_MODE (SUBREG_REG (op0
)))
10636 & ~GET_MODE_MASK (GET_MODE (op0
))) == 0)
10637 op0
= SUBREG_REG (op0
), op1
= tem
;
10641 /* We now do the opposite procedure: Some machines don't have compare
10642 insns in all modes. If OP0's mode is an integer mode smaller than a
10643 word and we can't do a compare in that mode, see if there is a larger
10644 mode for which we can do the compare. There are a number of cases in
10645 which we can use the wider mode. */
10647 mode
= GET_MODE (op0
);
10648 if (mode
!= VOIDmode
&& GET_MODE_CLASS (mode
) == MODE_INT
10649 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
10650 && ! have_insn_for (COMPARE
, mode
))
10651 for (tmode
= GET_MODE_WIDER_MODE (mode
);
10653 && GET_MODE_BITSIZE (tmode
) <= HOST_BITS_PER_WIDE_INT
);
10654 tmode
= GET_MODE_WIDER_MODE (tmode
))
10655 if (have_insn_for (COMPARE
, tmode
))
10659 /* If the only nonzero bits in OP0 and OP1 are those in the
10660 narrower mode and this is an equality or unsigned comparison,
10661 we can use the wider mode. Similarly for sign-extended
10662 values, in which case it is true for all comparisons. */
10663 zero_extended
= ((code
== EQ
|| code
== NE
10664 || code
== GEU
|| code
== GTU
10665 || code
== LEU
|| code
== LTU
)
10666 && (nonzero_bits (op0
, tmode
)
10667 & ~GET_MODE_MASK (mode
)) == 0
10668 && ((GET_CODE (op1
) == CONST_INT
10669 || (nonzero_bits (op1
, tmode
)
10670 & ~GET_MODE_MASK (mode
)) == 0)));
10673 || ((num_sign_bit_copies (op0
, tmode
)
10674 > (unsigned int) (GET_MODE_BITSIZE (tmode
)
10675 - GET_MODE_BITSIZE (mode
)))
10676 && (num_sign_bit_copies (op1
, tmode
)
10677 > (unsigned int) (GET_MODE_BITSIZE (tmode
)
10678 - GET_MODE_BITSIZE (mode
)))))
10680 /* If OP0 is an AND and we don't have an AND in MODE either,
10681 make a new AND in the proper mode. */
10682 if (GET_CODE (op0
) == AND
10683 && !have_insn_for (AND
, mode
))
10684 op0
= simplify_gen_binary (AND
, tmode
,
10685 gen_lowpart (tmode
,
10687 gen_lowpart (tmode
,
10690 op0
= gen_lowpart (tmode
, op0
);
10691 if (zero_extended
&& GET_CODE (op1
) == CONST_INT
)
10692 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (mode
));
10693 op1
= gen_lowpart (tmode
, op1
);
10697 /* If this is a test for negative, we can make an explicit
10698 test of the sign bit. */
10700 if (op1
== const0_rtx
&& (code
== LT
|| code
== GE
)
10701 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
10703 op0
= simplify_gen_binary (AND
, tmode
,
10704 gen_lowpart (tmode
, op0
),
10705 GEN_INT ((HOST_WIDE_INT
) 1
10706 << (GET_MODE_BITSIZE (mode
)
10708 code
= (code
== LT
) ? NE
: EQ
;
10713 #ifdef CANONICALIZE_COMPARISON
10714 /* If this machine only supports a subset of valid comparisons, see if we
10715 can convert an unsupported one into a supported one. */
10716 CANONICALIZE_COMPARISON (code
, op0
, op1
);
10725 /* Utility function for record_value_for_reg. Count number of
10730 enum rtx_code code
= GET_CODE (x
);
10734 if (GET_RTX_CLASS (code
) == '2'
10735 || GET_RTX_CLASS (code
) == 'c')
10737 rtx x0
= XEXP (x
, 0);
10738 rtx x1
= XEXP (x
, 1);
10741 return 1 + 2 * count_rtxs (x0
);
10743 if ((GET_RTX_CLASS (GET_CODE (x1
)) == '2'
10744 || GET_RTX_CLASS (GET_CODE (x1
)) == 'c')
10745 && (x0
== XEXP (x1
, 0) || x0
== XEXP (x1
, 1)))
10746 return 2 + 2 * count_rtxs (x0
)
10747 + count_rtxs (x
== XEXP (x1
, 0)
10748 ? XEXP (x1
, 1) : XEXP (x1
, 0));
10750 if ((GET_RTX_CLASS (GET_CODE (x0
)) == '2'
10751 || GET_RTX_CLASS (GET_CODE (x0
)) == 'c')
10752 && (x1
== XEXP (x0
, 0) || x1
== XEXP (x0
, 1)))
10753 return 2 + 2 * count_rtxs (x1
)
10754 + count_rtxs (x
== XEXP (x0
, 0)
10755 ? XEXP (x0
, 1) : XEXP (x0
, 0));
10758 fmt
= GET_RTX_FORMAT (code
);
10759 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
10761 ret
+= count_rtxs (XEXP (x
, i
));
10766 /* Utility function for following routine. Called when X is part of a value
10767 being stored into last_set_value. Sets last_set_table_tick
10768 for each register mentioned. Similar to mention_regs in cse.c */
10771 update_table_tick (rtx x
)
10773 enum rtx_code code
= GET_CODE (x
);
10774 const char *fmt
= GET_RTX_FORMAT (code
);
10779 unsigned int regno
= REGNO (x
);
10780 unsigned int endregno
10781 = regno
+ (regno
< FIRST_PSEUDO_REGISTER
10782 ? hard_regno_nregs
[regno
][GET_MODE (x
)] : 1);
10785 for (r
= regno
; r
< endregno
; r
++)
10786 reg_stat
[r
].last_set_table_tick
= label_tick
;
10791 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
10792 /* Note that we can't have an "E" in values stored; see
10793 get_last_value_validate. */
10796 /* Check for identical subexpressions. If x contains
10797 identical subexpression we only have to traverse one of
10799 if (i
== 0 && ARITHMETIC_P (x
))
10801 /* Note that at this point x1 has already been
10803 rtx x0
= XEXP (x
, 0);
10804 rtx x1
= XEXP (x
, 1);
10806 /* If x0 and x1 are identical then there is no need to
10811 /* If x0 is identical to a subexpression of x1 then while
10812 processing x1, x0 has already been processed. Thus we
10813 are done with x. */
10814 if (ARITHMETIC_P (x1
)
10815 && (x0
== XEXP (x1
, 0) || x0
== XEXP (x1
, 1)))
10818 /* If x1 is identical to a subexpression of x0 then we
10819 still have to process the rest of x0. */
10820 if (ARITHMETIC_P (x0
)
10821 && (x1
== XEXP (x0
, 0) || x1
== XEXP (x0
, 1)))
10823 update_table_tick (XEXP (x0
, x1
== XEXP (x0
, 0) ? 1 : 0));
10828 update_table_tick (XEXP (x
, i
));
10832 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
10833 are saying that the register is clobbered and we no longer know its
10834 value. If INSN is zero, don't update reg_stat[].last_set; this is
10835 only permitted with VALUE also zero and is used to invalidate the
10839 record_value_for_reg (rtx reg
, rtx insn
, rtx value
)
10841 unsigned int regno
= REGNO (reg
);
10842 unsigned int endregno
10843 = regno
+ (regno
< FIRST_PSEUDO_REGISTER
10844 ? hard_regno_nregs
[regno
][GET_MODE (reg
)] : 1);
10847 /* If VALUE contains REG and we have a previous value for REG, substitute
10848 the previous value. */
10849 if (value
&& insn
&& reg_overlap_mentioned_p (reg
, value
))
10853 /* Set things up so get_last_value is allowed to see anything set up to
10855 subst_low_cuid
= INSN_CUID (insn
);
10856 tem
= get_last_value (reg
);
10858 /* If TEM is simply a binary operation with two CLOBBERs as operands,
10859 it isn't going to be useful and will take a lot of time to process,
10860 so just use the CLOBBER. */
10864 if (ARITHMETIC_P (tem
)
10865 && GET_CODE (XEXP (tem
, 0)) == CLOBBER
10866 && GET_CODE (XEXP (tem
, 1)) == CLOBBER
)
10867 tem
= XEXP (tem
, 0);
10868 else if (count_occurrences (value
, reg
, 1) >= 2)
10870 /* If there are two or more occurrences of REG in VALUE,
10871 prevent the value from growing too much. */
10872 if (count_rtxs (tem
) > MAX_LAST_VALUE_RTL
)
10873 tem
= gen_rtx_CLOBBER (GET_MODE (tem
), const0_rtx
);
10876 value
= replace_rtx (copy_rtx (value
), reg
, tem
);
10880 /* For each register modified, show we don't know its value, that
10881 we don't know about its bitwise content, that its value has been
10882 updated, and that we don't know the location of the death of the
10884 for (i
= regno
; i
< endregno
; i
++)
10887 reg_stat
[i
].last_set
= insn
;
10889 reg_stat
[i
].last_set_value
= 0;
10890 reg_stat
[i
].last_set_mode
= 0;
10891 reg_stat
[i
].last_set_nonzero_bits
= 0;
10892 reg_stat
[i
].last_set_sign_bit_copies
= 0;
10893 reg_stat
[i
].last_death
= 0;
10894 reg_stat
[i
].truncated_to_mode
= 0;
10897 /* Mark registers that are being referenced in this value. */
10899 update_table_tick (value
);
10901 /* Now update the status of each register being set.
10902 If someone is using this register in this block, set this register
10903 to invalid since we will get confused between the two lives in this
10904 basic block. This makes using this register always invalid. In cse, we
10905 scan the table to invalidate all entries using this register, but this
10906 is too much work for us. */
10908 for (i
= regno
; i
< endregno
; i
++)
10910 reg_stat
[i
].last_set_label
= label_tick
;
10911 if (!insn
|| (value
&& reg_stat
[i
].last_set_table_tick
== label_tick
))
10912 reg_stat
[i
].last_set_invalid
= 1;
10914 reg_stat
[i
].last_set_invalid
= 0;
10917 /* The value being assigned might refer to X (like in "x++;"). In that
10918 case, we must replace it with (clobber (const_int 0)) to prevent
10920 if (value
&& ! get_last_value_validate (&value
, insn
,
10921 reg_stat
[regno
].last_set_label
, 0))
10923 value
= copy_rtx (value
);
10924 if (! get_last_value_validate (&value
, insn
,
10925 reg_stat
[regno
].last_set_label
, 1))
10929 /* For the main register being modified, update the value, the mode, the
10930 nonzero bits, and the number of sign bit copies. */
10932 reg_stat
[regno
].last_set_value
= value
;
10936 enum machine_mode mode
= GET_MODE (reg
);
10937 subst_low_cuid
= INSN_CUID (insn
);
10938 reg_stat
[regno
].last_set_mode
= mode
;
10939 if (GET_MODE_CLASS (mode
) == MODE_INT
10940 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
10941 mode
= nonzero_bits_mode
;
10942 reg_stat
[regno
].last_set_nonzero_bits
= nonzero_bits (value
, mode
);
10943 reg_stat
[regno
].last_set_sign_bit_copies
10944 = num_sign_bit_copies (value
, GET_MODE (reg
));
10948 /* Called via note_stores from record_dead_and_set_regs to handle one
10949 SET or CLOBBER in an insn. DATA is the instruction in which the
10950 set is occurring. */
10953 record_dead_and_set_regs_1 (rtx dest
, rtx setter
, void *data
)
10955 rtx record_dead_insn
= (rtx
) data
;
10957 if (GET_CODE (dest
) == SUBREG
)
10958 dest
= SUBREG_REG (dest
);
10960 if (!record_dead_insn
)
10963 record_value_for_reg (dest
, NULL_RTX
, NULL_RTX
);
10969 /* If we are setting the whole register, we know its value. Otherwise
10970 show that we don't know the value. We can handle SUBREG in
10972 if (GET_CODE (setter
) == SET
&& dest
== SET_DEST (setter
))
10973 record_value_for_reg (dest
, record_dead_insn
, SET_SRC (setter
));
10974 else if (GET_CODE (setter
) == SET
10975 && GET_CODE (SET_DEST (setter
)) == SUBREG
10976 && SUBREG_REG (SET_DEST (setter
)) == dest
10977 && GET_MODE_BITSIZE (GET_MODE (dest
)) <= BITS_PER_WORD
10978 && subreg_lowpart_p (SET_DEST (setter
)))
10979 record_value_for_reg (dest
, record_dead_insn
,
10980 gen_lowpart (GET_MODE (dest
),
10981 SET_SRC (setter
)));
10983 record_value_for_reg (dest
, record_dead_insn
, NULL_RTX
);
10985 else if (MEM_P (dest
)
10986 /* Ignore pushes, they clobber nothing. */
10987 && ! push_operand (dest
, GET_MODE (dest
)))
10988 mem_last_set
= INSN_CUID (record_dead_insn
);
10991 /* Update the records of when each REG was most recently set or killed
10992 for the things done by INSN. This is the last thing done in processing
10993 INSN in the combiner loop.
10995 We update reg_stat[], in particular fields last_set, last_set_value,
10996 last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
10997 last_death, and also the similar information mem_last_set (which insn
10998 most recently modified memory) and last_call_cuid (which insn was the
10999 most recent subroutine call). */
11002 record_dead_and_set_regs (rtx insn
)
11007 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
11009 if (REG_NOTE_KIND (link
) == REG_DEAD
11010 && REG_P (XEXP (link
, 0)))
11012 unsigned int regno
= REGNO (XEXP (link
, 0));
11013 unsigned int endregno
11014 = regno
+ (regno
< FIRST_PSEUDO_REGISTER
11015 ? hard_regno_nregs
[regno
][GET_MODE (XEXP (link
, 0))]
11018 for (i
= regno
; i
< endregno
; i
++)
11019 reg_stat
[i
].last_death
= insn
;
11021 else if (REG_NOTE_KIND (link
) == REG_INC
)
11022 record_value_for_reg (XEXP (link
, 0), insn
, NULL_RTX
);
11027 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
11028 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, i
))
11030 reg_stat
[i
].last_set_value
= 0;
11031 reg_stat
[i
].last_set_mode
= 0;
11032 reg_stat
[i
].last_set_nonzero_bits
= 0;
11033 reg_stat
[i
].last_set_sign_bit_copies
= 0;
11034 reg_stat
[i
].last_death
= 0;
11035 reg_stat
[i
].truncated_to_mode
= 0;
11038 last_call_cuid
= mem_last_set
= INSN_CUID (insn
);
11040 /* We can't combine into a call pattern. Remember, though, that
11041 the return value register is set at this CUID. We could
11042 still replace a register with the return value from the
11043 wrong subroutine call! */
11044 note_stores (PATTERN (insn
), record_dead_and_set_regs_1
, NULL_RTX
);
11047 note_stores (PATTERN (insn
), record_dead_and_set_regs_1
, insn
);
11050 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11051 register present in the SUBREG, so for each such SUBREG go back and
11052 adjust nonzero and sign bit information of the registers that are
11053 known to have some zero/sign bits set.
11055 This is needed because when combine blows the SUBREGs away, the
11056 information on zero/sign bits is lost and further combines can be
11057 missed because of that. */
11060 record_promoted_value (rtx insn
, rtx subreg
)
11063 unsigned int regno
= REGNO (SUBREG_REG (subreg
));
11064 enum machine_mode mode
= GET_MODE (subreg
);
11066 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
11069 for (links
= LOG_LINKS (insn
); links
;)
11071 insn
= XEXP (links
, 0);
11072 set
= single_set (insn
);
11074 if (! set
|| !REG_P (SET_DEST (set
))
11075 || REGNO (SET_DEST (set
)) != regno
11076 || GET_MODE (SET_DEST (set
)) != GET_MODE (SUBREG_REG (subreg
)))
11078 links
= XEXP (links
, 1);
11082 if (reg_stat
[regno
].last_set
== insn
)
11084 if (SUBREG_PROMOTED_UNSIGNED_P (subreg
) > 0)
11085 reg_stat
[regno
].last_set_nonzero_bits
&= GET_MODE_MASK (mode
);
11088 if (REG_P (SET_SRC (set
)))
11090 regno
= REGNO (SET_SRC (set
));
11091 links
= LOG_LINKS (insn
);
11098 /* Check if X, a register, is known to contain a value already
11099 truncated to MODE. In this case we can use a subreg to refer to
11100 the truncated value even though in the generic case we would need
11101 an explicit truncation. */
/* NOTE(review): this extraction is lossy -- the function's storage
   class, return type, braces and return statements are not visible
   here, so the comments below describe only the visible fragments.  */
11104 reg_truncated_to_mode (enum machine_mode mode
, rtx x
)
/* Mode X was last recorded as truncated to; 0 means no record.  */
11106 enum machine_mode truncated
= reg_stat
[REGNO (x
)].truncated_to_mode
;
/* A record made under a different label tick is stale and cannot be
   trusted (consequent not visible in this extraction).  */
11108 if (truncated
== 0 || reg_stat
[REGNO (x
)].truncation_label
!= label_tick
)
/* A recorded truncation to a mode no wider than MODE also covers
   MODE (consequent not visible in this extraction).  */
11110 if (GET_MODE_SIZE (truncated
) <= GET_MODE_SIZE (mode
))
/* Otherwise ask whether narrowing from TRUNCATED's bit size to
   MODE's is a no-op on this target (consequent not visible).  */
11112 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
11113 GET_MODE_BITSIZE (truncated
)))
11118 /* X is a REG or a SUBREG. If X is some sort of a truncation record
11119 it. For non-TRULY_NOOP_TRUNCATION targets we might be able to turn
11120 a truncate into a subreg using this information. */
11123 record_truncated_value (rtx x
)
11125 enum machine_mode truncated_mode
;
11127 if (GET_CODE (x
) == SUBREG
&& REG_P (SUBREG_REG (x
)))
11129 enum machine_mode original_mode
= GET_MODE (SUBREG_REG (x
));
11130 truncated_mode
= GET_MODE (x
);
11132 if (GET_MODE_SIZE (original_mode
) <= GET_MODE_SIZE (truncated_mode
))
11135 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (truncated_mode
),
11136 GET_MODE_BITSIZE (original_mode
)))
11139 x
= SUBREG_REG (x
);
11141 /* ??? For hard-regs we now record everything. We might be able to
11142 optimize this using last_set_mode. */
11143 else if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
11144 truncated_mode
= GET_MODE (x
);
11148 if (reg_stat
[REGNO (x
)].truncated_to_mode
== 0
11149 || reg_stat
[REGNO (x
)].truncation_label
< label_tick
11150 || (GET_MODE_SIZE (truncated_mode
)
11151 < GET_MODE_SIZE (reg_stat
[REGNO (x
)].truncated_to_mode
)))
11153 reg_stat
[REGNO (x
)].truncated_to_mode
= truncated_mode
;
11154 reg_stat
[REGNO (x
)].truncation_label
= label_tick
;
11158 /* Scan X for promoted SUBREGs and truncated REGs. For each one
11159 found, note what it implies to the registers used in it. */
/* NOTE(review): lossy extraction -- return type, braces and several
   interior lines are missing; comments cover visible fragments only.  */
11162 check_conversions (rtx insn
, rtx x
)
/* Leaf case: X is a SUBREG or a bare REG.  */
11164 if (GET_CODE (x
) == SUBREG
|| REG_P (x
))
/* A promoted SUBREG of a REG carries zero/sign-extension knowledge
   about the inner register; record it.  */
11166 if (GET_CODE (x
) == SUBREG
11167 && SUBREG_PROMOTED_VAR_P (x
)
11168 && REG_P (SUBREG_REG (x
)))
11169 record_promoted_value (insn
, x
);
/* Either way, note any truncation this REG/SUBREG implies.  */
11171 record_truncated_value (x
);
/* Otherwise recurse over X's operands, driven by the rtx format.  */
11175 const char *format
= GET_RTX_FORMAT (GET_CODE (x
));
11178 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (x
)); i
++)
/* 'e' operand: recurse into the sub-expression (the format test
   itself is not visible in this extraction).  */
11182 check_conversions (insn
, XEXP (x
, i
));
/* Vector operand: recurse into each element.  */
11186 if (XVEC (x
, i
) != 0)
11187 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
11188 check_conversions (insn
, XVECEXP (x
, i
, j
));
11194 /* Utility routine for the following function. Verify that all the registers
11195 mentioned in *LOC are valid when *LOC was part of a value set when
11196 label_tick == TICK. Return 0 if some are not.
11198 If REPLACE is nonzero, replace the invalid reference with
11199 (clobber (const_int 0)) and return 1. This replacement is useful because
11200 we often can get useful information about the form of a value (e.g., if
11201 it was produced by a shift that always produces -1 or 0) even though
11202 we don't know exactly what registers it was produced from. */
11205 get_last_value_validate (rtx
*loc
, rtx insn
, int tick
, int replace
)
11208 const char *fmt
= GET_RTX_FORMAT (GET_CODE (x
));
11209 int len
= GET_RTX_LENGTH (GET_CODE (x
));
11214 unsigned int regno
= REGNO (x
);
11215 unsigned int endregno
11216 = regno
+ (regno
< FIRST_PSEUDO_REGISTER
11217 ? hard_regno_nregs
[regno
][GET_MODE (x
)] : 1);
11220 for (j
= regno
; j
< endregno
; j
++)
11221 if (reg_stat
[j
].last_set_invalid
11222 /* If this is a pseudo-register that was only set once and not
11223 live at the beginning of the function, it is always valid. */
11224 || (! (regno
>= FIRST_PSEUDO_REGISTER
11225 && REG_N_SETS (regno
) == 1
11226 && (! REGNO_REG_SET_P
11227 (ENTRY_BLOCK_PTR
->next_bb
->il
.rtl
->global_live_at_start
,
11229 && reg_stat
[j
].last_set_label
> tick
))
11232 *loc
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
11238 /* If this is a memory reference, make sure that there were
11239 no stores after it that might have clobbered the value. We don't
11240 have alias info, so we assume any store invalidates it. */
11241 else if (MEM_P (x
) && !MEM_READONLY_P (x
)
11242 && INSN_CUID (insn
) <= mem_last_set
)
11245 *loc
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
11249 for (i
= 0; i
< len
; i
++)
11253 /* Check for identical subexpressions. If x contains
11254 identical subexpression we only have to traverse one of
11256 if (i
== 1 && ARITHMETIC_P (x
))
11258 /* Note that at this point x0 has already been checked
11259 and found valid. */
11260 rtx x0
= XEXP (x
, 0);
11261 rtx x1
= XEXP (x
, 1);
11263 /* If x0 and x1 are identical then x is also valid. */
11267 /* If x1 is identical to a subexpression of x0 then
11268 while checking x0, x1 has already been checked. Thus
11269 it is valid and so is x. */
11270 if (ARITHMETIC_P (x0
)
11271 && (x1
== XEXP (x0
, 0) || x1
== XEXP (x0
, 1)))
11274 /* If x0 is identical to a subexpression of x1 then x is
11275 valid iff the rest of x1 is valid. */
11276 if (ARITHMETIC_P (x1
)
11277 && (x0
== XEXP (x1
, 0) || x0
== XEXP (x1
, 1)))
11279 get_last_value_validate (&XEXP (x1
,
11280 x0
== XEXP (x1
, 0) ? 1 : 0),
11281 insn
, tick
, replace
);
11284 if (get_last_value_validate (&XEXP (x
, i
), insn
, tick
,
11288 /* Don't bother with these. They shouldn't occur anyway. */
11289 else if (fmt
[i
] == 'E')
11293 /* If we haven't found a reason for it to be invalid, it is valid. */
11297 /* Get the last value assigned to X, if known. Some registers
11298 in the value may be replaced with (clobber (const_int 0)) if their value
11299 is no longer known reliably. */
11302 get_last_value (rtx x
)
11304 unsigned int regno
;
11307 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11308 then convert it to the desired mode. If this is a paradoxical SUBREG,
11309 we cannot predict what values the "extra" bits might have. */
11310 if (GET_CODE (x
) == SUBREG
11311 && subreg_lowpart_p (x
)
11312 && (GET_MODE_SIZE (GET_MODE (x
))
11313 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
11314 && (value
= get_last_value (SUBREG_REG (x
))) != 0)
11315 return gen_lowpart (GET_MODE (x
), value
);
11321 value
= reg_stat
[regno
].last_set_value
;
11323 /* If we don't have a value, or if it isn't for this basic block and
11324 it's either a hard register, set more than once, or it's a live
11325 at the beginning of the function, return 0.
11327 Because if it's not live at the beginning of the function then the reg
11328 is always set before being used (is never used without being set).
11329 And, if it's set only once, and it's always set before use, then all
11330 uses must have the same last value, even if it's not from this basic
11334 || (reg_stat
[regno
].last_set_label
!= label_tick
11335 && (regno
< FIRST_PSEUDO_REGISTER
11336 || REG_N_SETS (regno
) != 1
11337 || (REGNO_REG_SET_P
11338 (ENTRY_BLOCK_PTR
->next_bb
->il
.rtl
->global_live_at_start
,
11342 /* If the value was set in a later insn than the ones we are processing,
11343 we can't use it even if the register was only set once. */
11344 if (INSN_CUID (reg_stat
[regno
].last_set
) >= subst_low_cuid
)
11347 /* If the value has all its registers valid, return it. */
11348 if (get_last_value_validate (&value
, reg_stat
[regno
].last_set
,
11349 reg_stat
[regno
].last_set_label
, 0))
11352 /* Otherwise, make a copy and replace any invalid register with
11353 (clobber (const_int 0)). If that fails for some reason, return 0. */
11355 value
= copy_rtx (value
);
11356 if (get_last_value_validate (&value
, reg_stat
[regno
].last_set
,
11357 reg_stat
[regno
].last_set_label
, 1))
11363 /* Return nonzero if expression X refers to a REG or to memory
11364 that is set in an instruction more recent than FROM_CUID. */
11367 use_crosses_set_p (rtx x
, int from_cuid
)
11371 enum rtx_code code
= GET_CODE (x
);
11375 unsigned int regno
= REGNO (x
);
11376 unsigned endreg
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
11377 ? hard_regno_nregs
[regno
][GET_MODE (x
)] : 1);
11379 #ifdef PUSH_ROUNDING
11380 /* Don't allow uses of the stack pointer to be moved,
11381 because we don't know whether the move crosses a push insn. */
11382 if (regno
== STACK_POINTER_REGNUM
&& PUSH_ARGS
)
11385 for (; regno
< endreg
; regno
++)
11386 if (reg_stat
[regno
].last_set
11387 && INSN_CUID (reg_stat
[regno
].last_set
) > from_cuid
)
11392 if (code
== MEM
&& mem_last_set
> from_cuid
)
11395 fmt
= GET_RTX_FORMAT (code
);
11397 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
11402 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
11403 if (use_crosses_set_p (XVECEXP (x
, i
, j
), from_cuid
))
11406 else if (fmt
[i
] == 'e'
11407 && use_crosses_set_p (XEXP (x
, i
), from_cuid
))
11413 /* Define three variables used for communication between the following
11416 static unsigned int reg_dead_regno
, reg_dead_endregno
;
11417 static int reg_dead_flag
;
11419 /* Function called via note_stores from reg_dead_at_p.
11421 If DEST is within [reg_dead_regno, reg_dead_endregno), set
11422 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
/* NOTE(review): lossy extraction -- the storage class/return type,
   braces and the early non-REG guard on DEST are not visible here.  */
11425 reg_dead_at_p_1 (rtx dest
, rtx x
, void *data ATTRIBUTE_UNUSED
)
11427 unsigned int regno
, endregno
;
/* A hard register in a wide mode occupies several consecutive regs;
   compute the half-open range [regno, endregno) DEST covers.  */
11432 regno
= REGNO (dest
);
11433 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
11434 ? hard_regno_nregs
[regno
][GET_MODE (dest
)] : 1);
/* Does DEST's range overlap the watched [reg_dead_regno,
   reg_dead_endregno) range?  */
11436 if (reg_dead_endregno
> regno
&& reg_dead_regno
< endregno
)
/* CLOBBER means the register is dead here (flag 1); a SET gives it
   a value, so it is live (flag -1).  */
11437 reg_dead_flag
= (GET_CODE (x
) == CLOBBER
) ? 1 : -1;
11440 /* Return nonzero if REG is known to be dead at INSN.
11442 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
11443 referencing REG, it is dead. If we hit a SET referencing REG, it is
11444 live. Otherwise, see if it is live or dead at the start of the basic
11445 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
11446 must be assumed to be always live. */
11449 reg_dead_at_p (rtx reg
, rtx insn
)
11454 /* Set variables for reg_dead_at_p_1. */
11455 reg_dead_regno
= REGNO (reg
);
11456 reg_dead_endregno
= reg_dead_regno
+ (reg_dead_regno
< FIRST_PSEUDO_REGISTER
11457 ? hard_regno_nregs
[reg_dead_regno
]
11463 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. For fixed registers
11464 we allow the machine description to decide whether use-and-clobber
11465 patterns are OK. */
11466 if (reg_dead_regno
< FIRST_PSEUDO_REGISTER
)
11468 for (i
= reg_dead_regno
; i
< reg_dead_endregno
; i
++)
11469 if (!fixed_regs
[i
] && TEST_HARD_REG_BIT (newpat_used_regs
, i
))
11473 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11474 beginning of function. */
11475 for (; insn
&& !LABEL_P (insn
) && !BARRIER_P (insn
);
11476 insn
= prev_nonnote_insn (insn
))
11478 note_stores (PATTERN (insn
), reg_dead_at_p_1
, NULL
);
11480 return reg_dead_flag
== 1 ? 1 : 0;
11482 if (find_regno_note (insn
, REG_DEAD
, reg_dead_regno
))
11486 /* Get the basic block that we were in. */
11488 block
= ENTRY_BLOCK_PTR
->next_bb
;
11491 FOR_EACH_BB (block
)
11492 if (insn
== BB_HEAD (block
))
11495 if (block
== EXIT_BLOCK_PTR
)
11499 for (i
= reg_dead_regno
; i
< reg_dead_endregno
; i
++)
11500 if (REGNO_REG_SET_P (block
->il
.rtl
->global_live_at_start
, i
))
11506 /* Note hard registers in X that are used. This code is similar to
11507 that in flow.c, but much simpler since we don't care about pseudos. */
11510 mark_used_regs_combine (rtx x
)
11512 RTX_CODE code
= GET_CODE (x
);
11513 unsigned int regno
;
11526 case ADDR_DIFF_VEC
:
11529 /* CC0 must die in the insn after it is set, so we don't need to take
11530 special note of it here. */
11536 /* If we are clobbering a MEM, mark any hard registers inside the
11537 address as used. */
11538 if (MEM_P (XEXP (x
, 0)))
11539 mark_used_regs_combine (XEXP (XEXP (x
, 0), 0));
11544 /* A hard reg in a wide mode may really be multiple registers.
11545 If so, mark all of them just like the first. */
11546 if (regno
< FIRST_PSEUDO_REGISTER
)
11548 unsigned int endregno
, r
;
11550 /* None of this applies to the stack, frame or arg pointers. */
11551 if (regno
== STACK_POINTER_REGNUM
11552 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11553 || regno
== HARD_FRAME_POINTER_REGNUM
11555 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11556 || (regno
== ARG_POINTER_REGNUM
&& fixed_regs
[regno
])
11558 || regno
== FRAME_POINTER_REGNUM
)
11561 endregno
= regno
+ hard_regno_nregs
[regno
][GET_MODE (x
)];
11562 for (r
= regno
; r
< endregno
; r
++)
11563 SET_HARD_REG_BIT (newpat_used_regs
, r
);
11569 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11571 rtx testreg
= SET_DEST (x
);
11573 while (GET_CODE (testreg
) == SUBREG
11574 || GET_CODE (testreg
) == ZERO_EXTRACT
11575 || GET_CODE (testreg
) == STRICT_LOW_PART
)
11576 testreg
= XEXP (testreg
, 0);
11578 if (MEM_P (testreg
))
11579 mark_used_regs_combine (XEXP (testreg
, 0));
11581 mark_used_regs_combine (SET_SRC (x
));
11589 /* Recursively scan the operands of this expression. */
11592 const char *fmt
= GET_RTX_FORMAT (code
);
11594 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
11597 mark_used_regs_combine (XEXP (x
, i
));
11598 else if (fmt
[i
] == 'E')
11602 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
11603 mark_used_regs_combine (XVECEXP (x
, i
, j
));
11609 /* Remove register number REGNO from the dead registers list of INSN.
11611 Return the note used to record the death, if there was one. */
11614 remove_death (unsigned int regno
, rtx insn
)
11616 rtx note
= find_regno_note (insn
, REG_DEAD
, regno
);
11620 REG_N_DEATHS (regno
)--;
11621 remove_note (insn
, note
);
11627 /* For each register (hardware or pseudo) used within expression X, if its
11628 death is in an instruction with cuid between FROM_CUID (inclusive) and
11629 TO_INSN (exclusive), put a REG_DEAD note for that register in the
11630 list headed by PNOTES.
11632 That said, don't move registers killed by maybe_kill_insn.
11634 This is done when X is being merged by combination into TO_INSN. These
11635 notes will then be distributed as needed. */
11638 move_deaths (rtx x
, rtx maybe_kill_insn
, int from_cuid
, rtx to_insn
,
11643 enum rtx_code code
= GET_CODE (x
);
11647 unsigned int regno
= REGNO (x
);
11648 rtx where_dead
= reg_stat
[regno
].last_death
;
11649 rtx before_dead
, after_dead
;
11651 /* Don't move the register if it gets killed in between from and to. */
11652 if (maybe_kill_insn
&& reg_set_p (x
, maybe_kill_insn
)
11653 && ! reg_referenced_p (x
, maybe_kill_insn
))
11656 /* WHERE_DEAD could be a USE insn made by combine, so first we
11657 make sure that we have insns with valid INSN_CUID values. */
11658 before_dead
= where_dead
;
11659 while (before_dead
&& INSN_UID (before_dead
) > max_uid_cuid
)
11660 before_dead
= PREV_INSN (before_dead
);
11662 after_dead
= where_dead
;
11663 while (after_dead
&& INSN_UID (after_dead
) > max_uid_cuid
)
11664 after_dead
= NEXT_INSN (after_dead
);
11666 if (before_dead
&& after_dead
11667 && INSN_CUID (before_dead
) >= from_cuid
11668 && (INSN_CUID (after_dead
) < INSN_CUID (to_insn
)
11669 || (where_dead
!= after_dead
11670 && INSN_CUID (after_dead
) == INSN_CUID (to_insn
))))
11672 rtx note
= remove_death (regno
, where_dead
);
11674 /* It is possible for the call above to return 0. This can occur
11675 when last_death points to I2 or I1 that we combined with.
11676 In that case make a new note.
11678 We must also check for the case where X is a hard register
11679 and NOTE is a death note for a range of hard registers
11680 including X. In that case, we must put REG_DEAD notes for
11681 the remaining registers in place of NOTE. */
11683 if (note
!= 0 && regno
< FIRST_PSEUDO_REGISTER
11684 && (GET_MODE_SIZE (GET_MODE (XEXP (note
, 0)))
11685 > GET_MODE_SIZE (GET_MODE (x
))))
11687 unsigned int deadregno
= REGNO (XEXP (note
, 0));
11688 unsigned int deadend
11689 = (deadregno
+ hard_regno_nregs
[deadregno
]
11690 [GET_MODE (XEXP (note
, 0))]);
11691 unsigned int ourend
11692 = regno
+ hard_regno_nregs
[regno
][GET_MODE (x
)];
11695 for (i
= deadregno
; i
< deadend
; i
++)
11696 if (i
< regno
|| i
>= ourend
)
11697 REG_NOTES (where_dead
)
11698 = gen_rtx_EXPR_LIST (REG_DEAD
,
11700 REG_NOTES (where_dead
));
11703 /* If we didn't find any note, or if we found a REG_DEAD note that
11704 covers only part of the given reg, and we have a multi-reg hard
11705 register, then to be safe we must check for REG_DEAD notes
11706 for each register other than the first. They could have
11707 their own REG_DEAD notes lying around. */
11708 else if ((note
== 0
11710 && (GET_MODE_SIZE (GET_MODE (XEXP (note
, 0)))
11711 < GET_MODE_SIZE (GET_MODE (x
)))))
11712 && regno
< FIRST_PSEUDO_REGISTER
11713 && hard_regno_nregs
[regno
][GET_MODE (x
)] > 1)
11715 unsigned int ourend
11716 = regno
+ hard_regno_nregs
[regno
][GET_MODE (x
)];
11717 unsigned int i
, offset
;
11721 offset
= hard_regno_nregs
[regno
][GET_MODE (XEXP (note
, 0))];
11725 for (i
= regno
+ offset
; i
< ourend
; i
++)
11726 move_deaths (regno_reg_rtx
[i
],
11727 maybe_kill_insn
, from_cuid
, to_insn
, &oldnotes
);
11730 if (note
!= 0 && GET_MODE (XEXP (note
, 0)) == GET_MODE (x
))
11732 XEXP (note
, 1) = *pnotes
;
11736 *pnotes
= gen_rtx_EXPR_LIST (REG_DEAD
, x
, *pnotes
);
11738 REG_N_DEATHS (regno
)++;
11744 else if (GET_CODE (x
) == SET
)
11746 rtx dest
= SET_DEST (x
);
11748 move_deaths (SET_SRC (x
), maybe_kill_insn
, from_cuid
, to_insn
, pnotes
);
11750 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11751 that accesses one word of a multi-word item, some
11752 piece of everything register in the expression is used by
11753 this insn, so remove any old death. */
11754 /* ??? So why do we test for equality of the sizes? */
11756 if (GET_CODE (dest
) == ZERO_EXTRACT
11757 || GET_CODE (dest
) == STRICT_LOW_PART
11758 || (GET_CODE (dest
) == SUBREG
11759 && (((GET_MODE_SIZE (GET_MODE (dest
))
11760 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
11761 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
11762 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
))))
11764 move_deaths (dest
, maybe_kill_insn
, from_cuid
, to_insn
, pnotes
);
11768 /* If this is some other SUBREG, we know it replaces the entire
11769 value, so use that as the destination. */
11770 if (GET_CODE (dest
) == SUBREG
)
11771 dest
= SUBREG_REG (dest
);
11773 /* If this is a MEM, adjust deaths of anything used in the address.
11774 For a REG (the only other possibility), the entire value is
11775 being replaced so the old value is not used in this insn. */
11778 move_deaths (XEXP (dest
, 0), maybe_kill_insn
, from_cuid
,
11783 else if (GET_CODE (x
) == CLOBBER
)
11786 len
= GET_RTX_LENGTH (code
);
11787 fmt
= GET_RTX_FORMAT (code
);
11789 for (i
= 0; i
< len
; i
++)
11794 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
11795 move_deaths (XVECEXP (x
, i
, j
), maybe_kill_insn
, from_cuid
,
11798 else if (fmt
[i
] == 'e')
11799 move_deaths (XEXP (x
, i
), maybe_kill_insn
, from_cuid
, to_insn
, pnotes
);
11803 /* Return 1 if X is the target of a bit-field assignment in BODY, the
11804 pattern of an insn. X must be a REG. */
11807 reg_bitfield_target_p (rtx x
, rtx body
)
11811 if (GET_CODE (body
) == SET
)
11813 rtx dest
= SET_DEST (body
);
11815 unsigned int regno
, tregno
, endregno
, endtregno
;
11817 if (GET_CODE (dest
) == ZERO_EXTRACT
)
11818 target
= XEXP (dest
, 0);
11819 else if (GET_CODE (dest
) == STRICT_LOW_PART
)
11820 target
= SUBREG_REG (XEXP (dest
, 0));
11824 if (GET_CODE (target
) == SUBREG
)
11825 target
= SUBREG_REG (target
);
11827 if (!REG_P (target
))
11830 tregno
= REGNO (target
), regno
= REGNO (x
);
11831 if (tregno
>= FIRST_PSEUDO_REGISTER
|| regno
>= FIRST_PSEUDO_REGISTER
)
11832 return target
== x
;
11834 endtregno
= tregno
+ hard_regno_nregs
[tregno
][GET_MODE (target
)];
11835 endregno
= regno
+ hard_regno_nregs
[regno
][GET_MODE (x
)];
11837 return endregno
> tregno
&& regno
< endtregno
;
11840 else if (GET_CODE (body
) == PARALLEL
)
11841 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
11842 if (reg_bitfield_target_p (x
, XVECEXP (body
, 0, i
)))
11848 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11849 as appropriate. I3 and I2 are the insns resulting from the combination
11850 insns including FROM (I2 may be zero).
11852 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11853 not need REG_DEAD notes because they are being substituted for. This
11854 saves searching in the most common cases.
11856 Each note in the list is either ignored or placed on some insns, depending
11857 on the type of note. */
11860 distribute_notes (rtx notes
, rtx from_insn
, rtx i3
, rtx i2
, rtx elim_i2
,
11863 rtx note
, next_note
;
11866 for (note
= notes
; note
; note
= next_note
)
11868 rtx place
= 0, place2
= 0;
11870 next_note
= XEXP (note
, 1);
11871 switch (REG_NOTE_KIND (note
))
11875 /* Doesn't matter much where we put this, as long as it's somewhere.
11876 It is preferable to keep these notes on branches, which is most
11877 likely to be i3. */
11881 case REG_VALUE_PROFILE
:
11882 /* Just get rid of this note, as it is unused later anyway. */
11885 case REG_NON_LOCAL_GOTO
:
11890 gcc_assert (i2
&& JUMP_P (i2
));
11895 case REG_EH_REGION
:
11896 /* These notes must remain with the call or trapping instruction. */
11899 else if (i2
&& CALL_P (i2
))
11903 gcc_assert (flag_non_call_exceptions
);
11904 if (may_trap_p (i3
))
11906 else if (i2
&& may_trap_p (i2
))
11908 /* ??? Otherwise assume we've combined things such that we
11909 can now prove that the instructions can't trap. Drop the
11910 note in this case. */
11916 /* These notes must remain with the call. It should not be
11917 possible for both I2 and I3 to be a call. */
11922 gcc_assert (i2
&& CALL_P (i2
));
11928 /* Any clobbers for i3 may still exist, and so we must process
11929 REG_UNUSED notes from that insn.
11931 Any clobbers from i2 or i1 can only exist if they were added by
11932 recog_for_combine. In that case, recog_for_combine created the
11933 necessary REG_UNUSED notes. Trying to keep any original
11934 REG_UNUSED notes from these insns can cause incorrect output
11935 if it is for the same register as the original i3 dest.
11936 In that case, we will notice that the register is set in i3,
11937 and then add a REG_UNUSED note for the destination of i3, which
11938 is wrong. However, it is possible to have REG_UNUSED notes from
11939 i2 or i1 for register which were both used and clobbered, so
11940 we keep notes from i2 or i1 if they will turn into REG_DEAD
11943 /* If this register is set or clobbered in I3, put the note there
11944 unless there is one already. */
11945 if (reg_set_p (XEXP (note
, 0), PATTERN (i3
)))
11947 if (from_insn
!= i3
)
11950 if (! (REG_P (XEXP (note
, 0))
11951 ? find_regno_note (i3
, REG_UNUSED
, REGNO (XEXP (note
, 0)))
11952 : find_reg_note (i3
, REG_UNUSED
, XEXP (note
, 0))))
11955 /* Otherwise, if this register is used by I3, then this register
11956 now dies here, so we must put a REG_DEAD note here unless there
11958 else if (reg_referenced_p (XEXP (note
, 0), PATTERN (i3
))
11959 && ! (REG_P (XEXP (note
, 0))
11960 ? find_regno_note (i3
, REG_DEAD
,
11961 REGNO (XEXP (note
, 0)))
11962 : find_reg_note (i3
, REG_DEAD
, XEXP (note
, 0))))
11964 PUT_REG_NOTE_KIND (note
, REG_DEAD
);
11972 /* These notes say something about results of an insn. We can
11973 only support them if they used to be on I3 in which case they
11974 remain on I3. Otherwise they are ignored.
11976 If the note refers to an expression that is not a constant, we
11977 must also ignore the note since we cannot tell whether the
11978 equivalence is still true. It might be possible to do
11979 slightly better than this (we only have a problem if I2DEST
11980 or I1DEST is present in the expression), but it doesn't
11981 seem worth the trouble. */
11983 if (from_insn
== i3
11984 && (XEXP (note
, 0) == 0 || CONSTANT_P (XEXP (note
, 0))))
11989 case REG_NO_CONFLICT
:
11990 /* These notes say something about how a register is used. They must
11991 be present on any use of the register in I2 or I3. */
11992 if (reg_mentioned_p (XEXP (note
, 0), PATTERN (i3
)))
11995 if (i2
&& reg_mentioned_p (XEXP (note
, 0), PATTERN (i2
)))
12005 /* This can show up in several ways -- either directly in the
12006 pattern, or hidden off in the constant pool with (or without?)
12007 a REG_EQUAL note. */
12008 /* ??? Ignore the without-reg_equal-note problem for now. */
12009 if (reg_mentioned_p (XEXP (note
, 0), PATTERN (i3
))
12010 || ((tem
= find_reg_note (i3
, REG_EQUAL
, NULL_RTX
))
12011 && GET_CODE (XEXP (tem
, 0)) == LABEL_REF
12012 && XEXP (XEXP (tem
, 0), 0) == XEXP (note
, 0)))
12016 && (reg_mentioned_p (XEXP (note
, 0), PATTERN (i2
))
12017 || ((tem
= find_reg_note (i2
, REG_EQUAL
, NULL_RTX
))
12018 && GET_CODE (XEXP (tem
, 0)) == LABEL_REF
12019 && XEXP (XEXP (tem
, 0), 0) == XEXP (note
, 0))))
12027 /* Don't attach REG_LABEL note to a JUMP_INSN. Add
12028 a JUMP_LABEL instead or decrement LABEL_NUSES. */
12029 if (place
&& JUMP_P (place
))
12031 rtx label
= JUMP_LABEL (place
);
12034 JUMP_LABEL (place
) = XEXP (note
, 0);
12037 gcc_assert (label
== XEXP (note
, 0));
12038 if (LABEL_P (label
))
12039 LABEL_NUSES (label
)--;
12043 if (place2
&& JUMP_P (place2
))
12045 rtx label
= JUMP_LABEL (place2
);
12048 JUMP_LABEL (place2
) = XEXP (note
, 0);
12051 gcc_assert (label
== XEXP (note
, 0));
12052 if (LABEL_P (label
))
12053 LABEL_NUSES (label
)--;
12060 /* This note says something about the value of a register prior
12061 to the execution of an insn. It is too much trouble to see
12062 if the note is still correct in all situations. It is better
12063 to simply delete it. */
12067 /* If the insn previously containing this note still exists,
12068 put it back where it was. Otherwise move it to the previous
12069 insn. Adjust the corresponding REG_LIBCALL note. */
12070 if (!NOTE_P (from_insn
))
12074 tem
= find_reg_note (XEXP (note
, 0), REG_LIBCALL
, NULL_RTX
);
12075 place
= prev_real_insn (from_insn
);
12077 XEXP (tem
, 0) = place
;
12078 /* If we're deleting the last remaining instruction of a
12079 libcall sequence, don't add the notes. */
12080 else if (XEXP (note
, 0) == from_insn
)
12082 /* Don't add the dangling REG_RETVAL note. */
12089 /* This is handled similarly to REG_RETVAL. */
12090 if (!NOTE_P (from_insn
))
12094 tem
= find_reg_note (XEXP (note
, 0), REG_RETVAL
, NULL_RTX
);
12095 place
= next_real_insn (from_insn
);
12097 XEXP (tem
, 0) = place
;
12098 /* If we're deleting the last remaining instruction of a
12099 libcall sequence, don't add the notes. */
12100 else if (XEXP (note
, 0) == from_insn
)
12102 /* Don't add the dangling REG_LIBCALL note. */
12109 /* If we replaced the right hand side of FROM_INSN with a
12110 REG_EQUAL note, the original use of the dying register
12111 will not have been combined into I3 and I2. In such cases,
12112 FROM_INSN is guaranteed to be the first of the combined
12113 instructions, so we simply need to search back before
12114 FROM_INSN for the previous use or set of this register,
12115 then alter the notes there appropriately.
12117 If the register is used as an input in I3, it dies there.
12118 Similarly for I2, if it is nonzero and adjacent to I3.
12120 If the register is not used as an input in either I3 or I2
12121 and it is not one of the registers we were supposed to eliminate,
12122 there are two possibilities. We might have a non-adjacent I2
12123 or we might have somehow eliminated an additional register
12124 from a computation. For example, we might have had A & B where
12125 we discover that B will always be zero. In this case we will
12126 eliminate the reference to A.
12128 In both cases, we must search to see if we can find a previous
12129 use of A and put the death note there. */
12132 && from_insn
== replaced_rhs_insn
12133 && !reg_overlap_mentioned_p (XEXP (note
, 0), replaced_rhs_value
))
12138 && CALL_P (from_insn
)
12139 && find_reg_fusage (from_insn
, USE
, XEXP (note
, 0)))
12141 else if (reg_referenced_p (XEXP (note
, 0), PATTERN (i3
)))
12143 else if (i2
!= 0 && next_nonnote_insn (i2
) == i3
12144 && reg_referenced_p (XEXP (note
, 0), PATTERN (i2
)))
12146 else if (rtx_equal_p (XEXP (note
, 0), elim_i2
)
12147 || rtx_equal_p (XEXP (note
, 0), elim_i1
))
12154 basic_block bb
= this_basic_block
;
12156 for (tem
= PREV_INSN (tem
); place
== 0; tem
= PREV_INSN (tem
))
12158 if (! INSN_P (tem
))
12160 if (tem
== BB_HEAD (bb
))
12165 /* If the register is being set at TEM, see if that is all
12166 TEM is doing. If so, delete TEM. Otherwise, make this
12167 into a REG_UNUSED note instead. Don't delete sets to
12168 global register vars. */
12169 if ((REGNO (XEXP (note
, 0)) >= FIRST_PSEUDO_REGISTER
12170 || !global_regs
[REGNO (XEXP (note
, 0))])
12171 && reg_set_p (XEXP (note
, 0), PATTERN (tem
)))
12173 rtx set
= single_set (tem
);
12174 rtx inner_dest
= 0;
12176 rtx cc0_setter
= NULL_RTX
;
12180 for (inner_dest
= SET_DEST (set
);
12181 (GET_CODE (inner_dest
) == STRICT_LOW_PART
12182 || GET_CODE (inner_dest
) == SUBREG
12183 || GET_CODE (inner_dest
) == ZERO_EXTRACT
);
12184 inner_dest
= XEXP (inner_dest
, 0))
12187 /* Verify that it was the set, and not a clobber that
12188 modified the register.
12190 CC0 targets must be careful to maintain setter/user
12191 pairs. If we cannot delete the setter due to side
12192 effects, mark the user with an UNUSED note instead
12195 if (set
!= 0 && ! side_effects_p (SET_SRC (set
))
12196 && rtx_equal_p (XEXP (note
, 0), inner_dest
)
12198 && (! reg_mentioned_p (cc0_rtx
, SET_SRC (set
))
12199 || ((cc0_setter
= prev_cc0_setter (tem
)) != NULL
12200 && sets_cc0_p (PATTERN (cc0_setter
)) > 0))
12204 /* Move the notes and links of TEM elsewhere.
12205 This might delete other dead insns recursively.
12206 First set the pattern to something that won't use
12208 rtx old_notes
= REG_NOTES (tem
);
12210 PATTERN (tem
) = pc_rtx
;
12211 REG_NOTES (tem
) = NULL
;
12213 distribute_notes (old_notes
, tem
, tem
, NULL_RTX
,
12214 NULL_RTX
, NULL_RTX
);
12215 distribute_links (LOG_LINKS (tem
));
12217 SET_INSN_DELETED (tem
);
12220 /* Delete the setter too. */
12223 PATTERN (cc0_setter
) = pc_rtx
;
12224 old_notes
= REG_NOTES (cc0_setter
);
12225 REG_NOTES (cc0_setter
) = NULL
;
12227 distribute_notes (old_notes
, cc0_setter
,
12228 cc0_setter
, NULL_RTX
,
12229 NULL_RTX
, NULL_RTX
);
12230 distribute_links (LOG_LINKS (cc0_setter
));
12232 SET_INSN_DELETED (cc0_setter
);
12238 PUT_REG_NOTE_KIND (note
, REG_UNUSED
);
12240 /* If there isn't already a REG_UNUSED note, put one
12241 here. Do not place a REG_DEAD note, even if
12242 the register is also used here; that would not
12243 match the algorithm used in lifetime analysis
12244 and can cause the consistency check in the
12245 scheduler to fail. */
12246 if (! find_regno_note (tem
, REG_UNUSED
,
12247 REGNO (XEXP (note
, 0))))
12252 else if (reg_referenced_p (XEXP (note
, 0), PATTERN (tem
))
12254 && find_reg_fusage (tem
, USE
, XEXP (note
, 0))))
12258 /* If we are doing a 3->2 combination, and we have a
12259 register which formerly died in i3 and was not used
12260 by i2, which now no longer dies in i3 and is used in
12261 i2 but does not die in i2, and place is between i2
12262 and i3, then we may need to move a link from place to
12264 if (i2
&& INSN_UID (place
) <= max_uid_cuid
12265 && INSN_CUID (place
) > INSN_CUID (i2
)
12267 && INSN_CUID (from_insn
) > INSN_CUID (i2
)
12268 && reg_referenced_p (XEXP (note
, 0), PATTERN (i2
)))
12270 rtx links
= LOG_LINKS (place
);
12271 LOG_LINKS (place
) = 0;
12272 distribute_links (links
);
12277 if (tem
== BB_HEAD (bb
))
12281 /* We haven't found an insn for the death note and it
12282 is still a REG_DEAD note, but we have hit the beginning
12283 of the block. If the existing life info says the reg
12284 was dead, there's nothing left to do. Otherwise, we'll
12285 need to do a global life update after combine. */
12286 if (REG_NOTE_KIND (note
) == REG_DEAD
&& place
== 0
12287 && REGNO_REG_SET_P (bb
->il
.rtl
->global_live_at_start
,
12288 REGNO (XEXP (note
, 0))))
12289 SET_BIT (refresh_blocks
, this_basic_block
->index
);
12292 /* If the register is set or already dead at PLACE, we needn't do
12293 anything with this note if it is still a REG_DEAD note.
12294 We check here if it is set at all, not if is it totally replaced,
12295 which is what `dead_or_set_p' checks, so also check for it being
12298 if (place
&& REG_NOTE_KIND (note
) == REG_DEAD
)
12300 unsigned int regno
= REGNO (XEXP (note
, 0));
12302 /* Similarly, if the instruction on which we want to place
12303 the note is a noop, we'll need do a global live update
12304 after we remove them in delete_noop_moves. */
12305 if (noop_move_p (place
))
12306 SET_BIT (refresh_blocks
, this_basic_block
->index
);
12308 if (dead_or_set_p (place
, XEXP (note
, 0))
12309 || reg_bitfield_target_p (XEXP (note
, 0), PATTERN (place
)))
12311 /* Unless the register previously died in PLACE, clear
12312 last_death. [I no longer understand why this is
12314 if (reg_stat
[regno
].last_death
!= place
)
12315 reg_stat
[regno
].last_death
= 0;
12319 reg_stat
[regno
].last_death
= place
;
12321 /* If this is a death note for a hard reg that is occupying
12322 multiple registers, ensure that we are still using all
12323 parts of the object. If we find a piece of the object
12324 that is unused, we must arrange for an appropriate REG_DEAD
12325 note to be added for it. However, we can't just emit a USE
12326 and tag the note to it, since the register might actually
12327 be dead; so we recourse, and the recursive call then finds
12328 the previous insn that used this register. */
12330 if (place
&& regno
< FIRST_PSEUDO_REGISTER
12331 && hard_regno_nregs
[regno
][GET_MODE (XEXP (note
, 0))] > 1)
12333 unsigned int endregno
12334 = regno
+ hard_regno_nregs
[regno
]
12335 [GET_MODE (XEXP (note
, 0))];
12339 for (i
= regno
; i
< endregno
; i
++)
12340 if ((! refers_to_regno_p (i
, i
+ 1, PATTERN (place
), 0)
12341 && ! find_regno_fusage (place
, USE
, i
))
12342 || dead_or_set_regno_p (place
, i
))
12347 /* Put only REG_DEAD notes for pieces that are
12348 not already dead or set. */
12350 for (i
= regno
; i
< endregno
;
12351 i
+= hard_regno_nregs
[i
][reg_raw_mode
[i
]])
12353 rtx piece
= regno_reg_rtx
[i
];
12354 basic_block bb
= this_basic_block
;
12356 if (! dead_or_set_p (place
, piece
)
12357 && ! reg_bitfield_target_p (piece
,
12361 = gen_rtx_EXPR_LIST (REG_DEAD
, piece
, NULL_RTX
);
12363 distribute_notes (new_note
, place
, place
,
12364 NULL_RTX
, NULL_RTX
, NULL_RTX
);
12366 else if (! refers_to_regno_p (i
, i
+ 1,
12367 PATTERN (place
), 0)
12368 && ! find_regno_fusage (place
, USE
, i
))
12369 for (tem
= PREV_INSN (place
); ;
12370 tem
= PREV_INSN (tem
))
12372 if (! INSN_P (tem
))
12374 if (tem
== BB_HEAD (bb
))
12376 SET_BIT (refresh_blocks
,
12377 this_basic_block
->index
);
12382 if (dead_or_set_p (tem
, piece
)
12383 || reg_bitfield_target_p (piece
,
12387 = gen_rtx_EXPR_LIST (REG_UNUSED
, piece
,
12402 /* Any other notes should not be present at this point in the
12404 gcc_unreachable ();
12409 XEXP (note
, 1) = REG_NOTES (place
);
12410 REG_NOTES (place
) = note
;
12412 else if ((REG_NOTE_KIND (note
) == REG_DEAD
12413 || REG_NOTE_KIND (note
) == REG_UNUSED
)
12414 && REG_P (XEXP (note
, 0)))
12415 REG_N_DEATHS (REGNO (XEXP (note
, 0)))--;
12419 if ((REG_NOTE_KIND (note
) == REG_DEAD
12420 || REG_NOTE_KIND (note
) == REG_UNUSED
)
12421 && REG_P (XEXP (note
, 0)))
12422 REG_N_DEATHS (REGNO (XEXP (note
, 0)))++;
12424 REG_NOTES (place2
) = gen_rtx_fmt_ee (GET_CODE (note
),
12425 REG_NOTE_KIND (note
),
12427 REG_NOTES (place2
));
12432 /* Similarly to above, distribute the LOG_LINKS that used to be present on
12433 I3, I2, and I1 to new locations. This is also called to add a link
12434 pointing at I3 when I3's destination is changed. */
12437 distribute_links (rtx links
)
12439 rtx link
, next_link
;
12441 for (link
= links
; link
; link
= next_link
)
12447 next_link
= XEXP (link
, 1);
12449 /* If the insn that this link points to is a NOTE or isn't a single
12450 set, ignore it. In the latter case, it isn't clear what we
12451 can do other than ignore the link, since we can't tell which
12452 register it was for. Such links wouldn't be used by combine
12455 It is not possible for the destination of the target of the link to
12456 have been changed by combine. The only potential of this is if we
12457 replace I3, I2, and I1 by I3 and I2. But in that case the
12458 destination of I2 also remains unchanged. */
12460 if (NOTE_P (XEXP (link
, 0))
12461 || (set
= single_set (XEXP (link
, 0))) == 0)
12464 reg
= SET_DEST (set
);
12465 while (GET_CODE (reg
) == SUBREG
|| GET_CODE (reg
) == ZERO_EXTRACT
12466 || GET_CODE (reg
) == STRICT_LOW_PART
)
12467 reg
= XEXP (reg
, 0);
12469 /* A LOG_LINK is defined as being placed on the first insn that uses
12470 a register and points to the insn that sets the register. Start
12471 searching at the next insn after the target of the link and stop
12472 when we reach a set of the register or the end of the basic block.
12474 Note that this correctly handles the link that used to point from
12475 I3 to I2. Also note that not much searching is typically done here
12476 since most links don't point very far away. */
12478 for (insn
= NEXT_INSN (XEXP (link
, 0));
12479 (insn
&& (this_basic_block
->next_bb
== EXIT_BLOCK_PTR
12480 || BB_HEAD (this_basic_block
->next_bb
) != insn
));
12481 insn
= NEXT_INSN (insn
))
12482 if (INSN_P (insn
) && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
12484 if (reg_referenced_p (reg
, PATTERN (insn
)))
12488 else if (CALL_P (insn
)
12489 && find_reg_fusage (insn
, USE
, reg
))
12494 else if (INSN_P (insn
) && reg_set_p (reg
, insn
))
12497 /* If we found a place to put the link, place it there unless there
12498 is already a link to the same insn as LINK at that point. */
12504 for (link2
= LOG_LINKS (place
); link2
; link2
= XEXP (link2
, 1))
12505 if (XEXP (link2
, 0) == XEXP (link
, 0))
12510 XEXP (link
, 1) = LOG_LINKS (place
);
12511 LOG_LINKS (place
) = link
;
12513 /* Set added_links_insn to the earliest insn we added a
12515 if (added_links_insn
== 0
12516 || INSN_CUID (added_links_insn
) > INSN_CUID (place
))
12517 added_links_insn
= place
;
12523 /* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
12524 Check whether the expression pointer to by LOC is a register or
12525 memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
12526 Otherwise return zero. */
12529 unmentioned_reg_p_1 (rtx
*loc
, void *expr
)
12534 && (REG_P (x
) || MEM_P (x
))
12535 && ! reg_mentioned_p (x
, (rtx
) expr
))
12540 /* Check for any register or memory mentioned in EQUIV that is not
12541 mentioned in EXPR. This is used to restrict EQUIV to "specializations"
12542 of EXPR where some registers may have been replaced by constants. */
12545 unmentioned_reg_p (rtx equiv
, rtx expr
)
12547 return for_each_rtx (&equiv
, unmentioned_reg_p_1
, expr
);
12550 /* Compute INSN_CUID for INSN, which is an insn made by combine. */
12553 insn_cuid (rtx insn
)
12555 while (insn
!= 0 && INSN_UID (insn
) > max_uid_cuid
12556 && NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == USE
)
12557 insn
= NEXT_INSN (insn
);
12559 gcc_assert (INSN_UID (insn
) <= max_uid_cuid
);
12561 return INSN_CUID (insn
);
12565 dump_combine_stats (FILE *file
)
12569 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
12570 combine_attempts
, combine_merges
, combine_extras
, combine_successes
);
12574 dump_combine_total_stats (FILE *file
)
12578 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12579 total_attempts
, total_merges
, total_extras
, total_successes
);
12584 gate_handle_combine (void)
12586 return (optimize
> 0);
12589 /* Try combining insns through substitution. */
12590 static unsigned int
12591 rest_of_handle_combine (void)
12593 int rebuild_jump_labels_after_combine
12594 = combine_instructions (get_insns (), max_reg_num ());
12596 /* Combining insns may have turned an indirect jump into a
12597 direct jump. Rebuild the JUMP_LABEL fields of jumping
12599 if (rebuild_jump_labels_after_combine
)
12601 timevar_push (TV_JUMP
);
12602 rebuild_jump_labels (get_insns ());
12603 timevar_pop (TV_JUMP
);
12605 delete_dead_jumptables ();
12606 cleanup_cfg (CLEANUP_EXPENSIVE
| CLEANUP_UPDATE_LIFE
);
12611 struct tree_opt_pass pass_combine
=
12613 "combine", /* name */
12614 gate_handle_combine
, /* gate */
12615 rest_of_handle_combine
, /* execute */
12618 0, /* static_pass_number */
12619 TV_COMBINE
, /* tv_id */
12620 0, /* properties_required */
12621 0, /* properties_provided */
12622 0, /* properties_destroyed */
12623 0, /* todo_flags_start */
12625 TODO_ggc_collect
, /* todo_flags_finish */