/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
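
/* As an illustrative sketch of the substitution described above (the RTL
   is made up and not taken from any particular target): given the linked
   pair

       (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
       (set (reg:SI 102) (mult:SI (reg:SI 100) (reg:SI 103)))

   where reg 100 dies in the second insn, combine substitutes the first
   SET's source into the second insn and asks the recognizer whether

       (set (reg:SI 102) (mult:SI (plus:SI (reg:SI 101) (const_int 4))
                                  (reg:SI 103)))

   matches some pattern in the machine description.  If it does, the
   first insn is deleted and the data flow information is updated; if
   not, everything is undone and both insns stay as they were.  */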
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "tm.h"
81 #include "rtl.h"
82 #include "tree.h"
83 #include "tm_p.h"
84 #include "flags.h"
85 #include "regs.h"
86 #include "hard-reg-set.h"
87 #include "basic-block.h"
88 #include "insn-config.h"
89 #include "function.h"
90 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
91 #include "expr.h"
92 #include "insn-attr.h"
93 #include "recog.h"
94 #include "real.h"
95 #include "toplev.h"
96 #include "target.h"
97 #include "optabs.h"
98 #include "insn-codes.h"
99 #include "rtlhooks-def.h"
100 /* Include output.h for dump_file. */
101 #include "output.h"
102 #include "params.h"
103 #include "timevar.h"
104 #include "tree-pass.h"
/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
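
/* UIDs above max_uid_cuid belong to insns created after the uid_cuid
   table was allocated; for those the macro falls back to the insn_cuid
   function rather than indexing past the end of the table.  */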
/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
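
/* The double shift is well defined even when the two widths are equal:
   (val << (BITS_PER_WORD - 1)) << 1 shifts every bit out and yields 0,
   which is exactly the value the single (undefined) full-width shift is
   intended to produce.  */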
/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

struct reg_stat {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
         to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value             the last value assigned
     last_set_label             records the value of label_tick when the
                                register was assigned
     last_set_table_tick        records the value of label_tick when a
                                value using the register is assigned
     last_set_invalid           set to nonzero when it is not valid
                                to use the value of this register in some
                                register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two paragraphs are out of date.)

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT last_set_nonzero_bits;
  char last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode) last_set_mode : 8;

  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned char sign_bit_copies;

  unsigned HOST_WIDE_INT nonzero_bits;
};
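
/* A sketch of how this pays off, using made-up pseudo numbers: after

       (set (reg:SI 100) (and:SI (reg:SI 101) (const_int 15)))

   nonzero_bits for reg 100 is 15, so a later

       (and:SI (reg:SI 100) (const_int 255))

   is known to be redundant; the mask already covers every bit that can
   possibly be nonzero, and the AND simplifies to reg 100 itself.  */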
static struct reg_stat *reg_stat;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;

/* The following array records the insn_rtx_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* Length of the currently allocated uid_insn_cost array.  */

static int last_insn_cost;

/* Incremented for each label.  */

static int label_tick;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while computing them and after combine has
   completed.  This former test prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, chained through the next field,
   with a free list of records available for reuse.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;
static rtx reg_nonzero_bits_for_combine (rtx, enum machine_mode, rtx,
                                         enum machine_mode,
                                         unsigned HOST_WIDE_INT,
                                         unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int, unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (void);
static void set_nonzero_bits_and_sign_copies (rtx, rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static rtx expand_field_assignment (rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
                            rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
                              unsigned HOST_WIDE_INT *);
static rtx force_to_mode (rtx, enum machine_mode,
                          unsigned HOST_WIDE_INT, rtx, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
                                   unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
                            HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
                                 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_promoted_subreg (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (rtx);
static int use_crosses_set_p (rtx, int);
static void reg_dead_at_p_1 (rtx, rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static int insn_cuid (rtx);
static void record_promoted_value (rtx, rtx);
static int unmentioned_reg_p_1 (rtx *, void *);
static bool unmentioned_reg_p (rtx, rtx);
/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
         that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
                  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
         CONST_INT is not valid, because after the replacement, the
         original mode would be gone.  Unfortunately, we can't tell
         when do_SUBST is called to replace the operand thereof, so we
         perform this test on oldval instead, checking whether an
         invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
                    && GET_CODE (SUBREG_REG (oldval)) == CONST_INT));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
                    && GET_CODE (XEXP (oldval, 0)) == CONST_INT));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)  do_SUBST(&(INTO), (NEWVAL))
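
/* A typical use, as a sketch (NEW_SRC standing for whatever replacement
   expression is at hand):

       SUBST (SET_SRC (x), new_src);

   The old contents of SET_SRC (x) are recorded in undobuf first, so if
   the rewritten insn later fails to be recognized, undo_all can restore
   every location modified through SUBST.  */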
/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))
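
/* The restriction above matters on hosts where HOST_WIDE_INT is wider
   than int: do_SUBST_INT stores through an int *, so recording, say, a
   64-bit INTVAL this way would silently truncate it.  Full-width integer
   contents must instead go through SUBST as a CONST_INT rtx.  */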
/* Subroutine of try_combine.  Determine whether the combine replacement
   patterns NEWPAT and NEWI2PAT are cheaper according to insn_rtx_cost
   than the original instruction sequence I1, I2 and I3.  Note that I1
   and/or NEWI2PAT may be NULL_RTX.  This function returns false if the
   costs of all instructions can be estimated, and the replacements are
   more expensive than the original sequence.  */

static bool
combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat)
{
  int i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Look up the original insn_rtx_costs.  */
  i2_cost = INSN_UID (i2) <= last_insn_cost
            ? uid_insn_cost[INSN_UID (i2)] : 0;
  i3_cost = INSN_UID (i3) <= last_insn_cost
            ? uid_insn_cost[INSN_UID (i3)] : 0;

  if (i1)
    {
      i1_cost = INSN_UID (i1) <= last_insn_cost
                ? uid_insn_cost[INSN_UID (i1)] : 0;
      old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
                 ? i1_cost + i2_cost + i3_cost : 0;
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = 0;
    }

  /* Calculate the replacement insn_rtx_costs.  */
  new_i3_cost = insn_rtx_cost (newpat);
  if (newi2pat)
    {
      new_i2_cost = insn_rtx_cost (newi2pat);
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
                 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = (INSN_UID (undobuf.other_insn) <= last_insn_cost
                        ? uid_insn_cost[INSN_UID (undobuf.other_insn)] : 0);
      new_other_cost = insn_rtx_cost (PATTERN (undobuf.other_insn));
      if (old_other_cost > 0 && new_other_cost > 0)
        {
          old_cost += old_other_cost;
          new_cost += new_other_cost;
        }
      else
        old_cost = 0;
    }

  /* Disallow this recombination if both new_cost and old_cost are
     greater than zero, and new_cost is greater than old_cost.  */
  if (old_cost > 0
      && new_cost > old_cost)
    {
      if (dump_file)
        {
          if (i1)
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d, %d and %d\n",
                       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d + %d = %d\n",
                       i1_cost, i2_cost, i3_cost, old_cost);
            }
          else
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d and %d\n",
                       INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d = %d\n",
                       i2_cost, i3_cost, old_cost);
            }

          if (newi2pat)
            {
              fprintf (dump_file, "replacement costs %d + %d = %d\n",
                       new_i2_cost, new_i3_cost, new_cost);
            }
          else
            fprintf (dump_file, "replacement cost %d\n", new_cost);
        }

      return false;
    }

  /* Update the uid_insn_cost array with the replacement costs.  */
  uid_insn_cost[INSN_UID (i2)] = new_i2_cost;
  uid_insn_cost[INSN_UID (i3)] = new_i3_cost;
  if (i1)
    uid_insn_cost[INSN_UID (i1)] = 0;

  return true;
}
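
/* A worked example of the rule above, with made-up costs: combining I2
   (cost 4) and I3 (cost 4) into a single NEWPAT of cost 6 gives
   old_cost = 8 and new_cost = 6, so the combination is accepted; had
   NEWPAT cost 10, new_cost > old_cost would reject it.  Whenever any
   required cost is unknown (recorded as 0), old_cost ends up 0 and the
   comparison is skipped, i.e. the combination is allowed.  */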
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  unsigned int j = 0;
  rtx links, nextlinks;
  sbitmap_iterator sbi;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  rtl_hooks = combine_rtl_hooks;

  reg_stat = xcalloc (nregs, sizeof (struct reg_stat));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);

  /* Allocate array of current insn_rtx_costs.  */
  uid_insn_cost = xcalloc (max_uid_cuid + 1, sizeof (int));
  last_insn_cost = max_uid_cuid;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
        {
          note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
                       NULL);
          record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
          for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
            if (REG_NOTE_KIND (links) == REG_INC)
              set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
                                                NULL);
#endif

          /* Record the current insn_rtx_cost of this instruction.  */
          if (NONJUMP_INSN_P (insn))
            uid_insn_cost[INSN_UID (insn)] = insn_rtx_cost (PATTERN (insn));
          if (dump_file)
            fprintf (dump_file, "insn_cost %d: %d\n",
                     INSN_UID (insn), uid_insn_cost[INSN_UID (insn)]);
        }

      if (LABEL_P (insn))
        label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
           insn = next ? next : NEXT_INSN (insn))
        {
          next = 0;

          if (LABEL_P (insn))
            label_tick++;

          else if (INSN_P (insn))
            {
              /* See if we know about function return values before this
                 insn based upon SUBREG flags.  */
              check_promoted_subreg (insn, PATTERN (insn));

              /* Try this insn with each insn it links back to.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if ((next = try_combine (insn, XEXP (links, 0),
                                         NULL_RTX, &new_direct_jump_p)) != 0)
                  goto retry;

              /* Try each sequence of three linked insns ending with this one.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx link = XEXP (links, 0);

                  /* If the linked insn has been replaced by a note, then there
                     is no point in pursuing this chain any further.  */
                  if (NOTE_P (link))
                    continue;

                  for (nextlinks = LOG_LINKS (link);
                       nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, link,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

#ifdef HAVE_cc0
              /* Try to combine a jump insn that uses CC0
                 with a preceding insn that sets CC0, and maybe with its
                 logical predecessor as well.
                 This is how we make decrement-and-branch insns.
                 We need this special code because data flow connections
                 via CC0 do not get entered in LOG_LINKS.  */

              if (JUMP_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev)))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Do the same for an insn that explicitly references CC0.  */
              if (NONJUMP_INSN_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev))
                  && GET_CODE (PATTERN (insn)) == SET
                  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Finally, see if any of the insns that this insn links to
                 explicitly references CC0.  If so, try this insn, that insn,
                 and its predecessor if it sets CC0.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if (NONJUMP_INSN_P (XEXP (links, 0))
                    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
                    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
                    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
                    && NONJUMP_INSN_P (prev)
                    && sets_cc0_p (PATTERN (prev))
                    && (next = try_combine (insn, XEXP (links, 0),
                                            prev, &new_direct_jump_p)) != 0)
                  goto retry;
#endif

              /* Try combining an insn with two different insns whose results it
                 uses.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                for (nextlinks = XEXP (links, 1); nextlinks;
                     nextlinks = XEXP (nextlinks, 1))
                  if ((next = try_combine (insn, XEXP (links, 0),
                                           XEXP (nextlinks, 0),
                                           &new_direct_jump_p)) != 0)
                    goto retry;

              /* Try this insn with each REG_EQUAL note it links back to.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx set, note;
                  rtx temp = XEXP (links, 0);
                  if ((set = single_set (temp)) != 0
                      && (note = find_reg_equal_equiv_note (temp)) != 0
                      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
                      /* Avoid using a register that may already have been
                         marked dead by an earlier instruction.  */
                      && ! unmentioned_reg_p (note, SET_SRC (set))
                      && (GET_MODE (note) == VOIDmode
                          ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
                          : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
                    {
                      /* Temporarily replace the set's source with the
                         contents of the REG_EQUAL note.  The insn will
                         be deleted or recognized by try_combine.  */
                      rtx orig = SET_SRC (set);
                      SET_SRC (set) = note;
                      next = try_combine (insn, temp, NULL_RTX,
                                          &new_direct_jump_p);
                      if (next)
                        goto retry;
                      SET_SRC (set) = orig;
                    }
                }

              if (!NOTE_P (insn))
                record_dead_and_set_regs (insn);

            retry:
              ;
            }
        }
    }
  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, j, sbi)
    BASIC_BLOCK (j)->flags |= BB_DIRTY;
  new_direct_jump_p |= purge_all_dead_edges ();
  delete_noop_moves ();

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
                                    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
                                    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (uid_insn_cost);
  free (reg_stat);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
        next = undo->next;
        free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
/* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */

static void
init_reg_last (void)
{
  unsigned int i;
  for (i = 0; i < combine_max_regno; i++)
    memset (reg_stat + i, 0, offsetof (struct reg_stat, sign_bit_copies));
}
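
/* Note that the memset stops at offsetof (struct reg_stat,
   sign_bit_copies): sign_bit_copies and nonzero_bits are computed by the
   initial scan over the insns and must survive into the combination pass
   proper, so only the last_* bookkeeping fields are cleared.  */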
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (void)
{
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  if (targetm.calls.promote_function_args (TREE_TYPE (cfun->decl)))
    {
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        /* Check whether this register can hold an incoming pointer
           argument.  FUNCTION_ARG_REGNO_P tests outgoing register
           numbers, so translate if necessary due to register windows.  */
        if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
            && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
          {
            record_value_for_reg
              (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
                                           : SIGN_EXTEND),
                                          GET_MODE (reg),
                                          gen_rtx_CLOBBER (mode, const0_rtx)));
          }
    }
}
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (rtx x, rtx set,
                                  void *data ATTRIBUTE_UNUSED)
{
  unsigned int num;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
         say what its contents were.  */
      && ! REGNO_REG_SET_P
         (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
        {
          reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          reg_stat[REGNO (x)].sign_bit_copies = 1;
          return;
        }

      /* If this is a complex assignment, see if we can convert it into a
         simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
         set what we know about X.  */

      if (SET_DEST (set) == x
          || (GET_CODE (SET_DEST (set)) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
              && SUBREG_REG (SET_DEST (set)) == x))
        {
          rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
          /* If X is narrower than a word and SRC is a non-negative
             constant that would appear negative in the mode of X,
             sign-extend it for use in reg_stat[].nonzero_bits because some
             machines (maybe most) will actually do the sign-extension
             and this is the conservative approach.

             ??? For 2.5, try to tighten up the MD files in this regard
             instead of this kludge.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
              && GET_CODE (src) == CONST_INT
              && INTVAL (src) > 0
              && 0 != (INTVAL (src)
                       & ((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
            src = GEN_INT (INTVAL (src)
                           | ((HOST_WIDE_INT) (-1)
                              << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

          /* Don't call nonzero_bits if it cannot change anything.  */
          if (reg_stat[REGNO (x)].nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
            reg_stat[REGNO (x)].nonzero_bits
              |= nonzero_bits (src, nonzero_bits_mode);
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
          if (reg_stat[REGNO (x)].sign_bit_copies == 0
              || reg_stat[REGNO (x)].sign_bit_copies > num)
            reg_stat[REGNO (x)].sign_bit_copies = num;
        }
      else
        {
          reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          reg_stat[REGNO (x)].sign_bit_copies = 1;
        }
    }
}
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
               rtx *pdest, rtx *psrc)
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
                              && next_active_insn (succ) == i3)
                           : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
          rtx note;

          switch (GET_CODE (elt))
            {
            /* This is important to combine floating point insns
               for the SH4 port.  */
            case USE:
              /* Combining an isolated USE doesn't make sense.
                 We depend here on combinable_i3pat to reject them.  */
              /* The code below this loop only verifies that the inputs of
                 the SET in INSN do not change.  We call reg_set_between_p
                 to verify that the REG in the USE does not change between
                 I3 and INSN.
                 If the USE in INSN was for a pseudo register, the matching
                 insn pattern will likely match any register; combining this
                 with any other USE would only be safe if we knew that the
                 used registers have identical values, or if there was
                 something to tell them apart, e.g. different modes.  For
                 now, we forgo such complicated tests and simply disallow
                 combining of USES of pseudo registers with any other USE.  */
              if (REG_P (XEXP (elt, 0))
                  && GET_CODE (PATTERN (i3)) == PARALLEL)
                {
                  rtx i3pat = PATTERN (i3);
                  int i = XVECLEN (i3pat, 0) - 1;
                  unsigned int regno = REGNO (XEXP (elt, 0));

                  do
                    {
                      rtx i3elt = XVECEXP (i3pat, 0, i);

                      if (GET_CODE (i3elt) == USE
                          && REG_P (XEXP (i3elt, 0))
                          && (REGNO (XEXP (i3elt, 0)) == regno
                              ? reg_set_between_p (XEXP (elt, 0),
                                                   PREV_INSN (insn), i3)
                              : regno >= FIRST_PSEUDO_REGISTER))
                        return 0;
                    }
                  while (--i >= 0);
                }
              break;

            /* We can ignore CLOBBERs.  */
            case CLOBBER:
              break;

            case SET:
              /* Ignore SETs whose result isn't used but not those that
                 have side-effects.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
                  && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX))
                      || INTVAL (XEXP (note, 0)) <= 0)
                  && ! side_effects_p (elt))
                break;

              /* If we have already found a SET, this is a second one and
                 so we cannot combine with this insn.  */
              if (set)
                return 0;

              set = elt;
              break;

            default:
              /* Anything else means we can't combine.  */
              return 0;
            }
        }

      if (set == 0
          /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
             so don't do anything with it.  */
          || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
        return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
         a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (CALL_P (i3)
          && (find_reg_fusage (i3, USE, dest)
              || (REG_P (dest)
                  && REGNO (dest) < FIRST_PSEUDO_REGISTER
                  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't substitute into a non-local goto, this confuses CFG.  */
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
         pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
         use REG_RETVAL notes for noconflict blocks, but other code here
         makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
          && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
         does not use any registers whose values alter in between.  However,
         if the insns are adjacent, a use can't cross a set even though we
         think it might (this can happen for a sequence of insns each setting
         the same destination; last_set of that register might point to
         a NOTE).  If INSN has a REG_EQUIV note, the register is always
         equivalent to the memory so the substitution is valid even if there
         are intervening stores.  Also, don't move a volatile asm or
         UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
          && (((!MEM_P (src)
                || ! find_reg_note (insn, REG_EQUIV, src))
               && use_crosses_set_p (src, INSN_CUID (insn)))
              || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
              || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
         better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
         change whether the life span of some REGs crosses calls or not,
         and it is a pain to update that information.
         Exception: if source is a constant, moving it later can't hurt.
         Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (REG_P (dest))
    {
      /* If register alignment is being enforced for multi-word items in all
         cases except for parameters, it is possible to have a register copy
         insn referencing a hard register that is not allowed to contain the
         mode being copied and which would not be valid as an operand of most
         insns.  Eliminate this problem by not combining with such an insn.

         Also, on some machines we don't want to extend the life of a hard
         register.  */

      if (REG_P (src)
          && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
               && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
              /* Don't extend the life of a hard register unless it is
                 a user variable (if we have few registers) or it can't
                 fit into the desired register (meaning something special
                 is going on).
                 Also avoid substituting a return register into I3, because
                 reload can't handle a conflict with constraints of other
                 inputs.  */
              || (REGNO (src) < FIRST_PSEUDO_REGISTER
                  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
        return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;


  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
        {
          /* Don't substitute for a register intended as a clobberable
             operand.  */
          rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
          if (rtx_equal_p (reg, dest))
            return 0;

          /* If the clobber represents an earlyclobber operand, we must not
             substitute an expression containing the clobbered register.
             As we do not analyze the constraint strings here, we have to
             make the conservative assumption.  However, if the register is
             a fixed hard reg, the clobber cannot represent any operand;
             we leave it up to the machine description to either accept or
             reject use-and-clobber patterns.  */
          if (!REG_P (reg)
              || REGNO (reg) >= FIRST_PSEUDO_REGISTER
              || !fixed_regs[REGNO (reg)])
            if (reg_overlap_mentioned_p (reg, src))
              return 0;
        }

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
          return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN contains an autoincrement or autodecrement, make sure that
     register is not used between there and I3, and not already used in
     I3 either.  Neither must it be used in PRED or SUCC, if they exist.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
        && (JUMP_P (i3)
            || reg_used_between_p (XEXP (link, 0), insn, i3)
            || (pred != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
            || (succ != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
            || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
         (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
                    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
                  int i1_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;
      rtx subdest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
             || GET_CODE (inner_dest) == SUBREG
             || GET_CODE (inner_dest) == ZERO_EXTRACT)
        inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
         above.  We don't want to prevent pseudos from being combined
         into the address of a MEM, so only prevent the combination if
         i1 or i2 set the same MEM.  */
      if ((inner_dest != dest &&
           (!MEM_P (inner_dest)
            || rtx_equal_p (i2dest, inner_dest)
            || (i1dest && rtx_equal_p (i1dest, inner_dest)))
           && (reg_overlap_mentioned_p (i2dest, inner_dest)
               || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

          /* This is the same test done in can_combine_p except we can't test
             all_adjacent; we don't have to, since this instruction will stay
             in place, thus we are not considering increasing the lifetime of
             INNER_DEST.

             Also, if this insn sets a function argument, combining it with
             something that might need a spill could clobber a previous
             function argument; the all_adjacent test in can_combine_p also
             checks this; here, we do a more specific test for this case.  */

          || (REG_P (inner_dest)
              && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
              && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
                                        GET_MODE (inner_dest))))
          || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
        return 0;

      /* If DEST is used in I3, it is being killed in this insn, so
         record that for later.  We have to consider paradoxical
         subregs here, since they kill the whole register, but we
         ignore partial subregs, STRICT_LOW_PART, etc.
         Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
         STACK_POINTER_REGNUM, since these are always considered to be
         live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      subdest = dest;
      if (GET_CODE (subdest) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (subdest))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
        subdest = SUBREG_REG (subdest);
      if (pi3dest_killed
          && REG_P (subdest)
          && reg_referenced_p (subdest, PATTERN (i3))
          && REGNO (subdest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && (REGNO (subdest) != ARG_POINTER_REGNUM
              || ! fixed_regs [REGNO (subdest)])
#endif
          && REGNO (subdest) != STACK_POINTER_REGNUM)
        {
          if (*pi3dest_killed)
            return 0;

          *pi3dest_killed = subdest;
        }
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
                                i1_not_in_src, pi3dest_killed))
          return 0;
    }

  return 1;
}
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
                && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      if (BINARY_P (x))
        return contains_muldiv (XEXP (x, 0))
               || contains_muldiv (XEXP (x, 1));

      if (UNARY_P (x))
        return contains_muldiv (XEXP (x, 0));

      return 0;
    }
}
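
/* For instance, contains_muldiv returns 1 for
   (mult:SI (reg:SI 100) (const_int 5)) but 0 for
   (mult:SI (reg:SI 100) (const_int 8)), since a multiplication by 8 is
   just a cheap shift; and because it recurses into operands,
   (plus:SI (udiv:SI (reg:SI 100) (reg:SI 101)) (const_int 1))
   also yields 1.  */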
/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, reload might die.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
           && ! fixed_regs[REGNO (src)]
           && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
          || (REGNO (dest) < FIRST_PSEUDO_REGISTER
              && ! fixed_regs[REGNO (dest)]
              && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}
struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};

/* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
   hard registers that are known to be written to / clobbered in full.  */
static void
likely_spilled_retval_1 (rtx x, rtx set, void *data)
{
  struct likely_spilled_retval_info *info = data;
  unsigned regno, nregs;
  unsigned new_mask;

  if (!REG_P (XEXP (set, 0)))
    return;
  regno = REGNO (x);
  if (regno >= info->regno + info->nregs)
    return;
  nregs = hard_regno_nregs[regno][GET_MODE (x)];
  if (regno + nregs <= info->regno)
    return;
  new_mask = (2U << (nregs - 1)) - 1;
  if (regno < info->regno)
    new_mask >>= info->regno - regno;
  else
    new_mask <<= regno - info->regno;
  info->mask &= ~new_mask;
}
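
/* To illustrate the mask arithmetic with made-up numbers: if the return
   value occupies hard regs 0..3 (info->regno == 0, info->nregs == 4) and
   SET stores regs 2..3, then nregs is 2, new_mask starts as
   (2U << 1) - 1 == 3 and is shifted left by 2, so bits 2 and 3 are
   cleared from info->mask: exactly the registers that no longer need a
   spill check.  */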
/* Return nonzero iff part of the return value is live during INSN, and
   it is likely spilled.  This can happen when more than one insn is needed
   to copy the return value, e.g. when we consider combining into the
   second copy insn for a complex value.  */

static int
likely_spilled_retval_p (rtx insn)
{
  rtx use = BB_END (this_basic_block);
  rtx reg, p;
  unsigned regno, nregs;
  /* We assume here that no machine mode needs more than
     32 hard registers when the value overlaps with a register
     for which FUNCTION_VALUE_REGNO_P is true.  */
  unsigned mask;
  struct likely_spilled_retval_info info;

  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
    return 0;
  reg = XEXP (PATTERN (use), 0);
  if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
    return 0;
  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][GET_MODE (reg)];
  if (nregs == 1)
    return 0;
  mask = (2U << (nregs - 1)) - 1;

  /* Disregard parts of the return value that are set later.  */
  info.regno = regno;
  info.nregs = nregs;
  info.mask = mask;
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
    note_stores (PATTERN (p), likely_spilled_retval_1, &info);
  mask = info.mask;

  /* Check if any of the (probably) live return value registers is
     likely spilled.  */
  nregs --;
  do
    {
      if ((mask & 1 << nregs)
          && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
        return 1;
    } while (nregs--);
  return 0;
}
1655 /* Adjust INSN after we made a change to its destination.
1657 Changing the destination can invalidate notes that say something about
1658 the results of the insn and a LOG_LINK pointing to the insn. */
1660 static void
1661 adjust_for_new_dest (rtx insn)
1662 {
1663   rtx *loc;
1665   /* For notes, be conservative and simply remove them.  */
1666   loc = &REG_NOTES (insn);
1667   while (*loc)
1668     {
1669       enum reg_note kind = REG_NOTE_KIND (*loc);
1670       if (kind == REG_EQUAL || kind == REG_EQUIV)
1671         *loc = XEXP (*loc, 1);
1672       else
1673         loc = &XEXP (*loc, 1);
1674     }
1676   /* The new insn will have a destination that was previously the destination
1677      of an insn just above it.  Call distribute_links to make a LOG_LINK from
1678      the next use of that destination.  */
1679   distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
1680 }
1682 /* Return TRUE if combine can reuse reg X in mode MODE.
1683 ADDED_SETS is nonzero if the original set is still required. */
1684 static bool
1685 can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
1686 {
1687   unsigned int regno;
1689   if (!REG_P (x))
1690     return false;
1692   regno = REGNO (x);
1693   /* Allow hard registers if the new mode is legal, and occupies no more
1694      registers than the old mode.  */
1695   if (regno < FIRST_PSEUDO_REGISTER)
1696     return (HARD_REGNO_MODE_OK (regno, mode)
1697             && (hard_regno_nregs[regno][GET_MODE (x)]
1698                 >= hard_regno_nregs[regno][mode]));
1700   /* Or a pseudo that is only used once.  */
1701   return (REG_N_SETS (regno) == 1 && !added_sets
1702           && !REG_USERVAR_P (x));
1703 }
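/* Editorial example (hypothetical target): for a hard register,
   can_change_dest_mode ((reg:SI 0), 0, DImode) is false on a target where
   DImode occupies two hard registers but SImode only one, since the new
   mode may not need more registers than the old; a pseudo qualifies only
   if it is set exactly once, its original set is not still required
   (ADDED_SETS == 0), and it is not a user variable.  */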
1705 /* Try to combine the insns I1 and I2 into I3.
1706 Here I1 and I2 appear earlier than I3.
1707 I1 can be zero; then we combine just I2 into I3.
1709 If we are combining three insns and the resulting insn is not recognized,
1710 try splitting it into two insns. If that happens, I2 and I3 are retained
1711 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1712 are pseudo-deleted.
1714 Return 0 if the combination does not work. Then nothing is changed.
1715 If we did the combination, return the insn at which combine should
1716 resume scanning.
1718 Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
1719 new direct jump instruction. */
1721 static rtx
1722 try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
1723 {
1724 /* New patterns for I3 and I2, respectively. */
1725 rtx newpat, newi2pat = 0;
1726 rtvec newpat_vec_with_clobbers = 0;
1727 int substed_i2 = 0, substed_i1 = 0;
1728 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1729 int added_sets_1, added_sets_2;
1730 /* Total number of SETs to put into I3. */
1731 int total_sets;
1732 /* Nonzero if I2's body now appears in I3. */
1733 int i2_is_used;
1734 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1735 int insn_code_number, i2_code_number = 0, other_code_number = 0;
1736 /* Contains I3 if the destination of I3 is used in its source, which means
1737 that the old life of I3 is being killed. If that usage is placed into
1738 I2 and not in I3, a REG_DEAD note must be made. */
1739 rtx i3dest_killed = 0;
1740 /* SET_DEST and SET_SRC of I2 and I1. */
1741 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1742 /* PATTERN (I2), or a copy of it in certain cases. */
1743 rtx i2pat;
1744   /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
1745 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1746 int i2dest_killed = 0, i1dest_killed = 0;
1747 int i1_feeds_i3 = 0;
1748 /* Notes that must be added to REG_NOTES in I3 and I2. */
1749 rtx new_i3_notes, new_i2_notes;
1750   /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
1751   int i3_subst_into_i2 = 0;
1752   /* Nonzero if I1, I2 or I3 is a MULT operation.  */
1753   int have_mult = 0;
1754 int swap_i2i3 = 0;
1756 int maxreg;
1757 rtx temp;
1758 rtx link;
1759 int i;
1761 /* Exit early if one of the insns involved can't be used for
1762 combinations. */
1763 if (cant_combine_insn_p (i3)
1764 || cant_combine_insn_p (i2)
1765 || (i1 && cant_combine_insn_p (i1))
1766 || likely_spilled_retval_p (i3)
1767 /* We also can't do anything if I3 has a
1768 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1769 libcall. */
1770 #if 0
1771 /* ??? This gives worse code, and appears to be unnecessary, since no
1772 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1773 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1774 #endif
1775       )
1776     return 0;
1778 combine_attempts++;
1779 undobuf.other_insn = 0;
1781 /* Reset the hard register usage information. */
1782 CLEAR_HARD_REG_SET (newpat_used_regs);
1784 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1785 code below, set I1 to be the earlier of the two insns. */
1786 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1787 temp = i1, i1 = i2, i2 = temp;
1789 added_links_insn = 0;
1791 /* First check for one important special-case that the code below will
1792 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
1793 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1794 we may be able to replace that destination with the destination of I3.
1795 This occurs in the common code where we compute both a quotient and
1796 remainder into a structure, in which case we want to do the computation
1797 directly into the structure to avoid register-register copies.
1799    Note that this case handles both multiple sets in I2 and also cases
1800    where I2's PARALLEL contains CLOBBERs in addition to its SETs.
1802 We make very conservative checks below and only try to handle the
1803 most common cases of this. For example, we only handle the case
1804 where I2 and I3 are adjacent to avoid making difficult register
1805 usage tests. */
1807 if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
1808 && REG_P (SET_SRC (PATTERN (i3)))
1809 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1810 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1811 && GET_CODE (PATTERN (i2)) == PARALLEL
1812 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1813 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1814 below would need to check what is inside (and reg_overlap_mentioned_p
1815 doesn't support those codes anyway). Don't allow those destinations;
1816 the resulting insn isn't likely to be recognized anyway. */
1817 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1818 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1819 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1820 SET_DEST (PATTERN (i3)))
1821       && next_real_insn (i2) == i3)
1822     {
1823       rtx p2 = PATTERN (i2);
1825 /* Make sure that the destination of I3,
1826 which we are going to substitute into one output of I2,
1827 is not used within another output of I2. We must avoid making this:
1828 (parallel [(set (mem (reg 69)) ...)
1829 (set (reg 69) ...)])
1830 which is not well-defined as to order of actions.
1831 (Besides, reload can't handle output reloads for this.)
1833 The problem can also happen if the dest of I3 is a memory ref,
1834 if another dest in I2 is an indirect memory ref. */
1835 for (i = 0; i < XVECLEN (p2, 0); i++)
1836 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1837 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1838 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1839 SET_DEST (XVECEXP (p2, 0, i))))
1840 break;
1842 if (i == XVECLEN (p2, 0))
1843 for (i = 0; i < XVECLEN (p2, 0); i++)
1844 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1845 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1846             && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1847           {
1848             combine_merges++;
1850 subst_insn = i3;
1851 subst_low_cuid = INSN_CUID (i2);
1853 added_sets_2 = added_sets_1 = 0;
1854 i2dest = SET_SRC (PATTERN (i3));
1855 i2dest_killed = dead_or_set_p (i2, i2dest);
1857 /* Replace the dest in I2 with our dest and make the resulting
1858 insn the new pattern for I3. Then skip to where we
1859 validate the pattern. Everything was set up above. */
1860 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1861 SET_DEST (PATTERN (i3)));
1863 newpat = p2;
1864 i3_subst_into_i2 = 1;
1865             goto validate_replacement;
1866           }
1867     }
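/* Illustrative sketch (editorial; hypothetical RTL): the special case above
   handles, e.g.,
	I2: (parallel [(set (reg 70) (div:SI (reg 66) (reg 67)))
		       (set (reg 71) (mod:SI (reg 66) (reg 67)))])
	I3: (set (mem:SI (reg 65)) (reg 71))
   where (reg 71) dies in I3.  The destination of I3 is substituted for
   (reg 71) inside I2's PARALLEL, so the remainder is computed directly
   into the structure slot and the register-register copy disappears.  */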
1869 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1870 one of those words to another constant, merge them by making a new
1871 constant. */
1872 if (i1 == 0
1873 && (temp = single_set (i2)) != 0
1874 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1875 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1876 && REG_P (SET_DEST (temp))
1877 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1878 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1879 && GET_CODE (PATTERN (i3)) == SET
1880 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1881 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1882 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1883 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1884       && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1885     {
1886       HOST_WIDE_INT lo, hi;
1888       if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1889         lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1890       else
1891         {
1892           lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1893           hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1894         }
1896       if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1897         {
1898           /* We don't handle the case of the target word being wider
1899              than a host wide int.  */
1900           gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD);
1902           lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1903           lo |= (INTVAL (SET_SRC (PATTERN (i3)))
1904                  & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1905         }
1906       else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1907         hi = INTVAL (SET_SRC (PATTERN (i3)));
1908       else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1909         {
1910           int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1911                              >> (HOST_BITS_PER_WIDE_INT - 1));
1913           lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1914                    (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1915           lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1916                  (INTVAL (SET_SRC (PATTERN (i3)))));
1917           if (hi == sign)
1918             hi = lo < 0 ? -1 : 0;
1919         }
1920       else
1921         /* We don't handle the case of the higher word not fitting
1922            entirely in either hi or lo.  */
1923         gcc_unreachable ();
1925       combine_merges++;
1926       subst_insn = i3;
1927       subst_low_cuid = INSN_CUID (i2);
1928       added_sets_2 = added_sets_1 = 0;
1929       i2dest = SET_DEST (temp);
1930       i2dest_killed = dead_or_set_p (i2, i2dest);
1932       SUBST (SET_SRC (temp),
1933              immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1935       newpat = PATTERN (i2);
1936       goto validate_replacement;
1937     }
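/* Worked example (editorial; assumes 32-bit words and hypothetical
   register numbers): with
	I2: (set (reg:DI 100) (const_int 0))
	I3: (set (subreg:SI (reg:DI 100) 0) (const_int 5))
   the subreg is the low part, so LO becomes 5 while HI stays 0, and I2 is
   rewritten as (set (reg:DI 100) (const_int 5)); I3's set is then
   redundant.  */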
1939 #ifndef HAVE_cc0
1940 /* If we have no I1 and I2 looks like:
1941 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1942 (set Y OP)])
1943 make up a dummy I1 that is
1944 (set Y OP)
1945 and change I2 to be
1946 (set (reg:CC X) (compare:CC Y (const_int 0)))
1948 (We can ignore any trailing CLOBBERs.)
1950 This undoes a previous combination and allows us to match a branch-and-
1951 decrement insn. */
1953 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1954 && XVECLEN (PATTERN (i2), 0) >= 2
1955 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1956 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1957 == MODE_CC)
1958 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1959 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1960 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1961 && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
1962 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1963 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1964     {
1965       for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1966         if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1967           break;
1969       if (i == 1)
1970         {
1971           /* We make I1 with the same INSN_UID as I2.  This gives it
1972              the same INSN_CUID for value tracking.  Our fake I1 will
1973              never appear in the insn stream so giving it the same INSN_UID
1974              as I2 will not cause a problem.  */
1976           i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1977                              BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
1978                              XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1979                              NULL_RTX);
1981           SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1982           SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1983                  SET_DEST (PATTERN (i1)));
1984         }
1985     }
1986 #endif
1988 /* Verify that I2 and I1 are valid for combining. */
1989 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1990 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1991     {
1992       undo_all ();
1993       return 0;
1994     }
1996 /* Record whether I2DEST is used in I2SRC and similarly for the other
1997 cases. Knowing this will help in register status updating below. */
1998 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1999 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
2000 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
2001 i2dest_killed = dead_or_set_p (i2, i2dest);
2002 i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
2004 /* See if I1 directly feeds into I3. It does if I1DEST is not used
2005 in I2SRC. */
2006 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
2008 /* Ensure that I3's pattern can be the destination of combines. */
2009 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
2010 i1 && i2dest_in_i1src && i1_feeds_i3,
2011 &i3dest_killed))
2012     {
2013       undo_all ();
2014       return 0;
2015     }
2017   /* See if any of the insns is a MULT operation.  Unless one is, we will
2018      reject a combination whose result is a MULT, since it must be slower.
2019      Be conservative here.  */
2020 if (GET_CODE (i2src) == MULT
2021 || (i1 != 0 && GET_CODE (i1src) == MULT)
2022 || (GET_CODE (PATTERN (i3)) == SET
2023 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
2024 have_mult = 1;
2026 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
2027 We used to do this EXCEPT in one case: I3 has a post-inc in an
2028 output operand. However, that exception can give rise to insns like
2029 mov r3,(r3)+
2030 which is a famous insn on the PDP-11 where the value of r3 used as the
2031 source was model-dependent. Avoid this sort of thing. */
2033 #if 0
2034 if (!(GET_CODE (PATTERN (i3)) == SET
2035 && REG_P (SET_SRC (PATTERN (i3)))
2036 && MEM_P (SET_DEST (PATTERN (i3)))
2037 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
2038 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
2039 /* It's not the exception. */
2040 #endif
2041 #ifdef AUTO_INC_DEC
2042 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
2043 if (REG_NOTE_KIND (link) == REG_INC
2044 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
2045 || (i1 != 0
2046 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
2047       {
2048         undo_all ();
2049         return 0;
2050       }
2051 #endif
2053 /* See if the SETs in I1 or I2 need to be kept around in the merged
2054 instruction: whenever the value set there is still needed past I3.
2055 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
2057 For the SET in I1, we have two cases: If I1 and I2 independently
2058 feed into I3, the set in I1 needs to be kept around if I1DEST dies
2059 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
2060 in I1 needs to be kept around unless I1DEST dies or is set in either
2061 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
2062 I1DEST. If so, we know I1 feeds into I2. */
2064 added_sets_2 = ! dead_or_set_p (i3, i2dest);
2066 added_sets_1
2067 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
2068 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
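/* Editorial example (hypothetical register numbers): if I1 is
   (set (reg 100) ...), I2 is (set (reg 101) (... (reg 100) ...)) and I3
   uses (reg 101), then I1 feeds I2 rather than I3, so ADDED_SETS_1 is
   nonzero unless (reg 100) dies or is set in I2 or I3: the merged insn
   must keep performing I1's SET because a later insn may still read
   (reg 100).  */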
2070 /* If the set in I2 needs to be kept around, we must make a copy of
2071 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
2072 PATTERN (I2), we are only substituting for the original I1DEST, not into
2073 an already-substituted copy. This also prevents making self-referential
2074 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
2075 I2DEST. */
2077 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
2078 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
2079 : PATTERN (i2));
2081 if (added_sets_2)
2082 i2pat = copy_rtx (i2pat);
2084 combine_merges++;
2086 /* Substitute in the latest insn for the regs set by the earlier ones. */
2088 maxreg = max_reg_num ();
2090 subst_insn = i3;
2092 #ifndef HAVE_cc0
2093 /* Many machines that don't use CC0 have insns that can both perform an
2094 arithmetic operation and set the condition code. These operations will
2095 be represented as a PARALLEL with the first element of the vector
2096 being a COMPARE of an arithmetic operation with the constant zero.
2097 The second element of the vector will set some pseudo to the result
2098 of the same arithmetic operation. If we simplify the COMPARE, we won't
2099 match such a pattern and so will generate an extra insn. Here we test
2100 for this case, where both the comparison and the operation result are
2101 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
2102 I2SRC. Later we will make the PARALLEL that contains I2. */
2104 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
2105 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
2106 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
2107 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
2109 #ifdef SELECT_CC_MODE
2110 rtx *cc_use;
2111 enum machine_mode compare_mode;
2112 #endif
2114 newpat = PATTERN (i3);
2115 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
2117 i2_is_used = 1;
2119 #ifdef SELECT_CC_MODE
2120 /* See if a COMPARE with the operand we substituted in should be done
2121 with the mode that is currently being used. If not, do the same
2122 processing we do in `subst' for a SET; namely, if the destination
2123 is used only once, try to replace it with a register of the proper
2124 mode and also replace the COMPARE. */
2125 if (undobuf.other_insn == 0
2126 && (cc_use = find_single_use (SET_DEST (newpat), i3,
2127 &undobuf.other_insn))
2128 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
2129 i2src, const0_rtx))
2130 != GET_MODE (SET_DEST (newpat))))
2132 if (can_change_dest_mode(SET_DEST (newpat), added_sets_2,
2133 compare_mode))
2135 unsigned int regno = REGNO (SET_DEST (newpat));
2136 rtx new_dest = gen_rtx_REG (compare_mode, regno);
2138 if (regno >= FIRST_PSEUDO_REGISTER)
2139 SUBST (regno_reg_rtx[regno], new_dest);
2141 SUBST (SET_DEST (newpat), new_dest);
2142 SUBST (XEXP (*cc_use, 0), new_dest);
2143 SUBST (SET_SRC (newpat),
2144 gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
2146 else
2147 undobuf.other_insn = 0;
2149 #endif
2151 else
2152 #endif
2154 /* It is possible that the source of I2 or I1 may be performing
2155 an unneeded operation, such as a ZERO_EXTEND of something
2156 that is known to have the high part zero. Handle that case
2157 by letting subst look at the innermost one of them.
2159 Another way to do this would be to have a function that tries
2160 to simplify a single insn instead of merging two or more
2161 insns. We don't do this because of the potential of infinite
2162 loops and because of the potential extra memory required.
2163 However, doing it the way we are is a bit of a kludge and
2164 doesn't catch all cases.
2166 But only do this if -fexpensive-optimizations since it slows
2167 things down and doesn't usually win.
2169 This is not done in the COMPARE case above because the
2170 unmodified I2PAT is used in the PARALLEL and so a pattern
2171 with a modified I2SRC would not match. */
2173       if (flag_expensive_optimizations)
2174         {
2175           /* Pass pc_rtx so no substitutions are done, just
2176              simplifications.  */
2177           if (i1)
2178             {
2179               subst_low_cuid = INSN_CUID (i1);
2180               i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
2181             }
2182           else
2183             {
2184               subst_low_cuid = INSN_CUID (i2);
2185               i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
2186             }
2187         }
2189 n_occurrences = 0; /* `subst' counts here */
2191 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
2192 need to make a unique copy of I2SRC each time we substitute it
2193 to avoid self-referential rtl. */
2195 subst_low_cuid = INSN_CUID (i2);
2196 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
2197 ! i1_feeds_i3 && i1dest_in_i1src);
2198 substed_i2 = 1;
2200 /* Record whether i2's body now appears within i3's body. */
2201 i2_is_used = n_occurrences;
2204 /* If we already got a failure, don't try to do more. Otherwise,
2205 try to substitute in I1 if we have it. */
2207 if (i1 && GET_CODE (newpat) != CLOBBER)
2209 /* Before we can do this substitution, we must redo the test done
2210 above (see detailed comments there) that ensures that I1DEST
2211 isn't mentioned in any SETs in NEWPAT that are field assignments. */
2213 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
2214 0, (rtx*) 0))
2215         {
2216           undo_all ();
2217           return 0;
2218         }
2220 n_occurrences = 0;
2221 subst_low_cuid = INSN_CUID (i1);
2222 newpat = subst (newpat, i1dest, i1src, 0, 0);
2223 substed_i1 = 1;
2226 /* Fail if an autoincrement side-effect has been duplicated. Be careful
2227 to count all the ways that I2SRC and I1SRC can be used. */
2228 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
2229 && i2_is_used + added_sets_2 > 1)
2230 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2231 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
2232 > 1))
2233 /* Fail if we tried to make a new register. */
2234 || max_reg_num () != maxreg
2235 /* Fail if we couldn't do something and have a CLOBBER. */
2236 || GET_CODE (newpat) == CLOBBER
2237 /* Fail if this new pattern is a MULT and we didn't have one before
2238 at the outer level. */
2239 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
2240 && ! have_mult))
2241     {
2242       undo_all ();
2243       return 0;
2244     }
2246 /* If the actions of the earlier insns must be kept
2247 in addition to substituting them into the latest one,
2248 we must make a new PARALLEL for the latest insn
2249      to hold the additional SETs.  */
2251   if (added_sets_1 || added_sets_2)
2252     {
2253       combine_extras++;
2255       if (GET_CODE (newpat) == PARALLEL)
2256         {
2257           rtvec old = XVEC (newpat, 0);
2258           total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
2259           newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2260           memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2261                   sizeof (old->elem[0]) * old->num_elem);
2262         }
2263       else
2264         {
2265           rtx old = newpat;
2266           total_sets = 1 + added_sets_1 + added_sets_2;
2267           newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2268           XVECEXP (newpat, 0, 0) = old;
2269         }
2271       if (added_sets_1)
2272         XVECEXP (newpat, 0, --total_sets)
2273           = (GET_CODE (PATTERN (i1)) == PARALLEL
2274              ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
2276       if (added_sets_2)
2277         {
2278           /* If there is no I1, use I2's body as is.  We used to also not do
2279              the subst call below if I2 was substituted into I3,
2280              but that could lose a simplification.  */
2281           if (i1 == 0)
2282             XVECEXP (newpat, 0, --total_sets) = i2pat;
2283           else
2284             /* See comment where i2pat is assigned.  */
2285             XVECEXP (newpat, 0, --total_sets)
2286               = subst (i2pat, i1dest, i1src, 0, 0);
2287         }
2288     }
2290 /* We come here when we are replacing a destination in I2 with the
2291 destination of I3. */
2292 validate_replacement:
2294 /* Note which hard regs this insn has as inputs. */
2295 mark_used_regs_combine (newpat);
2297 /* If recog_for_combine fails, it strips existing clobbers. If we'll
2298 consider splitting this pattern, we might need these clobbers. */
2299   if (i1 && GET_CODE (newpat) == PARALLEL
2300       && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
2301     {
2302       int len = XVECLEN (newpat, 0);
2304       newpat_vec_with_clobbers = rtvec_alloc (len);
2305       for (i = 0; i < len; i++)
2306         RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
2307     }
2309 /* Is the result of combination a valid instruction? */
2310 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2312 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2313 the second SET's destination is a register that is unused and isn't
2314 marked as an instruction that might trap in an EH region. In that case,
2315 we just need the first SET. This can occur when simplifying a divmod
2316 insn. We *must* test for this case here because the code below that
2317 splits two independent SETs doesn't handle this case correctly when it
2318 updates the register status.
2320 It's pointless doing this if we originally had two sets, one from
2321 i3, and one from i2. Combining then splitting the parallel results
2322 in the original i2 again plus an invalid insn (which we delete).
2323 The net effect is only to move instructions around, which makes
2324 debug info less accurate.
2326 Also check the case where the first SET's destination is unused.
2327 That would not cause incorrect code, but does cause an unneeded
2328 insn to remain. */
2330 if (insn_code_number < 0
2331 && !(added_sets_2 && i1 == 0)
2332 && GET_CODE (newpat) == PARALLEL
2333 && XVECLEN (newpat, 0) == 2
2334 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2335 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2336 && asm_noperands (newpat) < 0)
2338 rtx set0 = XVECEXP (newpat, 0, 0);
2339 rtx set1 = XVECEXP (newpat, 0, 1);
2340 rtx note;
2342 if (((REG_P (SET_DEST (set1))
2343 && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
2344 || (GET_CODE (SET_DEST (set1)) == SUBREG
2345 && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
2346 && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2347 || INTVAL (XEXP (note, 0)) <= 0)
2348 && ! side_effects_p (SET_SRC (set1)))
2350 newpat = set0;
2351 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2354 else if (((REG_P (SET_DEST (set0))
2355 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
2356 || (GET_CODE (SET_DEST (set0)) == SUBREG
2357 && find_reg_note (i3, REG_UNUSED,
2358 SUBREG_REG (SET_DEST (set0)))))
2359 && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2360 || INTVAL (XEXP (note, 0)) <= 0)
2361 && ! side_effects_p (SET_SRC (set0)))
2363 newpat = set1;
2364 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2366 if (insn_code_number >= 0)
2368 /* If we will be able to accept this, we have made a
2369 change to the destination of I3. This requires us to
2370 do a few adjustments. */
2372 PATTERN (i3) = newpat;
2373 adjust_for_new_dest (i3);
2378 /* If we were combining three insns and the result is a simple SET
2379 with no ASM_OPERANDS that wasn't recognized, try to split it into two
2380 insns. There are two ways to do this. It can be split using a
2381 machine-specific method (like when you have an addition of a large
2382 constant) or by combine in the function find_split_point. */
2384 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2385 && asm_noperands (newpat) < 0)
2387 rtx m_split, *split;
2388 rtx ni2dest = i2dest;
2390 /* See if the MD file can split NEWPAT. If it can't, see if letting it
2391 use I2DEST as a scratch register will help. In the latter case,
2392 convert I2DEST to the mode of the source of NEWPAT if we can. */
2394 m_split = split_insns (newpat, i3);
2396 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2397 inputs of NEWPAT. */
2399 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2400 possible to try that as a scratch reg. This would require adding
2401 more code to make it work though. */
2403 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2405 enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
2406 /* If I2DEST is a hard register or the only use of a pseudo,
2407 we can change its mode. */
2408 if (new_mode != GET_MODE (i2dest)
2409 && new_mode != VOIDmode
2410 && can_change_dest_mode (i2dest, added_sets_2, new_mode))
2411 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2412 REGNO (i2dest));
2414 m_split = split_insns (gen_rtx_PARALLEL
2415 (VOIDmode,
2416 gen_rtvec (2, newpat,
2417 gen_rtx_CLOBBER (VOIDmode,
2418 ni2dest))),
2419 i3);
2420 /* If the split with the mode-changed register didn't work, try
2421 the original register. */
2422 if (! m_split && ni2dest != i2dest)
2424 ni2dest = i2dest;
2425 m_split = split_insns (gen_rtx_PARALLEL
2426 (VOIDmode,
2427 gen_rtvec (2, newpat,
2428 gen_rtx_CLOBBER (VOIDmode,
2429 i2dest))),
2430 i3);
2434 /* If recog_for_combine has discarded clobbers, try to use them
2435 again for the split. */
2436 if (m_split == 0 && newpat_vec_with_clobbers)
2437 m_split
2438 = split_insns (gen_rtx_PARALLEL (VOIDmode,
2439 newpat_vec_with_clobbers), i3);
2441 if (m_split && NEXT_INSN (m_split) == NULL_RTX)
2443 m_split = PATTERN (m_split);
2444 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2445 if (insn_code_number >= 0)
2446 newpat = m_split;
2448 else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
2449 && (next_real_insn (i2) == i3
2450 || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
2452 rtx i2set, i3set;
2453 rtx newi3pat = PATTERN (NEXT_INSN (m_split));
2454 newi2pat = PATTERN (m_split);
2456 i3set = single_set (NEXT_INSN (m_split));
2457 i2set = single_set (m_split);
2459 /* In case we changed the mode of I2DEST, replace it in the
2460 pseudo-register table here. We can't do it above in case this
2461 code doesn't get executed and we do a split the other way. */
2463 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2464 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2466 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2468 /* If I2 or I3 has multiple SETs, we won't know how to track
2469 register status, so don't use these insns. If I2's destination
2470 is used between I2 and I3, we also can't use these insns. */
2472 if (i2_code_number >= 0 && i2set && i3set
2473 && (next_real_insn (i2) == i3
2474 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2475 insn_code_number = recog_for_combine (&newi3pat, i3,
2476 &new_i3_notes);
2477 if (insn_code_number >= 0)
2478 newpat = newi3pat;
2480 /* It is possible that both insns now set the destination of I3.
2481 If so, we must show an extra use of it. */
2483 if (insn_code_number >= 0)
2485 rtx new_i3_dest = SET_DEST (i3set);
2486 rtx new_i2_dest = SET_DEST (i2set);
2488 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2489 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2490 || GET_CODE (new_i3_dest) == SUBREG)
2491 new_i3_dest = XEXP (new_i3_dest, 0);
2493 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2494 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2495 || GET_CODE (new_i2_dest) == SUBREG)
2496 new_i2_dest = XEXP (new_i2_dest, 0);
2498 if (REG_P (new_i3_dest)
2499 && REG_P (new_i2_dest)
2500 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2501 REG_N_SETS (REGNO (new_i2_dest))++;
2505 /* If we can split it and use I2DEST, go ahead and see if that
2506 helps things be recognized. Verify that none of the registers
2507 are set between I2 and I3. */
2508 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2509 #ifdef HAVE_cc0
2510 && REG_P (i2dest)
2511 #endif
2512 /* We need I2DEST in the proper mode. If it is a hard register
2513 or the only use of a pseudo, we can change its mode.
2514 Make sure we don't change a hard register to have a mode that
2515 isn't valid for it, or change the number of registers. */
2516 && (GET_MODE (*split) == GET_MODE (i2dest)
2517 || GET_MODE (*split) == VOIDmode
2518 || can_change_dest_mode (i2dest, added_sets_2,
2519 GET_MODE (*split)))
2520 && (next_real_insn (i2) == i3
2521 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2522 /* We can't overwrite I2DEST if its value is still used by
2523 NEWPAT. */
2524 && ! reg_referenced_p (i2dest, newpat))
2526 rtx newdest = i2dest;
2527 enum rtx_code split_code = GET_CODE (*split);
2528 enum machine_mode split_mode = GET_MODE (*split);
2530 /* Get NEWDEST as a register in the proper mode. We have already
2531 validated that we can do this. */
2532 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2534 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2536 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2537 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2540 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2541 an ASHIFT. This can occur if it was inside a PLUS and hence
2542 appeared to be a memory address. This is a kludge. */
2543 if (split_code == MULT
2544 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2545 && INTVAL (XEXP (*split, 1)) > 0
2546           && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2547         {
2548           SUBST (*split, gen_rtx_ASHIFT (split_mode,
2549                                          XEXP (*split, 0), GEN_INT (i)));
2550           /* Update split_code because we may not have a multiply
2551              anymore.  */
2552           split_code = GET_CODE (*split);
2553         }
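/* Editorial example: a split point of (mult:SI (reg 100) (const_int 8))
   that arose inside an address becomes (ashift:SI (reg 100) (const_int 3)),
   since exact_log2 (8) == 3; outside a MEM, backends typically only
   recognize the shift form.  */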
2555 #ifdef INSN_SCHEDULING
2556 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2557 be written as a ZERO_EXTEND. */
2558 if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
2560 #ifdef LOAD_EXTEND_OP
2561 /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2562 what it really is. */
2563 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
2564 == SIGN_EXTEND)
2565 SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
2566 SUBREG_REG (*split)));
2567 else
2568 #endif
2569 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2570 SUBREG_REG (*split)));
2572 #endif
2574 newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2575 SUBST (*split, newdest);
2576 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2578 /* recog_for_combine might have added CLOBBERs to newi2pat.
2579 Make sure NEWPAT does not depend on the clobbered regs. */
2580 if (GET_CODE (newi2pat) == PARALLEL)
2581 for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
2582 if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
2583           {
2584             rtx reg = XEXP (XVECEXP (newpat, 0, i), 0);
2585             if (reg_overlap_mentioned_p (reg, newpat))
2586               {
2587                 undo_all ();
2588                 return 0;
2589               }
2590           }
2592 /* If the split point was a MULT and we didn't have one before,
2593 don't use one now. */
2594 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2595 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2599 /* Check for a case where we loaded from memory in a narrow mode and
2600 then sign extended it, but we need both registers. In that case,
2601 we have a PARALLEL with both loads from the same memory location.
2602 We can split this into a load from memory followed by a register-register
2603 copy. This saves at least one insn, more if register allocation can
2604 eliminate the copy.
2606 We cannot do this if the destination of the first assignment is a
2607 condition code register or cc0. We eliminate this case by making sure
2608 the SET_DEST and SET_SRC have the same mode.
2610 We cannot do this if the destination of the second assignment is
2611 a register that we have already assumed is zero-extended. Similarly
2612 for a SUBREG of such a register. */
2614 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2615 && GET_CODE (newpat) == PARALLEL
2616 && XVECLEN (newpat, 0) == 2
2617 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2618 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2619 && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
2620 == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
2621 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2622 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2623 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2624 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2625 INSN_CUID (i2))
2626 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2627 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2628 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2629 (REG_P (temp)
2630 && reg_stat[REGNO (temp)].nonzero_bits != 0
2631 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2632 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2633 && (reg_stat[REGNO (temp)].nonzero_bits
2634 != GET_MODE_MASK (word_mode))))
2635 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2636 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2637 (REG_P (temp)
2638 && reg_stat[REGNO (temp)].nonzero_bits != 0
2639 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2640 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2641 && (reg_stat[REGNO (temp)].nonzero_bits
2642 != GET_MODE_MASK (word_mode)))))
2643 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2644 SET_SRC (XVECEXP (newpat, 0, 1)))
2645 && ! find_reg_note (i3, REG_UNUSED,
2646                                SET_DEST (XVECEXP (newpat, 0, 0))))
2647     {
2648       rtx ni2dest;
2650 newi2pat = XVECEXP (newpat, 0, 0);
2651 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2652 newpat = XVECEXP (newpat, 0, 1);
2653 SUBST (SET_SRC (newpat),
2654 gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
2655 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2657 if (i2_code_number >= 0)
2658 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2660 if (insn_code_number >= 0)
2661         swap_i2i3 = 1;
2662     }
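/* Illustrative sketch (editorial; assumed RTL): the case above rewrites
	(parallel [(set (reg:SI 100) (sign_extend:SI (mem:HI addr)))
		   (set (reg:HI 101) (mem:HI addr))])
   as
	I2: (set (reg:SI 100) (sign_extend:SI (mem:HI addr)))
	I3: (set (reg:HI 101) (low part of (reg:SI 100)))
   i.e. one memory load plus a register-register copy that the register
   allocator may eliminate.  */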
2664 /* Similarly, check for a case where we have a PARALLEL of two independent
2665 SETs but we started with three insns. In this case, we can do the sets
2666 as two separate insns. This case occurs when some SET allows two
2667 other insns to combine, but the destination of that SET is still live. */
2669 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2670 && GET_CODE (newpat) == PARALLEL
2671 && XVECLEN (newpat, 0) == 2
2672 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2673 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2674 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2675 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2676 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2677 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2678 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2679 INSN_CUID (i2))
2680 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2681 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2682 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2683 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2684 XVECEXP (newpat, 0, 0))
2685 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2686 XVECEXP (newpat, 0, 1))
2687 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2688 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2690 /* Normally, it doesn't matter which of the two is done first,
2691 but it does if one references cc0. In that case, it has to
2692 be first. */
2693 #ifdef HAVE_cc0
2694 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2696 newi2pat = XVECEXP (newpat, 0, 0);
2697 newpat = XVECEXP (newpat, 0, 1);
2699 else
2700 #endif
2702 newi2pat = XVECEXP (newpat, 0, 1);
2703 newpat = XVECEXP (newpat, 0, 0);
2706 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2708 if (i2_code_number >= 0)
2709 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2712 /* If it still isn't recognized, fail and change things back the way they
2713 were. */
2714 if ((insn_code_number < 0
2715 /* Is the result a reasonable ASM_OPERANDS? */
2716 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2717     {
2718       undo_all ();
2719       return 0;
2720     }
2722 /* If we had to change another insn, make sure it is valid also. */
2723 if (undobuf.other_insn)
2725 rtx other_pat = PATTERN (undobuf.other_insn);
2726 rtx new_other_notes;
2727 rtx note, next;
2729 CLEAR_HARD_REG_SET (newpat_used_regs);
2731 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2732 &new_other_notes);
2734 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2735         {
2736           undo_all ();
2737           return 0;
2738         }
2740 PATTERN (undobuf.other_insn) = other_pat;
2742 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2743 are still valid. Then add any non-duplicate notes added by
2744 recog_for_combine. */
2745 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2747 next = XEXP (note, 1);
2749 if (REG_NOTE_KIND (note) == REG_UNUSED
2750 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2752 if (REG_P (XEXP (note, 0)))
2753 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2755 remove_note (undobuf.other_insn, note);
2759 for (note = new_other_notes; note; note = XEXP (note, 1))
2760 if (REG_P (XEXP (note, 0)))
2761 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2763 distribute_notes (new_other_notes, undobuf.other_insn,
2764 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2766 #ifdef HAVE_cc0
2767 /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
2768 they are adjacent to each other or not. */
2769   {
2770     rtx p = prev_nonnote_insn (i3);
2771     if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
2772         && sets_cc0_p (newi2pat))
2773       {
2774         undo_all ();
2775         return 0;
2776       }
2777   }
2778 #endif
2780 /* Only allow this combination if insn_rtx_costs reports that the
2781 replacement instructions are cheaper than the originals. */
2782 if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat))
2783     {
2784       undo_all ();
2785       return 0;
2786     }
2788 /* We now know that we can do this combination. Merge the insns and
2789 update the status of registers and LOG_LINKS. */
2791 if (swap_i2i3)
2793 rtx insn;
2794 rtx link;
2795 rtx ni2dest;
2797 /* I3 now uses what used to be its destination and which is now
2798 I2's destination. This requires us to do a few adjustments. */
2799 PATTERN (i3) = newpat;
2800 adjust_for_new_dest (i3);
2802 /* We need a LOG_LINK from I3 to I2. But we used to have one,
2803 so we still will.
2805 However, some later insn might be using I2's dest and have
2806 a LOG_LINK pointing at I3. We must remove this link.
2807 The simplest way to remove the link is to point it at I1,
2808 which we know will be a NOTE. */
2810 /* newi2pat is usually a SET here; however, recog_for_combine might
2811 have added some clobbers. */
2812 if (GET_CODE (newi2pat) == PARALLEL)
2813 ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
2814 else
2815 ni2dest = SET_DEST (newi2pat);
2817 for (insn = NEXT_INSN (i3);
2818 insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2819 || insn != BB_HEAD (this_basic_block->next_bb));
2820 insn = NEXT_INSN (insn))
2822 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2824 for (link = LOG_LINKS (insn); link;
2825 link = XEXP (link, 1))
2826 if (XEXP (link, 0) == i3)
2827 XEXP (link, 0) = i1;
2829 break;
2835 rtx i3notes, i2notes, i1notes = 0;
2836 rtx i3links, i2links, i1links = 0;
2837 rtx midnotes = 0;
2838 unsigned int regno;
2839 /* Compute which registers we expect to eliminate. newi2pat may be setting
2840 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2841 same as i3dest, in which case newi2pat may be setting i1dest. */
2842 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2843 || i2dest_in_i2src || i2dest_in_i1src
2844 || !i2dest_killed
2845 ? 0 : i2dest);
2846 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2847 || (newi2pat && reg_set_p (i1dest, newi2pat))
2848 || !i1dest_killed
2849 ? 0 : i1dest);
2851 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2852 clear them. */
2853 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2854 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2855 if (i1)
2856 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2858     /* Ensure that we do not have something that should not be shared but
2859        occurs multiple times in the new insns.  Check this by first resetting
2860        all the `used' flags and then copying anything that is shared.  */
2862 reset_used_flags (i3notes);
2863 reset_used_flags (i2notes);
2864 reset_used_flags (i1notes);
2865 reset_used_flags (newpat);
2866 reset_used_flags (newi2pat);
2867 if (undobuf.other_insn)
2868 reset_used_flags (PATTERN (undobuf.other_insn));
2870 i3notes = copy_rtx_if_shared (i3notes);
2871 i2notes = copy_rtx_if_shared (i2notes);
2872 i1notes = copy_rtx_if_shared (i1notes);
2873 newpat = copy_rtx_if_shared (newpat);
2874 newi2pat = copy_rtx_if_shared (newi2pat);
2875 if (undobuf.other_insn)
2876 reset_used_flags (PATTERN (undobuf.other_insn));
2878 INSN_CODE (i3) = insn_code_number;
2879 PATTERN (i3) = newpat;
2881 if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
2883 rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2885 reset_used_flags (call_usage);
2886 call_usage = copy_rtx (call_usage);
2888 if (substed_i2)
2889 replace_rtx (call_usage, i2dest, i2src);
2891 if (substed_i1)
2892 replace_rtx (call_usage, i1dest, i1src);
2894 CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2897 if (undobuf.other_insn)
2898 INSN_CODE (undobuf.other_insn) = other_code_number;
2900 /* We had one special case above where I2 had more than one set and
2901 we replaced a destination of one of those sets with the destination
2902 of I3. In that case, we have to update LOG_LINKS of insns later
2903 in this basic block. Note that this (expensive) case is rare.
2905 Also, in this case, we must pretend that all REG_NOTEs for I2
2906 actually came from I3, so that REG_UNUSED notes from I2 will be
2907 properly handled. */
2909 if (i3_subst_into_i2)
2911 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2912 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2913 && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
2914 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2915 && ! find_reg_note (i2, REG_UNUSED,
2916 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2917             for (temp = NEXT_INSN (i2);
2918                  temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2919                           || BB_HEAD (this_basic_block->next_bb) != temp);
2920                  temp = NEXT_INSN (temp))
2921 if (temp != i3 && INSN_P (temp))
2922 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2923 if (XEXP (link, 0) == i2)
2924 XEXP (link, 0) = i3;
2926 if (i3notes)
2928 rtx link = i3notes;
2929 while (XEXP (link, 1))
2930 link = XEXP (link, 1);
2931 XEXP (link, 1) = i2notes;
2933 else
2934 i3notes = i2notes;
2935 i2notes = 0;
2938 LOG_LINKS (i3) = 0;
2939 REG_NOTES (i3) = 0;
2940 LOG_LINKS (i2) = 0;
2941 REG_NOTES (i2) = 0;
2943 if (newi2pat)
2945 INSN_CODE (i2) = i2_code_number;
2946 PATTERN (i2) = newi2pat;
2948 else
2949 SET_INSN_DELETED (i2);
2951 if (i1)
2953 LOG_LINKS (i1) = 0;
2954 REG_NOTES (i1) = 0;
2955 SET_INSN_DELETED (i1);
2958 /* Get death notes for everything that is now used in either I3 or
2959 I2 and used to die in a previous insn. If we built two new
2960 patterns, move from I1 to I2 then I2 to I3 so that we get the
2961 proper movement on registers that I2 modifies. */
2963 if (newi2pat)
2965 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2966 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2968 else
2969 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2970 i3, &midnotes);
2972 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2973 if (i3notes)
2974 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2975 elim_i2, elim_i1);
2976 if (i2notes)
2977 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2978 elim_i2, elim_i1);
2979 if (i1notes)
2980 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2981 elim_i2, elim_i1);
2982 if (midnotes)
2983 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2984 elim_i2, elim_i1);
2986 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2987 know these are REG_UNUSED and want them to go to the desired insn,
2988 so we always pass it as i3. We have not counted the notes in
2989 reg_n_deaths yet, so we need to do so now. */
2991 if (newi2pat && new_i2_notes)
2993 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2994 if (REG_P (XEXP (temp, 0)))
2995 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2997 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3000 if (new_i3_notes)
3002 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
3003 if (REG_P (XEXP (temp, 0)))
3004 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
3006 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
3009 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
3010 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
3011 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
3012 in that case, it might delete I2. Similarly for I2 and I1.
3013 Show an additional death due to the REG_DEAD note we make here. If
3014 we discard it in distribute_notes, we will decrement it again. */
3016 if (i3dest_killed)
3018 if (REG_P (i3dest_killed))
3019 REG_N_DEATHS (REGNO (i3dest_killed))++;
3021 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
3022 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
3023 NULL_RTX),
3024 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
3025 else
3026 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
3027 NULL_RTX),
3028 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3029 elim_i2, elim_i1);
3032 if (i2dest_in_i2src)
3034 if (REG_P (i2dest))
3035 REG_N_DEATHS (REGNO (i2dest))++;
3037 if (newi2pat && reg_set_p (i2dest, newi2pat))
3038 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
3039 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3040 else
3041 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
3042 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3043 NULL_RTX, NULL_RTX);
3046 if (i1dest_in_i1src)
3048 if (REG_P (i1dest))
3049 REG_N_DEATHS (REGNO (i1dest))++;
3051 if (newi2pat && reg_set_p (i1dest, newi2pat))
3052 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
3053 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3054 else
3055 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
3056 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3057 NULL_RTX, NULL_RTX);
3060 distribute_links (i3links);
3061 distribute_links (i2links);
3062 distribute_links (i1links);
3064 if (REG_P (i2dest))
3066 rtx link;
3067 rtx i2_insn = 0, i2_val = 0, set;
3069 /* The insn that used to set this register doesn't exist, and
3070 this life of the register may not exist either. See if one of
3071 I3's links points to an insn that sets I2DEST. If it does,
3072 that is now the last known value for I2DEST. If we don't update
3073 this and I2 set the register to a value that depended on its old
3074        contents, we will get confused.  If this insn is used, things
3075 will be set correctly in combine_instructions. */
3077 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3078 if ((set = single_set (XEXP (link, 0))) != 0
3079 && rtx_equal_p (i2dest, SET_DEST (set)))
3080 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
3082 record_value_for_reg (i2dest, i2_insn, i2_val);
3084 /* If the reg formerly set in I2 died only once and that was in I3,
3085 zero its use count so it won't make `reload' do any work. */
3086 if (! added_sets_2
3087 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
3088 && ! i2dest_in_i2src)
3090 regno = REGNO (i2dest);
3091 REG_N_SETS (regno)--;
3095 if (i1 && REG_P (i1dest))
3097 rtx link;
3098 rtx i1_insn = 0, i1_val = 0, set;
3100 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3101 if ((set = single_set (XEXP (link, 0))) != 0
3102 && rtx_equal_p (i1dest, SET_DEST (set)))
3103 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
3105 record_value_for_reg (i1dest, i1_insn, i1_val);
3107 regno = REGNO (i1dest);
3108 if (! added_sets_1 && ! i1dest_in_i1src)
3109 REG_N_SETS (regno)--;
3112   /* Update reg_stat[].nonzero_bits et al for any changes that may have
3113      been made to this insn.  The order of the calls to
3114      set_nonzero_bits_and_sign_copies () below is important, because
3115      newi2pat can affect the nonzero_bits of newpat.  */
3116 if (newi2pat)
3117 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
3118 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
3120 /* Set new_direct_jump_p if a new return or simple jump instruction
3121 has been created.
3123 If I3 is now an unconditional jump, ensure that it has a
3124 BARRIER following it since it may have initially been a
3125 conditional jump. It may also be the last nonnote insn. */
3127 if (returnjump_p (i3) || any_uncondjump_p (i3))
3129 *new_direct_jump_p = 1;
3130 mark_jump_label (PATTERN (i3), i3, 0);
3132 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
3133 || !BARRIER_P (temp))
3134 emit_barrier_after (i3);
3137 if (undobuf.other_insn != NULL_RTX
3138 && (returnjump_p (undobuf.other_insn)
3139 || any_uncondjump_p (undobuf.other_insn)))
3141 *new_direct_jump_p = 1;
3143 if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
3144 || !BARRIER_P (temp))
3145 emit_barrier_after (undobuf.other_insn);
3148   /* A NOOP jump does not need a barrier, but it does need its CFG
3149      cleaned up.  */
3150 if (GET_CODE (newpat) == SET
3151 && SET_SRC (newpat) == pc_rtx
3152 && SET_DEST (newpat) == pc_rtx)
3153 *new_direct_jump_p = 1;
3156 combine_successes++;
3157 undo_commit ();
3159 if (added_links_insn
3160 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
3161 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
3162 return added_links_insn;
3163 else
3164     return newi2pat ? i2 : i3;
3165 }
3167 /* Undo all the modifications recorded in undobuf. */
3169 static void
3170 undo_all (void)
3171 {
3172   struct undo *undo, *next;
3174   for (undo = undobuf.undos; undo; undo = next)
3175     {
3176       next = undo->next;
3177       if (undo->is_int)
3178         *undo->where.i = undo->old_contents.i;
3179       else
3180         *undo->where.r = undo->old_contents.r;
3182       undo->next = undobuf.frees;
3183       undobuf.frees = undo;
3184     }
3186   undobuf.undos = 0;
3187 }
3189 /* We've committed to accepting the changes we made. Move all
3190 of the undos to the free list. */
3192 static void
3193 undo_commit (void)
3194 {
3195   struct undo *undo, *next;
3197   for (undo = undobuf.undos; undo; undo = next)
3198     {
3199       next = undo->next;
3200       undo->next = undobuf.frees;
3201       undobuf.frees = undo;
3202     }
3203   undobuf.undos = 0;
3204 }
3207 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
3208 where we have an arithmetic expression and return that point. LOC will
3209 be inside INSN.
3211 try_combine will call this function to see if an insn can be split into
3212 two insns. */
3214 static rtx *
3215 find_split_point (rtx *loc, rtx insn)
3216 {
3217 rtx x = *loc;
3218 enum rtx_code code = GET_CODE (x);
3219 rtx *split;
3220 unsigned HOST_WIDE_INT len = 0;
3221 HOST_WIDE_INT pos = 0;
3222 int unsignedp = 0;
3223 rtx inner = NULL_RTX;
3225 /* First special-case some codes. */
3226   switch (code)
3227     {
3228 case SUBREG:
3229 #ifdef INSN_SCHEDULING
3230 /* If we are making a paradoxical SUBREG invalid, it becomes a split
3231 point. */
3232 if (MEM_P (SUBREG_REG (x)))
3233 return loc;
3234 #endif
3235 return find_split_point (&SUBREG_REG (x), insn);
3237 case MEM:
3238 #ifdef HAVE_lo_sum
3239 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
3240 using LO_SUM and HIGH. */
3241 if (GET_CODE (XEXP (x, 0)) == CONST
3242 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3244 SUBST (XEXP (x, 0),
3245 gen_rtx_LO_SUM (Pmode,
3246 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
3247 XEXP (x, 0)));
3248 return &XEXP (XEXP (x, 0), 0);
3250 #endif
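/* Editorial example (assumed RTL): on a HAVE_lo_sum target,
   (mem (symbol_ref "x")) is rewritten as
	(mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
   and the returned split point is the HIGH term, so the high part of the
   address can be computed by a separate insn.  */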
3252 /* If we have a PLUS whose second operand is a constant and the
3253      address is not valid, perhaps we can split it up using
3254 the machine-specific way to split large constants. We use
3255 the first pseudo-reg (one of the virtual regs) as a placeholder;
3256 it will not remain in the result. */
3257 if (GET_CODE (XEXP (x, 0)) == PLUS
3258 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3259 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
3261 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
3262 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
3263 subst_insn);
3265 /* This should have produced two insns, each of which sets our
3266 placeholder. If the source of the second is a valid address,
3267        we can put both sources together and make a split point
3268 in the middle. */
3270 if (seq
3271 && NEXT_INSN (seq) != NULL_RTX
3272 && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
3273 && NONJUMP_INSN_P (seq)
3274 && GET_CODE (PATTERN (seq)) == SET
3275 && SET_DEST (PATTERN (seq)) == reg
3276 && ! reg_mentioned_p (reg,
3277 SET_SRC (PATTERN (seq)))
3278 && NONJUMP_INSN_P (NEXT_INSN (seq))
3279 && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
3280 && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
3281 && memory_address_p (GET_MODE (x),
3282 SET_SRC (PATTERN (NEXT_INSN (seq)))))
3284 rtx src1 = SET_SRC (PATTERN (seq));
3285 rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
3287 /* Replace the placeholder in SRC2 with SRC1. If we can
3288 find where in SRC2 it was placed, that can become our
3289 split point and we can replace this address with SRC2.
3290 Just try two obvious places. */
3292 src2 = replace_rtx (src2, reg, src1);
3293 split = 0;
3294 if (XEXP (src2, 0) == src1)
3295 split = &XEXP (src2, 0);
3296 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
3297 && XEXP (XEXP (src2, 0), 0) == src1)
3298 split = &XEXP (XEXP (src2, 0), 0);
3300 if (split)
3302 SUBST (XEXP (x, 0), src2);
3303 return split;
3307 /* If that didn't work, perhaps the first operand is complex and
3308 needs to be computed separately, so make a split point there.
3309 This will occur on machines that just support REG + CONST
3310 and have a constant moved through some previous computation. */
3312 else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
3313 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
3314 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
3315 return &XEXP (XEXP (x, 0), 0);
3317 break;
3319 case SET:
3320 #ifdef HAVE_cc0
3321 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
3322 ZERO_EXTRACT, the most likely reason why this doesn't match is that
3323 we need to put the operand into a register. So split at that
3324 point. */
3326 if (SET_DEST (x) == cc0_rtx
3327 && GET_CODE (SET_SRC (x)) != COMPARE
3328 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
3329 && !OBJECT_P (SET_SRC (x))
3330 && ! (GET_CODE (SET_SRC (x)) == SUBREG
3331 && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
3332 return &SET_SRC (x);
3333 #endif
3335 /* See if we can split SET_SRC as it stands. */
3336 split = find_split_point (&SET_SRC (x), insn);
3337 if (split && split != &SET_SRC (x))
3338 return split;
3340 /* See if we can split SET_DEST as it stands. */
3341 split = find_split_point (&SET_DEST (x), insn);
3342 if (split && split != &SET_DEST (x))
3343 return split;
3345 /* See if this is a bitfield assignment with everything constant. If
3346 so, this is an IOR of an AND, so split it into that. */
3347 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
3348 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
3349 <= HOST_BITS_PER_WIDE_INT)
3350 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
3351 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
3352 && GET_CODE (SET_SRC (x)) == CONST_INT
3353 && ((INTVAL (XEXP (SET_DEST (x), 1))
3354 + INTVAL (XEXP (SET_DEST (x), 2)))
3355 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
3356 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
3358 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3359 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3360 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3361 rtx dest = XEXP (SET_DEST (x), 0);
3362 enum machine_mode mode = GET_MODE (dest);
3363 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3364 rtx or_mask;
3366 if (BITS_BIG_ENDIAN)
3367 pos = GET_MODE_BITSIZE (mode) - len - pos;
3369 or_mask = gen_int_mode (src << pos, mode);
3370 if (src == mask)
3371 SUBST (SET_SRC (x),
3372 simplify_gen_binary (IOR, mode, dest, or_mask));
3373 else
3375 rtx negmask = gen_int_mode (~(mask << pos), mode);
3376 SUBST (SET_SRC (x),
3377 simplify_gen_binary (IOR, mode,
3378 simplify_gen_binary (AND, mode,
3379 dest, negmask),
3380 or_mask));
3383 SUBST (SET_DEST (x), dest);
3385 split = find_split_point (&SET_SRC (x), insn);
3386 if (split && split != &SET_SRC (x))
3387 return split;
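/* A worked instance, assuming !BITS_BIG_ENDIAN: storing 3 into a
   4-bit field at bit 0,
   (set (zero_extract:SI (reg:SI 100) (const_int 4) (const_int 0))
        (const_int 3)),
   becomes
   (set (reg:SI 100) (ior:SI (and:SI (reg:SI 100) (const_int -16))
                             (const_int 3))).  */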
3390 /* Otherwise, see if this is an operation that we can split into two.
3391 If so, try to split that. */
3392 code = GET_CODE (SET_SRC (x));
3394 switch (code)
3396 case AND:
3397 /* If we are AND'ing with a large constant that is only a single
3398 bit and the result is only being used in a context where we
3399 need to know if it is zero or nonzero, replace it with a bit
3400 extraction. This will avoid the large constant, which might
3401 have taken more than one insn to make. If the constant were
3402 not a valid argument to the AND but took only one insn to make,
3403 this is no worse, but if it took more than one insn, it will
3404 be better. */
3406 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3407 && REG_P (XEXP (SET_SRC (x), 0))
3408 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3409 && REG_P (SET_DEST (x))
3410 && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3411 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3412 && XEXP (*split, 0) == SET_DEST (x)
3413 && XEXP (*split, 1) == const0_rtx)
3415 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3416 XEXP (SET_SRC (x), 0),
3417 pos, NULL_RTX, 1, 1, 0, 0);
3418 if (extraction != 0)
3420 SUBST (SET_SRC (x), extraction);
3421 return find_split_point (loc, insn);
3424 break;
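/* E.g., if (reg:SI 100) is set to (and:SI (reg:SI 101)
   (const_int 65536)) and its only use is (ne (reg:SI 100)
   (const_int 0)), the AND can be replaced by
   (zero_extract:SI (reg:SI 101) (const_int 1) (const_int 16)),
   avoiding the 65536 immediate (register numbers are illustrative).  */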
3426 case NE:
3427 /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3428 is known to be on, this can be converted into a NEG of a shift. */
3429 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3430 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3431 && 1 <= (pos = exact_log2
3432 (nonzero_bits (XEXP (SET_SRC (x), 0),
3433 GET_MODE (XEXP (SET_SRC (x), 0))))))
3435 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3437 SUBST (SET_SRC (x),
3438 gen_rtx_NEG (mode,
3439 gen_rtx_LSHIFTRT (mode,
3440 XEXP (SET_SRC (x), 0),
3441 GEN_INT (pos))));
3443 split = find_split_point (&SET_SRC (x), insn);
3444 if (split && split != &SET_SRC (x))
3445 return split;
3447 break;
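/* Concretely, when STORE_FLAG_VALUE is -1 and X is
   (and:SI (reg:SI 100) (const_int 8)), only bit 3 of X can be
   nonzero, so (ne:SI X (const_int 0)) becomes
   (neg:SI (lshiftrt:SI X (const_int 3))), which is 0 or -1.  */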
3449 case SIGN_EXTEND:
3450 inner = XEXP (SET_SRC (x), 0);
3452 /* We can't optimize if either mode is a partial integer
3453 mode as we don't know how many bits are significant
3454 in those modes. */
3455 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3456 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3457 break;
3459 pos = 0;
3460 len = GET_MODE_BITSIZE (GET_MODE (inner));
3461 unsignedp = 0;
3462 break;
3464 case SIGN_EXTRACT:
3465 case ZERO_EXTRACT:
3466 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3467 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3469 inner = XEXP (SET_SRC (x), 0);
3470 len = INTVAL (XEXP (SET_SRC (x), 1));
3471 pos = INTVAL (XEXP (SET_SRC (x), 2));
3473 if (BITS_BIG_ENDIAN)
3474 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3475 unsignedp = (code == ZERO_EXTRACT);
3477 break;
3479 default:
3480 break;
3483 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3485 enum machine_mode mode = GET_MODE (SET_SRC (x));
3487 /* For unsigned, we have a choice of a shift followed by an
3488 AND or two shifts. Use two shifts for field sizes where the
3489 constant might be too large. We assume here that we can
3490 always at least get 8-bit constants in an AND insn, which is
3491 true for every current RISC. */
3493 if (unsignedp && len <= 8)
3495 SUBST (SET_SRC (x),
3496 gen_rtx_AND (mode,
3497 gen_rtx_LSHIFTRT
3498 (mode, gen_lowpart (mode, inner),
3499 GEN_INT (pos)),
3500 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3502 split = find_split_point (&SET_SRC (x), insn);
3503 if (split && split != &SET_SRC (x))
3504 return split;
3506 else
3508 SUBST (SET_SRC (x),
3509 gen_rtx_fmt_ee
3510 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3511 gen_rtx_ASHIFT (mode,
3512 gen_lowpart (mode, inner),
3513 GEN_INT (GET_MODE_BITSIZE (mode)
3514 - len - pos)),
3515 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3517 split = find_split_point (&SET_SRC (x), insn);
3518 if (split && split != &SET_SRC (x))
3519 return split;
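/* For example, (zero_extract:SI (reg:SI 100) (const_int 9)
   (const_int 0)) has a 9-bit field, too wide for the 8-bit AND
   assumption above, so it takes the two-shift form
   (lshiftrt:SI (ashift:SI (reg:SI 100) (const_int 23)) (const_int 23))
   in a 32-bit mode; an 8-bit field would instead use LSHIFTRT
   followed by an AND.  */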
3523 /* See if this is a simple operation with a constant as the second
3524 operand. It might be that this constant is out of range and hence
3525 could be used as a split point. */
3526 if (BINARY_P (SET_SRC (x))
3527 && CONSTANT_P (XEXP (SET_SRC (x), 1))
3528 && (OBJECT_P (XEXP (SET_SRC (x), 0))
3529 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3530 && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
3531 return &XEXP (SET_SRC (x), 1);
3533 /* Finally, see if this is a simple operation with its first operand
3534 not in a register. The operation might require this operand in a
3535 register, so return it as a split point. We can always do this
3536 because if the first operand were another operation, we would have
3537 already found it as a split point. */
3538 if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
3539 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3540 return &XEXP (SET_SRC (x), 0);
3542 return 0;
3544 case AND:
3545 case IOR:
3546 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3547 it is better to write this as (not (ior A B)) so we can split it.
3548 Similarly for IOR. */
3549 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3551 SUBST (*loc,
3552 gen_rtx_NOT (GET_MODE (x),
3553 gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3554 GET_MODE (x),
3555 XEXP (XEXP (x, 0), 0),
3556 XEXP (XEXP (x, 1), 0))));
3557 return find_split_point (loc, insn);
3560 /* Many RISC machines have a large set of logical insns. If the
3561 second operand is a NOT, put it first so we will try to split the
3562 other operand first. */
3563 if (GET_CODE (XEXP (x, 1)) == NOT)
3565 rtx tem = XEXP (x, 0);
3566 SUBST (XEXP (x, 0), XEXP (x, 1));
3567 SUBST (XEXP (x, 1), tem);
3569 break;
3571 default:
3572 break;
3575 /* Otherwise, select our actions depending on our rtx class. */
3576 switch (GET_RTX_CLASS (code))
3578 case RTX_BITFIELD_OPS: /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3579 case RTX_TERNARY:
3580 split = find_split_point (&XEXP (x, 2), insn);
3581 if (split)
3582 return split;
3583 /* ... fall through ... */
3584 case RTX_BIN_ARITH:
3585 case RTX_COMM_ARITH:
3586 case RTX_COMPARE:
3587 case RTX_COMM_COMPARE:
3588 split = find_split_point (&XEXP (x, 1), insn);
3589 if (split)
3590 return split;
3591 /* ... fall through ... */
3592 case RTX_UNARY:
3593 /* Some machines have (and (shift ...) ...) insns. If X is not
3594 an AND, but XEXP (X, 0) is, use it as our split point. */
3595 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3596 return &XEXP (x, 0);
3598 split = find_split_point (&XEXP (x, 0), insn);
3599 if (split)
3600 return split;
3601 return loc;
3603 default:
3604 /* Otherwise, we don't have a split point. */
3605 return 0;
3609 /* Throughout X, replace FROM with TO, and return the result.
3610 The result is TO if X is FROM;
3611 otherwise the result is X, but its contents may have been modified.
3612 If they were modified, a record was made in undobuf so that
3613 undo_all will (among other things) return X to its original state.
3615 If the number of changes necessary is too much to record to undo,
3616 the excess changes are not made, so the result is invalid.
3617 The changes already made can still be undone.
undobuf.num_undo is incremented for such changes, so the caller
can test it to tell whether the result is valid.
3621 `n_occurrences' is incremented each time FROM is replaced.
3623 IN_DEST is nonzero if we are processing the SET_DEST of a SET.
3625 UNIQUE_COPY is nonzero if each substitution must be unique. We do this
3626 by copying if `n_occurrences' is nonzero. */
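/* As an illustration, substituting FROM = (reg:SI 99) with
   TO = (plus:SI (reg:SI 100) (const_int 4)) throughout
   (set (reg:SI 101) (mult:SI (reg:SI 99) (reg:SI 102)))
   gives
   (set (reg:SI 101) (mult:SI (plus:SI (reg:SI 100) (const_int 4))
                              (reg:SI 102))),
   leaving n_occurrences at 1 (all register numbers here are made
   up).  */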
3628 static rtx
3629 subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
3631 enum rtx_code code = GET_CODE (x);
3632 enum machine_mode op0_mode = VOIDmode;
3633 const char *fmt;
3634 int len, i;
3635 rtx new;
3637 /* Two expressions are equal if they are identical copies of a shared
3638 RTX or if they are both registers with the same register number
3639 and mode. */
3641 #define COMBINE_RTX_EQUAL_P(X,Y) \
3642 ((X) == (Y) \
3643 || (REG_P (X) && REG_P (Y) \
3644 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3646 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3648 n_occurrences++;
3649 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3652 /* If X and FROM are the same register but different modes, they will
3653 not have been seen as equal above. However, flow.c will make a
3654 LOG_LINKS entry for that case. If we do nothing, we will try to
3655 rerecognize our original insn and, when it succeeds, we will
3656 delete the feeding insn, which is incorrect.
3658 So force this insn not to match in this (rare) case. */
3659 if (! in_dest && code == REG && REG_P (from)
3660 && REGNO (x) == REGNO (from))
3661 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3663 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3664 of which may contain things that can be combined. */
3665 if (code != MEM && code != LO_SUM && OBJECT_P (x))
3666 return x;
3668 /* It is possible to have a subexpression appear twice in the insn.
3669 Suppose that FROM is a register that appears within TO.
3670 Then, after that subexpression has been scanned once by `subst',
3671 the second time it is scanned, TO may be found. If we were
3672 to scan TO here, we would find FROM within it and create a
3673 self-referent rtl structure which is completely wrong. */
3674 if (COMBINE_RTX_EQUAL_P (x, to))
3675 return to;
3677 /* Parallel asm_operands need special attention because all of the
3678 inputs are shared across the arms. Furthermore, unsharing the
3679 rtl results in recognition failures. Failure to handle this case
3680 specially can result in circular rtl.
3682 Solve this by doing a normal pass across the first entry of the
3683 parallel, and only processing the SET_DESTs of the subsequent
3684 entries. Ug. */
3686 if (code == PARALLEL
3687 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3688 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3690 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3692 /* If this substitution failed, this whole thing fails. */
3693 if (GET_CODE (new) == CLOBBER
3694 && XEXP (new, 0) == const0_rtx)
3695 return new;
3697 SUBST (XVECEXP (x, 0, 0), new);
3699 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3701 rtx dest = SET_DEST (XVECEXP (x, 0, i));
3703 if (!REG_P (dest)
3704 && GET_CODE (dest) != CC0
3705 && GET_CODE (dest) != PC)
3707 new = subst (dest, from, to, 0, unique_copy);
3709 /* If this substitution failed, this whole thing fails. */
3710 if (GET_CODE (new) == CLOBBER
3711 && XEXP (new, 0) == const0_rtx)
3712 return new;
3714 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3718 else
3720 len = GET_RTX_LENGTH (code);
3721 fmt = GET_RTX_FORMAT (code);
3723 /* We don't need to process a SET_DEST that is a register, CC0,
3724 or PC, so set up to skip this common case. All other cases
3725 where we want to suppress replacing something inside a
3726 SET_SRC are handled via the IN_DEST operand. */
3727 if (code == SET
3728 && (REG_P (SET_DEST (x))
3729 || GET_CODE (SET_DEST (x)) == CC0
3730 || GET_CODE (SET_DEST (x)) == PC))
3731 fmt = "ie";
3733 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3734 constant. */
3735 if (fmt[0] == 'e')
3736 op0_mode = GET_MODE (XEXP (x, 0));
3738 for (i = 0; i < len; i++)
3740 if (fmt[i] == 'E')
3742 int j;
3743 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3745 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3747 new = (unique_copy && n_occurrences
3748 ? copy_rtx (to) : to);
3749 n_occurrences++;
3751 else
3753 new = subst (XVECEXP (x, i, j), from, to, 0,
3754 unique_copy);
3756 /* If this substitution failed, this whole thing
3757 fails. */
3758 if (GET_CODE (new) == CLOBBER
3759 && XEXP (new, 0) == const0_rtx)
3760 return new;
3763 SUBST (XVECEXP (x, i, j), new);
3766 else if (fmt[i] == 'e')
3768 /* If this is a register being set, ignore it. */
3769 new = XEXP (x, i);
3770 if (in_dest
3771 && i == 0
3772 && (((code == SUBREG || code == ZERO_EXTRACT)
3773 && REG_P (new))
3774 || code == STRICT_LOW_PART))
3777 else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3779 /* In general, don't install a subreg involving two
3780 modes not tieable. It can worsen register
3781 allocation, and can even make invalid reload
3782 insns, since the reg inside may need to be copied
3783 from in the outside mode, and that may be invalid
3784 if it is an fp reg copied in integer mode.
3786 We allow two exceptions to this: It is valid if
3787 it is inside another SUBREG and the mode of that
3788 SUBREG and the mode of the inside of TO is
3789 tieable and it is valid if X is a SET that copies
3790 FROM to CC0. */
3792 if (GET_CODE (to) == SUBREG
3793 && ! MODES_TIEABLE_P (GET_MODE (to),
3794 GET_MODE (SUBREG_REG (to)))
3795 && ! (code == SUBREG
3796 && MODES_TIEABLE_P (GET_MODE (x),
3797 GET_MODE (SUBREG_REG (to))))
3798 #ifdef HAVE_cc0
3799 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3800 #endif
3802 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3804 #ifdef CANNOT_CHANGE_MODE_CLASS
3805 if (code == SUBREG
3806 && REG_P (to)
3807 && REGNO (to) < FIRST_PSEUDO_REGISTER
3808 && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
3809 GET_MODE (to),
3810 GET_MODE (x)))
3811 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3812 #endif
3814 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3815 n_occurrences++;
3817 else
3818 /* If we are in a SET_DEST, suppress most cases unless we
3819 have gone inside a MEM, in which case we want to
3820 simplify the address. We assume here that things that
3821 are actually part of the destination have their inner
3822 parts in the first expression. This is true for SUBREG,
3823 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3824 things aside from REG and MEM that should appear in a
3825 SET_DEST. */
3826 new = subst (XEXP (x, i), from, to,
3827 (((in_dest
3828 && (code == SUBREG || code == STRICT_LOW_PART
3829 || code == ZERO_EXTRACT))
3830 || code == SET)
3831 && i == 0), unique_copy);
3833 /* If we found that we will have to reject this combination,
3834 indicate that by returning the CLOBBER ourselves, rather than
3835 an expression containing it. This will speed things up as
3836 well as prevent accidents where two CLOBBERs are considered
3837 to be equal, thus producing an incorrect simplification. */
3839 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3840 return new;
3842 if (GET_CODE (x) == SUBREG
3843 && (GET_CODE (new) == CONST_INT
3844 || GET_CODE (new) == CONST_DOUBLE))
3846 enum machine_mode mode = GET_MODE (x);
3848 x = simplify_subreg (GET_MODE (x), new,
3849 GET_MODE (SUBREG_REG (x)),
3850 SUBREG_BYTE (x));
3851 if (! x)
3852 x = gen_rtx_CLOBBER (mode, const0_rtx);
3854 else if (GET_CODE (new) == CONST_INT
3855 && GET_CODE (x) == ZERO_EXTEND)
3857 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3858 new, GET_MODE (XEXP (x, 0)));
3859 gcc_assert (x);
3861 else
3862 SUBST (XEXP (x, i), new);
3867 /* Try to simplify X. If the simplification changed the code, it is likely
3868 that further simplification will help, so loop, but limit the number
3869 of repetitions that will be performed. */
3871 for (i = 0; i < 4; i++)
3873 /* If X is sufficiently simple, don't bother trying to do anything
3874 with it. */
3875 if (code != CONST_INT && code != REG && code != CLOBBER)
3876 x = combine_simplify_rtx (x, op0_mode, in_dest);
3878 if (GET_CODE (x) == code)
3879 break;
3881 code = GET_CODE (x);
3883 /* We no longer know the original mode of operand 0 since we
have changed the form of X.  */
3885 op0_mode = VOIDmode;
3888 return x;
3891 /* Simplify X, a piece of RTL. We just operate on the expression at the
3892 outer level; call `subst' to simplify recursively. Return the new
3893 expression.
3895 OP0_MODE is the original mode of XEXP (x, 0). IN_DEST is nonzero
3896 if we are inside a SET_DEST. */
3898 static rtx
3899 combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
3901 enum rtx_code code = GET_CODE (x);
3902 enum machine_mode mode = GET_MODE (x);
3903 rtx temp;
3904 rtx reversed;
3905 int i;
3907 /* If this is a commutative operation, put a constant last and a complex
3908 expression first. We don't need to do this for comparisons here. */
3909 if (COMMUTATIVE_ARITH_P (x)
3910 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
3912 temp = XEXP (x, 0);
3913 SUBST (XEXP (x, 0), XEXP (x, 1));
3914 SUBST (XEXP (x, 1), temp);
3917 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3918 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3919 things. Check for cases where both arms are testing the same
3920 condition.
3922 Don't do anything if all operands are very simple. */
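/* E.g., (plus:SI (if_then_else:SI (eq A B) X Y) (const_int 1)) may
   be rewritten with the PLUS pushed into the arms, as
   (if_then_else:SI (eq A B) (plus:SI X (const_int 1))
                             (plus:SI Y (const_int 1))),
   which can collapse further if the arms become constants.  */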
3924 if ((BINARY_P (x)
3925 && ((!OBJECT_P (XEXP (x, 0))
3926 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3927 && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
3928 || (!OBJECT_P (XEXP (x, 1))
3929 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3930 && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
3931 || (UNARY_P (x)
3932 && (!OBJECT_P (XEXP (x, 0))
3933 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3934 && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
3936 rtx cond, true_rtx, false_rtx;
3938 cond = if_then_else_cond (x, &true_rtx, &false_rtx);
3939 if (cond != 0
3940 /* If everything is a comparison, what we have is highly unlikely
3941 to be simpler, so don't use it. */
3942 && ! (COMPARISON_P (x)
3943 && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
3945 rtx cop1 = const0_rtx;
3946 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3948 if (cond_code == NE && COMPARISON_P (cond))
3949 return x;
3951 /* Simplify the alternative arms; this may collapse the true and
3952 false arms to store-flag values. Be careful to use copy_rtx
3953 here since true_rtx or false_rtx might share RTL with x as a
3954 result of the if_then_else_cond call above. */
3955 true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
3956 false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);
3958 /* If true_rtx and false_rtx are not general_operands, an if_then_else
3959 is unlikely to be simpler. */
3960 if (general_operand (true_rtx, VOIDmode)
3961 && general_operand (false_rtx, VOIDmode))
3963 enum rtx_code reversed;
3965 /* Restarting if we generate a store-flag expression will cause
3966 us to loop. Just drop through in this case. */
3968 /* If the result values are STORE_FLAG_VALUE and zero, we can
3969 just make the comparison operation. */
3970 if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
3971 x = simplify_gen_relational (cond_code, mode, VOIDmode,
3972 cond, cop1);
3973 else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
3974 && ((reversed = reversed_comparison_code_parts
3975 (cond_code, cond, cop1, NULL))
3976 != UNKNOWN))
3977 x = simplify_gen_relational (reversed, mode, VOIDmode,
3978 cond, cop1);
3980 /* Likewise, we can make the negate of a comparison operation
3981 if the result values are - STORE_FLAG_VALUE and zero. */
3982 else if (GET_CODE (true_rtx) == CONST_INT
3983 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3984 && false_rtx == const0_rtx)
3985 x = simplify_gen_unary (NEG, mode,
3986 simplify_gen_relational (cond_code,
3987 mode, VOIDmode,
3988 cond, cop1),
3989 mode);
3990 else if (GET_CODE (false_rtx) == CONST_INT
3991 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3992 && true_rtx == const0_rtx
3993 && ((reversed = reversed_comparison_code_parts
3994 (cond_code, cond, cop1, NULL))
3995 != UNKNOWN))
3996 x = simplify_gen_unary (NEG, mode,
3997 simplify_gen_relational (reversed,
3998 mode, VOIDmode,
3999 cond, cop1),
4000 mode);
4001 else
4002 return gen_rtx_IF_THEN_ELSE (mode,
4003 simplify_gen_relational (cond_code,
4004 mode,
4005 VOIDmode,
4006 cond,
4007 cop1),
4008 true_rtx, false_rtx);
4010 code = GET_CODE (x);
4011 op0_mode = VOIDmode;
4016 /* Try to fold this expression in case we have constants that weren't
4017 present before. */
4018 temp = 0;
4019 switch (GET_RTX_CLASS (code))
4021 case RTX_UNARY:
4022 if (op0_mode == VOIDmode)
4023 op0_mode = GET_MODE (XEXP (x, 0));
4024 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
4025 break;
4026 case RTX_COMPARE:
4027 case RTX_COMM_COMPARE:
4029 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
4030 if (cmp_mode == VOIDmode)
4032 cmp_mode = GET_MODE (XEXP (x, 1));
4033 if (cmp_mode == VOIDmode)
4034 cmp_mode = op0_mode;
4036 temp = simplify_relational_operation (code, mode, cmp_mode,
4037 XEXP (x, 0), XEXP (x, 1));
4039 break;
4040 case RTX_COMM_ARITH:
4041 case RTX_BIN_ARITH:
4042 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
4043 break;
4044 case RTX_BITFIELD_OPS:
4045 case RTX_TERNARY:
4046 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
4047 XEXP (x, 1), XEXP (x, 2));
4048 break;
4049 default:
4050 break;
4053 if (temp)
4055 x = temp;
4056 code = GET_CODE (temp);
4057 op0_mode = VOIDmode;
4058 mode = GET_MODE (temp);
4061 /* First see if we can apply the inverse distributive law. */
4062 if (code == PLUS || code == MINUS
4063 || code == AND || code == IOR || code == XOR)
4065 x = apply_distributive_law (x);
4066 code = GET_CODE (x);
4067 op0_mode = VOIDmode;
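/* E.g., (ior:SI (and:SI A C) (and:SI B C)) can come back from
   apply_distributive_law as (and:SI (ior:SI A B) C).  */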
4070 /* If CODE is an associative operation not otherwise handled, see if we
4071 can associate some operands. This can win if they are constants or
4072 if they are logically related (i.e. (a & b) & a). */
4073 if ((code == PLUS || code == MINUS || code == MULT || code == DIV
4074 || code == AND || code == IOR || code == XOR
4075 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
4076 && ((INTEGRAL_MODE_P (mode) && code != DIV)
4077 || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
4079 if (GET_CODE (XEXP (x, 0)) == code)
4081 rtx other = XEXP (XEXP (x, 0), 0);
4082 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
4083 rtx inner_op1 = XEXP (x, 1);
4084 rtx inner;
4086 /* Make sure we pass the constant operand if any as the second
4087 one if this is a commutative operation. */
4088 if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
4090 rtx tem = inner_op0;
4091 inner_op0 = inner_op1;
4092 inner_op1 = tem;
4094 inner = simplify_binary_operation (code == MINUS ? PLUS
4095 : code == DIV ? MULT
4096 : code,
4097 mode, inner_op0, inner_op1);
4099 /* For commutative operations, try the other pair if that one
4100 didn't simplify. */
4101 if (inner == 0 && COMMUTATIVE_ARITH_P (x))
4103 other = XEXP (XEXP (x, 0), 1);
4104 inner = simplify_binary_operation (code, mode,
4105 XEXP (XEXP (x, 0), 0),
4106 XEXP (x, 1));
4109 if (inner)
4110 return simplify_gen_binary (code, mode, other, inner);
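/* For instance, (plus:SI (plus:SI (reg:SI 100) (const_int 3))
   (const_int 4)) associates the constants: the inner 3 and the
   outer 4 fold, and the result is
   (plus:SI (reg:SI 100) (const_int 7)).  */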
4114 /* A little bit of algebraic simplification here. */
4115 switch (code)
4117 case MEM:
4118 /* Ensure that our address has any ASHIFTs converted to MULT in case
4119 address-recognizing predicates are called later. */
4120 temp = make_compound_operation (XEXP (x, 0), MEM);
4121 SUBST (XEXP (x, 0), temp);
4122 break;
4124 case SUBREG:
4125 if (op0_mode == VOIDmode)
4126 op0_mode = GET_MODE (SUBREG_REG (x));
4128 /* See if this can be moved to simplify_subreg. */
4129 if (CONSTANT_P (SUBREG_REG (x))
4130 && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
4131 /* Don't call gen_lowpart if the inner mode
4132 is VOIDmode and we cannot simplify it, as SUBREG without
4133 inner mode is invalid. */
4134 && (GET_MODE (SUBREG_REG (x)) != VOIDmode
4135 || gen_lowpart_common (mode, SUBREG_REG (x))))
4136 return gen_lowpart (mode, SUBREG_REG (x));
4138 if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
4139 break;
4141 rtx temp;
4142 temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
4143 SUBREG_BYTE (x));
4144 if (temp)
4145 return temp;
4148 /* Don't change the mode of the MEM if that would change the meaning
4149 of the address. */
4150 if (MEM_P (SUBREG_REG (x))
4151 && (MEM_VOLATILE_P (SUBREG_REG (x))
4152 || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
4153 return gen_rtx_CLOBBER (mode, const0_rtx);
4155 /* Note that we cannot do any narrowing for non-constants since
4156 we might have been counting on using the fact that some bits were
4157 zero. We now do this in the SET. */
4159 break;
4161 case NOT:
4162 if (GET_CODE (XEXP (x, 0)) == SUBREG
4163 && subreg_lowpart_p (XEXP (x, 0))
4164 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
4165 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
4166 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
4167 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
4169 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
4171 x = gen_rtx_ROTATE (inner_mode,
4172 simplify_gen_unary (NOT, inner_mode, const1_rtx,
4173 inner_mode),
4174 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
4175 return gen_lowpart (mode, x);
4178 /* Apply De Morgan's laws to reduce number of patterns for machines
4179 with negating logical insns (and-not, nand, etc.). If result has
4180 only one NOT, put it first, since that is how the patterns are
4181 coded. */
4183 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
4185 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
4186 enum machine_mode op_mode;
4188 op_mode = GET_MODE (in1);
4189 in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
4191 op_mode = GET_MODE (in2);
4192 if (op_mode == VOIDmode)
4193 op_mode = mode;
4194 in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
4196 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
4198 rtx tem = in2;
4199 in2 = in1; in1 = tem;
4202 return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
4203 mode, in1, in2);
4205 break;
4207 case NEG:
4208 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
4209 if (GET_CODE (XEXP (x, 0)) == XOR
4210 && XEXP (XEXP (x, 0), 1) == const1_rtx
4211 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
4212 return simplify_gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4213 constm1_rtx);
4215 temp = expand_compound_operation (XEXP (x, 0));
4217 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
4218 replaced by (lshiftrt X C). This will convert
4219 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
4221 if (GET_CODE (temp) == ASHIFTRT
4222 && GET_CODE (XEXP (temp, 1)) == CONST_INT
4223 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
4224 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
4225 INTVAL (XEXP (temp, 1)));
4227 /* If X has only a single bit that might be nonzero, say, bit I, convert
4228 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4229 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
4230 (sign_extract X 1 Y). But only do this if TEMP isn't a register
4231 or a SUBREG of one since we'd be making the expression more
4232 complex if it was just a register. */
4234 if (!REG_P (temp)
4235 && ! (GET_CODE (temp) == SUBREG
4236 && REG_P (SUBREG_REG (temp)))
4237 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
4239 rtx temp1 = simplify_shift_const
4240 (NULL_RTX, ASHIFTRT, mode,
4241 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
4242 GET_MODE_BITSIZE (mode) - 1 - i),
4243 GET_MODE_BITSIZE (mode) - 1 - i);
4245 /* If all we did was surround TEMP with the two shifts, we
4246 haven't improved anything, so don't use it. Otherwise,
4247 we are better off with TEMP1. */
4248 if (GET_CODE (temp1) != ASHIFTRT
4249 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
4250 || XEXP (XEXP (temp1, 0), 0) != temp)
4251 return temp1;
4253 break;
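/* Concretely, if TEMP is (and:SI (reg:SI 100) (const_int 4)), only
   bit 2 can be nonzero (I == 2), so (neg:SI TEMP) becomes
   (ashiftrt:SI (ashift:SI TEMP (const_int 29)) (const_int 29))
   in a 32-bit mode: 0 stays 0 and 4 becomes -4.  */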
4255 case TRUNCATE:
4256 /* We can't handle truncation to a partial integer mode here
4257 because we don't know the real bitsize of the partial
4258 integer mode. */
4259 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4260 break;
4262 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4263 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4264 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4265 SUBST (XEXP (x, 0),
4266 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
4267 GET_MODE_MASK (mode), NULL_RTX, 0));
4269 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
4270 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4271 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4272 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4273 return XEXP (XEXP (x, 0), 0);
4275 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
4276 (OP:SI foo:SI) if OP is NEG or ABS. */
4277 if ((GET_CODE (XEXP (x, 0)) == ABS
4278 || GET_CODE (XEXP (x, 0)) == NEG)
4279 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
4280 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
4281 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4282 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4283 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4285 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4286 (truncate:SI x). */
4287 if (GET_CODE (XEXP (x, 0)) == SUBREG
4288 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4289 && subreg_lowpart_p (XEXP (x, 0)))
4290 return SUBREG_REG (XEXP (x, 0));
4292 /* If we know that the value is already truncated, we can
4293 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4294 is nonzero for the corresponding modes. But don't do this
4295 for an (LSHIFTRT (MULT ...)) since this will cause problems
4296 with the umulXi3_highpart patterns. */
4297 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4298 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4299 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4300 >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
4301 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4302 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
4303 return gen_lowpart (mode, XEXP (x, 0));
4305 /* A truncate of a comparison can be replaced with a subreg if
4306 STORE_FLAG_VALUE permits. This is like the previous test,
4307 but it works even if the comparison is done in a mode larger
4308 than HOST_BITS_PER_WIDE_INT. */
4309 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4310 && COMPARISON_P (XEXP (x, 0))
4311 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4312 return gen_lowpart (mode, XEXP (x, 0));
4314 /* Similarly, a truncate of a register whose value is a
4315 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4316 permits. */
4317 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4318 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4319 && (temp = get_last_value (XEXP (x, 0)))
4320 && COMPARISON_P (temp))
4321 return gen_lowpart (mode, XEXP (x, 0));
4323 break;
4325 case FLOAT_TRUNCATE:
4326 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4327 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4328 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4329 return XEXP (XEXP (x, 0), 0);
4331 /* (float_truncate:SF (float_truncate:DF foo:XF))
4332 = (float_truncate:SF foo:XF).
4333 This may eliminate double rounding, so it is unsafe.
4335 (float_truncate:SF (float_extend:XF foo:DF))
4336 = (float_truncate:SF foo:DF).
4338 (float_truncate:DF (float_extend:XF foo:SF))
= (float_extend:DF foo:SF).  */
4340 if ((GET_CODE (XEXP (x, 0)) == FLOAT_TRUNCATE
4341 && flag_unsafe_math_optimizations)
4342 || GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND)
4343 return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0),
4344 0)))
4345 > GET_MODE_SIZE (mode)
4346 ? FLOAT_TRUNCATE : FLOAT_EXTEND,
4347 mode,
4348 XEXP (XEXP (x, 0), 0), mode);
4350 /* (float_truncate (float x)) is (float x) */
4351 if (GET_CODE (XEXP (x, 0)) == FLOAT
4352 && (flag_unsafe_math_optimizations
4353 || ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
4354 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
4355 - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
4356 GET_MODE (XEXP (XEXP (x, 0), 0)))))))
4357 return simplify_gen_unary (FLOAT, mode,
4358 XEXP (XEXP (x, 0), 0),
4359 GET_MODE (XEXP (XEXP (x, 0), 0)));
/* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
4362 (OP:SF foo:SF) if OP is NEG or ABS. */
4363 if ((GET_CODE (XEXP (x, 0)) == ABS
4364 || GET_CODE (XEXP (x, 0)) == NEG)
4365 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4366 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4367 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4368 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4370 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4371 is (float_truncate:SF x). */
4372 if (GET_CODE (XEXP (x, 0)) == SUBREG
4373 && subreg_lowpart_p (XEXP (x, 0))
4374 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4375 return SUBREG_REG (XEXP (x, 0));
4376 break;
4377 case FLOAT_EXTEND:
4378 /* (float_extend (float_extend x)) is (float_extend x)
4380 (float_extend (float x)) is (float x) assuming that double
rounding can't happen.  */
4383 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4384 || (GET_CODE (XEXP (x, 0)) == FLOAT
4385 && ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
4386 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
4387 - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
4388 GET_MODE (XEXP (XEXP (x, 0), 0)))))))
4389 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4390 XEXP (XEXP (x, 0), 0),
4391 GET_MODE (XEXP (XEXP (x, 0), 0)));
4393 break;
4394 #ifdef HAVE_cc0
4395 case COMPARE:
4396 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4397 using cc0, in which case we want to leave it as a COMPARE
4398 so we can distinguish it from a register-register-copy. */
4399 if (XEXP (x, 1) == const0_rtx)
4400 return XEXP (x, 0);
4402 /* x - 0 is the same as x unless x's mode has signed zeros and
4403 allows rounding towards -infinity. Under those conditions,
4404 0 - 0 is -0. */
4405 if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
4406 && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
4407 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4408 return XEXP (x, 0);
4409 break;
4410 #endif
4412 case CONST:
4413 /* (const (const X)) can become (const X). Do it this way rather than
4414 returning the inner CONST since CONST can be shared with a
4415 REG_EQUAL note. */
4416 if (GET_CODE (XEXP (x, 0)) == CONST)
4417 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4418 break;
4420 #ifdef HAVE_lo_sum
4421 case LO_SUM:
4422 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4423 can add in an offset. find_split_point will split this address up
4424 again if it doesn't match. */
4425 if (GET_CODE (XEXP (x, 0)) == HIGH
4426 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4427 return XEXP (x, 1);
4428 break;
4429 #endif
4431 case PLUS:
/* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
4434 if (GET_CODE (XEXP (x, 0)) == MULT
4435 && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG)
4437 rtx in1, in2;
4439 in1 = XEXP (XEXP (XEXP (x, 0), 0), 0);
4440 in2 = XEXP (XEXP (x, 0), 1);
4441 return simplify_gen_binary (MINUS, mode, XEXP (x, 1),
4442 simplify_gen_binary (MULT, mode,
4443 in1, in2));
4446 /* If we have (plus (plus (A const) B)), associate it so that CONST is
4447 outermost. That's because that's the way indexed addresses are
4448 supposed to appear. This code used to check many more cases, but
4449 they are now checked elsewhere. */
4450 if (GET_CODE (XEXP (x, 0)) == PLUS
4451 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4452 return simplify_gen_binary (PLUS, mode,
4453 simplify_gen_binary (PLUS, mode,
4454 XEXP (XEXP (x, 0), 0),
4455 XEXP (x, 1)),
4456 XEXP (XEXP (x, 0), 1));
4458 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4459 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4460 bit-field and can be replaced by either a sign_extend or a
4461 sign_extract. The `and' may be a zero_extend and the two
4462 <c>, -<c> constants may be reversed. */
4463 if (GET_CODE (XEXP (x, 0)) == XOR
4464 && GET_CODE (XEXP (x, 1)) == CONST_INT
4465 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4466 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4467 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4468 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4469 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4470 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4471 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4472 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4473 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4474 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4475 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4476 == (unsigned int) i + 1))))
4477 return simplify_shift_const
4478 (NULL_RTX, ASHIFTRT, mode,
4479 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4480 XEXP (XEXP (XEXP (x, 0), 0), 0),
4481 GET_MODE_BITSIZE (mode) - (i + 1)),
4482 GET_MODE_BITSIZE (mode) - (i + 1));
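/* A concrete case, for i == 3 in a 32-bit mode:
   (plus:SI (xor:SI (and:SI X (const_int 15)) (const_int 8))
            (const_int -8))
   sign-extends the low 4-bit field of X and so becomes
   (ashiftrt:SI (ashift:SI X (const_int 28)) (const_int 28)).  */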
4484 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4485 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4486 is 1. This produces better code than the alternative immediately
4487 below. */
4488 if (COMPARISON_P (XEXP (x, 0))
4489 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4490 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4491 && (reversed = reversed_comparison (XEXP (x, 0), mode)))
4492 return
4493 simplify_gen_unary (NEG, mode, reversed, mode);
4495 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4496 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4497 the bitsize of the mode - 1. This allows simplification of
4498 "a = (b & 8) == 0;" */
4499 if (XEXP (x, 1) == constm1_rtx
4500 && !REG_P (XEXP (x, 0))
4501 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4502 && REG_P (SUBREG_REG (XEXP (x, 0))))
4503 && nonzero_bits (XEXP (x, 0), mode) == 1)
4504 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4505 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4506 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4507 GET_MODE_BITSIZE (mode) - 1),
4508 GET_MODE_BITSIZE (mode) - 1);
4510 /* If we are adding two things that have no bits in common, convert
4511 the addition into an IOR. This will often be further simplified,
4512 for example in cases like ((a & 1) + (a & 2)), which can
4513 become a & 3. */
4515 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4516 && (nonzero_bits (XEXP (x, 0), mode)
4517 & nonzero_bits (XEXP (x, 1), mode)) == 0)
4519 /* Try to simplify the expression further. */
4520 rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4521 temp = combine_simplify_rtx (tor, mode, in_dest);
4523 /* If we could, great. If not, do not go ahead with the IOR
4524 replacement, since PLUS appears in many special purpose
4525 address arithmetic instructions. */
4526 if (GET_CODE (temp) != CLOBBER && temp != tor)
4527 return temp;
4529 break;
4531 case MINUS:
4532 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4533 by reversing the comparison code if valid. */
4534 if (STORE_FLAG_VALUE == 1
4535 && XEXP (x, 0) == const1_rtx
4536 && COMPARISON_P (XEXP (x, 1))
4537 && (reversed = reversed_comparison (XEXP (x, 1), mode)))
4538 return reversed;
4540 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4541 (and <foo> (const_int pow2-1)) */
4542 if (GET_CODE (XEXP (x, 1)) == AND
4543 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4544 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4545 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4546 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4547 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
/* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
4551 if (GET_CODE (XEXP (x, 1)) == MULT
4552 && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG)
4554 rtx in1, in2;
4556 in1 = XEXP (XEXP (XEXP (x, 1), 0), 0);
4557 in2 = XEXP (XEXP (x, 1), 1);
4558 return simplify_gen_binary (PLUS, mode,
4559 simplify_gen_binary (MULT, mode,
4560 in1, in2),
4561 XEXP (x, 0));
4564 /* Canonicalize (minus (neg A) (mult B C)) to
4565 (minus (mult (neg B) C) A). */
4566 if (GET_CODE (XEXP (x, 1)) == MULT
4567 && GET_CODE (XEXP (x, 0)) == NEG)
4569 rtx in1, in2;
4571 in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode);
4572 in2 = XEXP (XEXP (x, 1), 1);
4573 return simplify_gen_binary (MINUS, mode,
4574 simplify_gen_binary (MULT, mode,
4575 in1, in2),
4576 XEXP (XEXP (x, 0), 0));
4579 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4580 integers. */
4581 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4582 return simplify_gen_binary (MINUS, mode,
4583 simplify_gen_binary (MINUS, mode,
4584 XEXP (x, 0),
4585 XEXP (XEXP (x, 1), 0)),
4586 XEXP (XEXP (x, 1), 1));
4587 break;
4589 case MULT:
4590 /* If we have (mult (plus A B) C), apply the distributive law and then
4591 the inverse distributive law to see if things simplify. This
4592 occurs mostly in addresses, often when unrolling loops. */
4594 if (GET_CODE (XEXP (x, 0)) == PLUS)
4596 rtx result = distribute_and_simplify_rtx (x, 0);
4597 if (result)
4598 return result;
4601 /* Try simplify a*(b/c) as (a*b)/c. */
4602 if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4603 && GET_CODE (XEXP (x, 0)) == DIV)
4605 rtx tem = simplify_binary_operation (MULT, mode,
4606 XEXP (XEXP (x, 0), 0),
4607 XEXP (x, 1));
4608 if (tem)
4609 return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4611 break;
4613 case UDIV:
4614 /* If this is a divide by a power of two, treat it as a shift if
4615 its first operand is a shift. */
4616 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4617 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4618 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4619 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4620 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4621 || GET_CODE (XEXP (x, 0)) == ROTATE
4622 || GET_CODE (XEXP (x, 0)) == ROTATERT))
4623 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4624 break;
4626 case EQ: case NE:
4627 case GT: case GTU: case GE: case GEU:
4628 case LT: case LTU: case LE: case LEU:
4629 case UNEQ: case LTGT:
4630 case UNGT: case UNGE:
4631 case UNLT: case UNLE:
4632 case UNORDERED: case ORDERED:
4633 /* If the first operand is a condition code, we can't do anything
4634 with it. */
4635 if (GET_CODE (XEXP (x, 0)) == COMPARE
4636 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4637 && ! CC0_P (XEXP (x, 0))))
4639 rtx op0 = XEXP (x, 0);
4640 rtx op1 = XEXP (x, 1);
4641 enum rtx_code new_code;
4643 if (GET_CODE (op0) == COMPARE)
4644 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4646 /* Simplify our comparison, if possible. */
4647 new_code = simplify_comparison (code, &op0, &op1);
4649 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4650 if only the low-order bit is possibly nonzero in X (such as when
4651 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4652 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4653 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4654 (plus X 1).
4656 Remove any ZERO_EXTRACT we made when thinking this was a
4657 comparison. It may now be simpler to use, e.g., an AND. If a
4658 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4659 the call to make_compound_operation in the SET case. */
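/* For example, with STORE_FLAG_VALUE == 1,
   (ne:SI (zero_extract:SI (reg:SI 100) (const_int 1) (const_int 7))
          (const_int 0))
   has only its low bit possibly nonzero, so it reduces to the
   extraction itself, re-expanded into shift/AND form by
   expand_compound_operation.  */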
4661 if (STORE_FLAG_VALUE == 1
4662 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4663 && op1 == const0_rtx
4664 && mode == GET_MODE (op0)
4665 && nonzero_bits (op0, mode) == 1)
4666 return gen_lowpart (mode,
4667 expand_compound_operation (op0));
4669 else if (STORE_FLAG_VALUE == 1
4670 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4671 && op1 == const0_rtx
4672 && mode == GET_MODE (op0)
4673 && (num_sign_bit_copies (op0, mode)
4674 == GET_MODE_BITSIZE (mode)))
4676 op0 = expand_compound_operation (op0);
4677 return simplify_gen_unary (NEG, mode,
4678 gen_lowpart (mode, op0),
4679 mode);
4682 else if (STORE_FLAG_VALUE == 1
4683 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4684 && op1 == const0_rtx
4685 && mode == GET_MODE (op0)
4686 && nonzero_bits (op0, mode) == 1)
4688 op0 = expand_compound_operation (op0);
4689 return simplify_gen_binary (XOR, mode,
4690 gen_lowpart (mode, op0),
4691 const1_rtx);
4694 else if (STORE_FLAG_VALUE == 1
4695 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4696 && op1 == const0_rtx
4697 && mode == GET_MODE (op0)
4698 && (num_sign_bit_copies (op0, mode)
4699 == GET_MODE_BITSIZE (mode)))
4701 op0 = expand_compound_operation (op0);
4702 return plus_constant (gen_lowpart (mode, op0), 1);
4705 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4706 those above. */
4707 if (STORE_FLAG_VALUE == -1
4708 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4709 && op1 == const0_rtx
4710 && (num_sign_bit_copies (op0, mode)
4711 == GET_MODE_BITSIZE (mode)))
4712 return gen_lowpart (mode,
4713 expand_compound_operation (op0));
4715 else if (STORE_FLAG_VALUE == -1
4716 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4717 && op1 == const0_rtx
4718 && mode == GET_MODE (op0)
4719 && nonzero_bits (op0, mode) == 1)
4721 op0 = expand_compound_operation (op0);
4722 return simplify_gen_unary (NEG, mode,
4723 gen_lowpart (mode, op0),
4724 mode);
4727 else if (STORE_FLAG_VALUE == -1
4728 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4729 && op1 == const0_rtx
4730 && mode == GET_MODE (op0)
4731 && (num_sign_bit_copies (op0, mode)
4732 == GET_MODE_BITSIZE (mode)))
4734 op0 = expand_compound_operation (op0);
4735 return simplify_gen_unary (NOT, mode,
4736 gen_lowpart (mode, op0),
4737 mode);
4740 /* If X is 0/1, (eq X 0) is X-1. */
4741 else if (STORE_FLAG_VALUE == -1
4742 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4743 && op1 == const0_rtx
4744 && mode == GET_MODE (op0)
4745 && nonzero_bits (op0, mode) == 1)
4747 op0 = expand_compound_operation (op0);
4748 return plus_constant (gen_lowpart (mode, op0), -1);
4751 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4752 one bit that might be nonzero, we can convert (ne x 0) to
4753 (ashift x c) where C puts the bit in the sign bit. Remove any
4754 AND with STORE_FLAG_VALUE when we are done, since we are only
4755 going to test the sign bit. */
4756 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4757 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4758 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4759 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4760 && op1 == const0_rtx
4761 && mode == GET_MODE (op0)
4762 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4764 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4765 expand_compound_operation (op0),
4766 GET_MODE_BITSIZE (mode) - 1 - i);
4767 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4768 return XEXP (x, 0);
4769 else
4770 return x;
4773 /* If the code changed, return a whole new comparison. */
4774 if (new_code != code)
4775 return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4777 /* Otherwise, keep this operation, but maybe change its operands.
4778 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4779 SUBST (XEXP (x, 0), op0);
4780 SUBST (XEXP (x, 1), op1);
4782 break;
4784 case IF_THEN_ELSE:
4785 return simplify_if_then_else (x);
4787 case ZERO_EXTRACT:
4788 case SIGN_EXTRACT:
4789 case ZERO_EXTEND:
4790 case SIGN_EXTEND:
4791 /* If we are processing SET_DEST, we are done. */
4792 if (in_dest)
4793 return x;
4795 return expand_compound_operation (x);
4797 case SET:
4798 return simplify_set (x);
4800 case AND:
4801 case IOR:
4802 case XOR:
4803 return simplify_logical (x);
4805 case ABS:
4806 /* (abs (neg <foo>)) -> (abs <foo>) */
4807 if (GET_CODE (XEXP (x, 0)) == NEG)
4808 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4810 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4811 do nothing. */
4812 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4813 break;
4815 /* If operand is something known to be positive, ignore the ABS. */
4816 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4817 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4818 <= HOST_BITS_PER_WIDE_INT)
4819 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4820 & ((HOST_WIDE_INT) 1
4821 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4822 == 0)))
4823 return XEXP (x, 0);
4825 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4826 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4827 return gen_rtx_NEG (mode, XEXP (x, 0));
4829 break;
4831 case FFS:
4832 /* (ffs (*_extend <X>)) = (ffs <X>) */
4833 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4834 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4835 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4836 break;
4838 case POPCOUNT:
4839 case PARITY:
4840 /* (pop* (zero_extend <X>)) = (pop* <X>) */
4841 if (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4842 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4843 break;
4845 case FLOAT:
4846 /* (float (sign_extend <X>)) = (float <X>). */
4847 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4848 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4849 break;
4851 case ASHIFT:
4852 case LSHIFTRT:
4853 case ASHIFTRT:
4854 case ROTATE:
4855 case ROTATERT:
4856 /* If this is a shift by a constant amount, simplify it. */
4857 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4858 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4859 INTVAL (XEXP (x, 1)));
4861 else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
4862 SUBST (XEXP (x, 1),
4863 force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4864 ((HOST_WIDE_INT) 1
4865 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4866 - 1,
4867 NULL_RTX, 0));
4868 break;
4870 case VEC_SELECT:
4872 rtx op0 = XEXP (x, 0);
4873 rtx op1 = XEXP (x, 1);
4874 int len;
4876 gcc_assert (GET_CODE (op1) == PARALLEL);
4877 len = XVECLEN (op1, 0);
4878 if (len == 1
4879 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4880 && GET_CODE (op0) == VEC_CONCAT)
4882 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4884 /* Try to find the element in the VEC_CONCAT. */
4885 for (;;)
4887 if (GET_MODE (op0) == GET_MODE (x))
4888 return op0;
4889 if (GET_CODE (op0) == VEC_CONCAT)
4891 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4892 if (offset < op0_size)
4893 op0 = XEXP (op0, 0);
4894 else
4896 offset -= op0_size;
4897 op0 = XEXP (op0, 1);
4900 else
4901 break;
4906 break;
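/* E.g., selecting element 2 of
   (vec_concat:V4SI (vec_concat:V2SI a b) (vec_concat:V2SI c d))
   steps over the first 8-byte half, descends into the second inner
   VEC_CONCAT, and returns c.  */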
4908 default:
4909 break;
4912 return x;
4915 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
4917 static rtx
4918 simplify_if_then_else (rtx x)
4920 enum machine_mode mode = GET_MODE (x);
4921 rtx cond = XEXP (x, 0);
4922 rtx true_rtx = XEXP (x, 1);
4923 rtx false_rtx = XEXP (x, 2);
4924 enum rtx_code true_code = GET_CODE (cond);
4925 int comparison_p = COMPARISON_P (cond);
4926 rtx temp;
4927 int i;
4928 enum rtx_code false_code;
4929 rtx reversed;
4931 /* Simplify storing of the truth value. */
4932 if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4933 return simplify_gen_relational (true_code, mode, VOIDmode,
4934 XEXP (cond, 0), XEXP (cond, 1));
4936 /* Also when the truth value has to be reversed. */
4937 if (comparison_p
4938 && true_rtx == const0_rtx && false_rtx == const_true_rtx
4939 && (reversed = reversed_comparison (cond, mode)))
4940 return reversed;
4942 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4943 in it is being compared against certain values. Get the true and false
4944 comparisons and see if that says anything about the value of each arm. */
4946 if (comparison_p
4947 && ((false_code = reversed_comparison_code (cond, NULL))
4948 != UNKNOWN)
4949 && REG_P (XEXP (cond, 0)))
4951 HOST_WIDE_INT nzb;
4952 rtx from = XEXP (cond, 0);
4953 rtx true_val = XEXP (cond, 1);
4954 rtx false_val = true_val;
4955 int swapped = 0;
4957 /* If FALSE_CODE is EQ, swap the codes and arms. */
4959 if (false_code == EQ)
4961 swapped = 1, true_code = EQ, false_code = NE;
4962 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4965 /* If we are comparing against zero and the expression being tested has
4966 only a single bit that might be nonzero, that is its value when it is
4967 not equal to zero. Similarly if it is known to be -1 or 0. */
4969 if (true_code == EQ && true_val == const0_rtx
4970 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4971 false_code = EQ, false_val = GEN_INT (nzb);
4972 else if (true_code == EQ && true_val == const0_rtx
4973 && (num_sign_bit_copies (from, GET_MODE (from))
4974 == GET_MODE_BITSIZE (GET_MODE (from))))
4975 false_code = EQ, false_val = constm1_rtx;
4977 /* Now simplify an arm if we know the value of the register in the
4978 branch and it is used in the arm. Be careful due to the potential
4979 of locally-shared RTL. */
4981 if (reg_mentioned_p (from, true_rtx))
4982 true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4983 from, true_val),
4984 pc_rtx, pc_rtx, 0, 0);
4985 if (reg_mentioned_p (from, false_rtx))
4986 false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4987 from, false_val),
4988 pc_rtx, pc_rtx, 0, 0);
4990 SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4991 SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4993 true_rtx = XEXP (x, 1);
4994 false_rtx = XEXP (x, 2);
4995 true_code = GET_CODE (cond);
4998 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4999 reversed, do so to avoid needing two sets of patterns for
5000 subtract-and-branch insns. Similarly if we have a constant in the true
5001 arm, the false arm is the same as the first operand of the comparison, or
5002 the false arm is more complicated than the true arm. */
5004 if (comparison_p
5005 && reversed_comparison_code (cond, NULL) != UNKNOWN
5006 && (true_rtx == pc_rtx
5007 || (CONSTANT_P (true_rtx)
5008 && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
5009 || true_rtx == const0_rtx
5010 || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
5011 || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
5012 && !OBJECT_P (false_rtx))
5013 || reg_mentioned_p (true_rtx, false_rtx)
5014 || rtx_equal_p (false_rtx, XEXP (cond, 0))))
5016 true_code = reversed_comparison_code (cond, NULL);
5017 SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
5018 SUBST (XEXP (x, 1), false_rtx);
5019 SUBST (XEXP (x, 2), true_rtx);
5021 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
5022 cond = XEXP (x, 0);
5024 /* It is possible that the conditional has been simplified out. */
5025 true_code = GET_CODE (cond);
5026 comparison_p = COMPARISON_P (cond);
5029 /* If the two arms are identical, we don't need the comparison. */
5031 if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
5032 return true_rtx;
5034 /* Convert a == b ? b : a to "a". */
5035 if (true_code == EQ && ! side_effects_p (cond)
5036 && !HONOR_NANS (mode)
5037 && rtx_equal_p (XEXP (cond, 0), false_rtx)
5038 && rtx_equal_p (XEXP (cond, 1), true_rtx))
5039 return false_rtx;
5040 else if (true_code == NE && ! side_effects_p (cond)
5041 && !HONOR_NANS (mode)
5042 && rtx_equal_p (XEXP (cond, 0), true_rtx)
5043 && rtx_equal_p (XEXP (cond, 1), false_rtx))
5044 return true_rtx;
5046 /* Look for cases where we have (abs x) or (neg (abs X)). */
5048 if (GET_MODE_CLASS (mode) == MODE_INT
5049 && GET_CODE (false_rtx) == NEG
5050 && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
5051 && comparison_p
5052 && rtx_equal_p (true_rtx, XEXP (cond, 0))
5053 && ! side_effects_p (true_rtx))
5054 switch (true_code)
5056 case GT:
5057 case GE:
5058 return simplify_gen_unary (ABS, mode, true_rtx, mode);
5059 case LT:
5060 case LE:
5061 return
5062 simplify_gen_unary (NEG, mode,
5063 simplify_gen_unary (ABS, mode, true_rtx, mode),
5064 mode);
5065 default:
5066 break;
5069 /* Look for MIN or MAX. */
5071 if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
5072 && comparison_p
5073 && rtx_equal_p (XEXP (cond, 0), true_rtx)
5074 && rtx_equal_p (XEXP (cond, 1), false_rtx)
5075 && ! side_effects_p (cond))
5076 switch (true_code)
5078 case GE:
5079 case GT:
5080 return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
5081 case LE:
5082 case LT:
5083 return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
5084 case GEU:
5085 case GTU:
5086 return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
5087 case LEU:
5088 case LTU:
5089 return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
5090 default:
5091 break;
5094 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
5095 second operand is zero, this can be done as (OP Z (mult COND C2)) where
5096 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
5097 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
5098 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
5099 neither 1 nor -1, but it isn't worth checking for. */
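/* For example, with STORE_FLAG_VALUE == 1,
   (if_then_else (ne A B) (plus Z (const_int 4)) Z)
   becomes (plus Z (mult (ne A B) (const_int 4))): the multiplication
   is 4 when the condition holds and 0 otherwise.  */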
5101 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
5102 && comparison_p
5103 && GET_MODE_CLASS (mode) == MODE_INT
5104 && ! side_effects_p (x))
5106 rtx t = make_compound_operation (true_rtx, SET);
5107 rtx f = make_compound_operation (false_rtx, SET);
5108 rtx cond_op0 = XEXP (cond, 0);
5109 rtx cond_op1 = XEXP (cond, 1);
5110 enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
5111 enum machine_mode m = mode;
5112 rtx z = 0, c1 = NULL_RTX;
5114 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
5115 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
5116 || GET_CODE (t) == ASHIFT
5117 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
5118 && rtx_equal_p (XEXP (t, 0), f))
5119 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
5121 /* If an identity-zero op is commutative, check whether there
5122 would be a match if we swapped the operands. */
5123 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
5124 || GET_CODE (t) == XOR)
5125 && rtx_equal_p (XEXP (t, 1), f))
5126 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
5127 else if (GET_CODE (t) == SIGN_EXTEND
5128 && (GET_CODE (XEXP (t, 0)) == PLUS
5129 || GET_CODE (XEXP (t, 0)) == MINUS
5130 || GET_CODE (XEXP (t, 0)) == IOR
5131 || GET_CODE (XEXP (t, 0)) == XOR
5132 || GET_CODE (XEXP (t, 0)) == ASHIFT
5133 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5134 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5135 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5136 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5137 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5138 && (num_sign_bit_copies (f, GET_MODE (f))
5139 > (unsigned int)
5140 (GET_MODE_BITSIZE (mode)
5141 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
5143 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5144 extend_op = SIGN_EXTEND;
5145 m = GET_MODE (XEXP (t, 0));
5147 else if (GET_CODE (t) == SIGN_EXTEND
5148 && (GET_CODE (XEXP (t, 0)) == PLUS
5149 || GET_CODE (XEXP (t, 0)) == IOR
5150 || GET_CODE (XEXP (t, 0)) == XOR)
5151 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5152 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5153 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5154 && (num_sign_bit_copies (f, GET_MODE (f))
5155 > (unsigned int)
5156 (GET_MODE_BITSIZE (mode)
5157 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
5159 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5160 extend_op = SIGN_EXTEND;
5161 m = GET_MODE (XEXP (t, 0));
5163 else if (GET_CODE (t) == ZERO_EXTEND
5164 && (GET_CODE (XEXP (t, 0)) == PLUS
5165 || GET_CODE (XEXP (t, 0)) == MINUS
5166 || GET_CODE (XEXP (t, 0)) == IOR
5167 || GET_CODE (XEXP (t, 0)) == XOR
5168 || GET_CODE (XEXP (t, 0)) == ASHIFT
5169 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5170 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5171 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5172 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5173 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5174 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5175 && ((nonzero_bits (f, GET_MODE (f))
5176 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
5177 == 0))
5179 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5180 extend_op = ZERO_EXTEND;
5181 m = GET_MODE (XEXP (t, 0));
5183 else if (GET_CODE (t) == ZERO_EXTEND
5184 && (GET_CODE (XEXP (t, 0)) == PLUS
5185 || GET_CODE (XEXP (t, 0)) == IOR
5186 || GET_CODE (XEXP (t, 0)) == XOR)
5187 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5188 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5189 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5190 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5191 && ((nonzero_bits (f, GET_MODE (f))
5192 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
5193 == 0))
5195 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5196 extend_op = ZERO_EXTEND;
5197 m = GET_MODE (XEXP (t, 0));
5200 if (z)
5202 temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
5203 cond_op0, cond_op1),
5204 pc_rtx, pc_rtx, 0, 0);
5205 temp = simplify_gen_binary (MULT, m, temp,
5206 simplify_gen_binary (MULT, m, c1,
5207 const_true_rtx));
5208 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
5209 temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);
5211 if (extend_op != UNKNOWN)
5212 temp = simplify_gen_unary (extend_op, mode, temp, m);
5214 return temp;
5218 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5219 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5220 negation of a single bit, we can convert this operation to a shift. We
5221 can actually do this more generally, but it doesn't seem worth it. */
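/* E.g. if A is known to be 0 or 1,
   (if_then_else (ne A (const_int 0)) (const_int 8) (const_int 0))
   is just (ashift A (const_int 3)).  */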
5223 if (true_code == NE && XEXP (cond, 1) == const0_rtx
5224 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
5225 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
5226 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
5227 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
5228 == GET_MODE_BITSIZE (mode))
5229 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
5230 return
5231 simplify_shift_const (NULL_RTX, ASHIFT, mode,
5232 gen_lowpart (mode, XEXP (cond, 0)), i);
5234 /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG when nonzero_bits (REG) == 8. */
5235 if (true_code == NE && XEXP (cond, 1) == const0_rtx
5236 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
5237 && GET_MODE (XEXP (cond, 0)) == mode
5238 && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
5239 == nonzero_bits (XEXP (cond, 0), mode)
5240 && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
5241 return XEXP (cond, 0);
5243 return x;
5246 /* Simplify X, a SET expression. Return the new expression. */
5248 static rtx
5249 simplify_set (rtx x)
5251 rtx src = SET_SRC (x);
5252 rtx dest = SET_DEST (x);
5253 enum machine_mode mode
5254 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
5255 rtx other_insn;
5256 rtx *cc_use;
5258 /* (set (pc) (return)) gets written as (return). */
5259 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
5260 return src;
5262 /* Now that we know for sure which bits of SRC we are using, see if we can
5263 simplify the expression for the object knowing that we only need the
5264 low-order bits. */
5266 if (GET_MODE_CLASS (mode) == MODE_INT
5267 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5269 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
5270 SUBST (SET_SRC (x), src);
5273 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5274 the comparison result and try to simplify it unless we already have used
5275 undobuf.other_insn. */
5276 if ((GET_MODE_CLASS (mode) == MODE_CC
5277 || GET_CODE (src) == COMPARE
5278 || CC0_P (dest))
5279 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
5280 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
5281 && COMPARISON_P (*cc_use)
5282 && rtx_equal_p (XEXP (*cc_use, 0), dest))
5284 enum rtx_code old_code = GET_CODE (*cc_use);
5285 enum rtx_code new_code;
5286 rtx op0, op1, tmp;
5287 int other_changed = 0;
5288 enum machine_mode compare_mode = GET_MODE (dest);
5290 if (GET_CODE (src) == COMPARE)
5291 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5292 else
5293 op0 = src, op1 = CONST0_RTX (GET_MODE (src));
5295 tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
5296 op0, op1);
5297 if (!tmp)
5298 new_code = old_code;
5299 else if (!CONSTANT_P (tmp))
5301 new_code = GET_CODE (tmp);
5302 op0 = XEXP (tmp, 0);
5303 op1 = XEXP (tmp, 1);
5305 else
5307 rtx pat = PATTERN (other_insn);
5308 undobuf.other_insn = other_insn;
5309 SUBST (*cc_use, tmp);
5311 /* Attempt to simplify CC user. */
5312 if (GET_CODE (pat) == SET)
5314 rtx new = simplify_rtx (SET_SRC (pat));
5315 if (new != NULL_RTX)
5316 SUBST (SET_SRC (pat), new);
5319 /* Convert X into a no-op move. */
5320 SUBST (SET_DEST (x), pc_rtx);
5321 SUBST (SET_SRC (x), pc_rtx);
5322 return x;
5325 /* Simplify our comparison, if possible. */
5326 new_code = simplify_comparison (new_code, &op0, &op1);
5328 #ifdef SELECT_CC_MODE
5329 /* If this machine has CC modes other than CCmode, check to see if we
5330 need to use a different CC mode here. */
5331 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5332 compare_mode = GET_MODE (op0);
5333 else
5334 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5336 #ifndef HAVE_cc0
5337 /* If the mode changed, we have to change SET_DEST, the mode in the
5338 compare, and the mode in the place SET_DEST is used. If SET_DEST is
5339 a hard register, just build new versions with the proper mode. If it
5340 is a pseudo, we lose unless it is the only time we set the pseudo, in
5341 which case we can safely change its mode. */
5342 if (compare_mode != GET_MODE (dest))
5344 if (can_change_dest_mode (dest, 0, compare_mode))
5346 unsigned int regno = REGNO (dest);
5347 rtx new_dest = gen_rtx_REG (compare_mode, regno);
5349 if (regno >= FIRST_PSEUDO_REGISTER)
5350 SUBST (regno_reg_rtx[regno], new_dest);
5352 SUBST (SET_DEST (x), new_dest);
5353 SUBST (XEXP (*cc_use, 0), new_dest);
5354 other_changed = 1;
5356 dest = new_dest;
5359 #endif /* cc0 */
5360 #endif /* SELECT_CC_MODE */
5362 /* If the code changed, we have to build a new comparison in
5363 undobuf.other_insn. */
5364 if (new_code != old_code)
5366 int other_changed_previously = other_changed;
5367 unsigned HOST_WIDE_INT mask;
5369 SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5370 dest, const0_rtx));
5371 other_changed = 1;
5373 /* If the only change we made was to change an EQ into an NE or
5374 vice versa, OP0 has only one bit that might be nonzero, and OP1
5375 is zero, check if changing the user of the condition code will
5376 produce a valid insn. If it won't, we can keep the original code
5377 in that insn by surrounding our operation with an XOR. */
5379 if (((old_code == NE && new_code == EQ)
5380 || (old_code == EQ && new_code == NE))
5381 && ! other_changed_previously && op1 == const0_rtx
5382 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5383 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5385 rtx pat = PATTERN (other_insn), note = 0;
5387 if ((recog_for_combine (&pat, other_insn, &note) < 0
5388 && ! check_asm_operands (pat)))
5390 PUT_CODE (*cc_use, old_code);
5391 other_changed = 0;
5393 op0 = simplify_gen_binary (XOR, GET_MODE (op0),
5394 op0, GEN_INT (mask));
5399 if (other_changed)
5400 undobuf.other_insn = other_insn;
5402 #ifdef HAVE_cc0
5403 /* If we are now comparing against zero, change our source if
5404 needed. If we do not use cc0, we always have a COMPARE. */
5405 if (op1 == const0_rtx && dest == cc0_rtx)
5407 SUBST (SET_SRC (x), op0);
5408 src = op0;
5410 else
5411 #endif
5413 /* Otherwise, if we didn't previously have a COMPARE in the
5414 correct mode, we need one. */
5415 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5417 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5418 src = SET_SRC (x);
5420 else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
5422 SUBST (SET_SRC (x), op0);
5423 src = SET_SRC (x);
5425 else
5427 /* Otherwise, update the COMPARE if needed. */
5428 SUBST (XEXP (src, 0), op0);
5429 SUBST (XEXP (src, 1), op1);
5432 else
5434 /* Get SET_SRC in a form where we have placed back any
5435 compound expressions. Then do the checks below. */
5436 src = make_compound_operation (src, SET);
5437 SUBST (SET_SRC (x), src);
5440 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5441 and X being a REG or (subreg (reg)), we may be able to convert this to
5442 (set (subreg:m2 x) (op)).
5444 We can always do this if M1 is narrower than M2 because that means that
5445 we only care about the low bits of the result.
5447 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5448 perform a narrower operation than requested since the high-order bits will
5449 be undefined. On machines where it is defined, this transformation is safe
5450 as long as M1 and M2 have the same number of words. */
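/* For instance, on a 32-bit target
   (set (reg:QI R) (subreg:QI (plus:SI X Y) 0)) can become
   (set (subreg:SI (reg:QI R) 0) (plus:SI X Y)), since both modes
   occupy a single word and only the low bits of the result matter.  */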
5452 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5453 && !OBJECT_P (SUBREG_REG (src))
5454 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5455 / UNITS_PER_WORD)
5456 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5457 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5458 #ifndef WORD_REGISTER_OPERATIONS
5459 && (GET_MODE_SIZE (GET_MODE (src))
5460 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5461 #endif
5462 #ifdef CANNOT_CHANGE_MODE_CLASS
5463 && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
5464 && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
5465 GET_MODE (SUBREG_REG (src)),
5466 GET_MODE (src)))
5467 #endif
5468 && (REG_P (dest)
5469 || (GET_CODE (dest) == SUBREG
5470 && REG_P (SUBREG_REG (dest)))))
5472 SUBST (SET_DEST (x),
5473 gen_lowpart (GET_MODE (SUBREG_REG (src)),
5474 dest));
5475 SUBST (SET_SRC (x), SUBREG_REG (src));
5477 src = SET_SRC (x), dest = SET_DEST (x);
5480 #ifdef HAVE_cc0
5481 /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5482 in SRC. */
5483 if (dest == cc0_rtx
5484 && GET_CODE (src) == SUBREG
5485 && subreg_lowpart_p (src)
5486 && (GET_MODE_BITSIZE (GET_MODE (src))
5487 < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
5489 rtx inner = SUBREG_REG (src);
5490 enum machine_mode inner_mode = GET_MODE (inner);
5492 /* Here we make sure that we don't have a sign bit on. */
5493 if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
5494 && (nonzero_bits (inner, inner_mode)
5495 < ((unsigned HOST_WIDE_INT) 1
5496 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
5498 SUBST (SET_SRC (x), inner);
5499 src = SET_SRC (x);
5502 #endif
5504 #ifdef LOAD_EXTEND_OP
5505 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5506 would require a paradoxical subreg. Replace the subreg with a
5507 zero_extend to avoid the reload that would otherwise be required. */
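/* E.g. on a target where LOAD_EXTEND_OP (HImode) == ZERO_EXTEND,
   (set FOO (subreg:SI (mem:HI ADDR) 0)) becomes
   (set FOO (zero_extend:SI (mem:HI ADDR))), which the load
   instruction performs at no extra cost.  */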
5509 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5510 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
5511 && SUBREG_BYTE (src) == 0
5512 && (GET_MODE_SIZE (GET_MODE (src))
5513 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5514 && MEM_P (SUBREG_REG (src)))
5516 SUBST (SET_SRC (x),
5517 gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5518 GET_MODE (src), SUBREG_REG (src)));
5520 src = SET_SRC (x);
5522 #endif
5524 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5525 are comparing an item known to be 0 or -1 against 0, use a logical
5526 operation instead. Check for one of the arms being an IOR of the other
5527 arm with some value. We compute three terms to be IOR'ed together. In
5528 practice, at most two will be nonzero. Then we do the IOR's. */
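/* In effect, when A is known to be 0 or -1,
   (if_then_else (ne A (const_int 0)) X Y) is rewritten as
   (ior (and A X) (and (not A) Y)); TERM1 picks up the case where one
   arm is an IOR of the other arm with some value.  */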
5530 if (GET_CODE (dest) != PC
5531 && GET_CODE (src) == IF_THEN_ELSE
5532 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5533 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5534 && XEXP (XEXP (src, 0), 1) == const0_rtx
5535 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5536 #ifdef HAVE_conditional_move
5537 && ! can_conditionally_move_p (GET_MODE (src))
5538 #endif
5539 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5540 GET_MODE (XEXP (XEXP (src, 0), 0)))
5541 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5542 && ! side_effects_p (src))
5544 rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5545 ? XEXP (src, 1) : XEXP (src, 2));
5546 rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5547 ? XEXP (src, 2) : XEXP (src, 1));
5548 rtx term1 = const0_rtx, term2, term3;
5550 if (GET_CODE (true_rtx) == IOR
5551 && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5552 term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
5553 else if (GET_CODE (true_rtx) == IOR
5554 && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5555 term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
5556 else if (GET_CODE (false_rtx) == IOR
5557 && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5558 term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
5559 else if (GET_CODE (false_rtx) == IOR
5560 && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5561 term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
5563 term2 = simplify_gen_binary (AND, GET_MODE (src),
5564 XEXP (XEXP (src, 0), 0), true_rtx);
5565 term3 = simplify_gen_binary (AND, GET_MODE (src),
5566 simplify_gen_unary (NOT, GET_MODE (src),
5567 XEXP (XEXP (src, 0), 0),
5568 GET_MODE (src)),
5569 false_rtx);
5571 SUBST (SET_SRC (x),
5572 simplify_gen_binary (IOR, GET_MODE (src),
5573 simplify_gen_binary (IOR, GET_MODE (src),
5574 term1, term2),
5575 term3));
5577 src = SET_SRC (x);
5580 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5581 whole thing fail. */
5582 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5583 return src;
5584 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5585 return dest;
5586 else
5587 /* Convert this into a field assignment operation, if possible. */
5588 return make_field_assignment (x);
5591 /* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5592 result. */
5594 static rtx
5595 simplify_logical (rtx x)
5597 enum machine_mode mode = GET_MODE (x);
5598 rtx op0 = XEXP (x, 0);
5599 rtx op1 = XEXP (x, 1);
5600 rtx tmp, reversed;
5602 switch (GET_CODE (x))
5604 case AND:
5605 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5606 insn (and may simplify more). */
5607 if (GET_CODE (op0) == XOR
5608 && rtx_equal_p (XEXP (op0, 0), op1)
5609 && ! side_effects_p (op1))
5610 x = simplify_gen_binary (AND, mode,
5611 simplify_gen_unary (NOT, mode,
5612 XEXP (op0, 1), mode),
5613 op1);
5615 if (GET_CODE (op0) == XOR
5616 && rtx_equal_p (XEXP (op0, 1), op1)
5617 && ! side_effects_p (op1))
5618 x = simplify_gen_binary (AND, mode,
5619 simplify_gen_unary (NOT, mode,
5620 XEXP (op0, 0), mode),
5621 op1);
5623 /* Similarly for (~(A ^ B)) & A. */
5624 if (GET_CODE (op0) == NOT
5625 && GET_CODE (XEXP (op0, 0)) == XOR
5626 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5627 && ! side_effects_p (op1))
5628 x = simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5630 if (GET_CODE (op0) == NOT
5631 && GET_CODE (XEXP (op0, 0)) == XOR
5632 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5633 && ! side_effects_p (op1))
5634 x = simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5636 /* We can call simplify_and_const_int only if we don't lose
5637 any (sign) bits when converting INTVAL (op1) to
5638 "unsigned HOST_WIDE_INT". */
5639 if (GET_CODE (op1) == CONST_INT
5640 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5641 || INTVAL (op1) > 0))
5643 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5645 /* If we have (ior (and X C1) C2) and the next restart would be
5646 the last, simplify this by making C1 as small as possible
5647 and then exit. Only do this if C1 actually changes: for now
5648 this only saves memory but, should this transformation be
5649 moved to simplify-rtx.c, we'd risk unbounded recursion there. */
5650 if (GET_CODE (x) == IOR && GET_CODE (op0) == AND
5651 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5652 && GET_CODE (op1) == CONST_INT
5653 && (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
5654 return simplify_gen_binary (IOR, mode,
5655 simplify_gen_binary
5656 (AND, mode, XEXP (op0, 0),
5657 GEN_INT (INTVAL (XEXP (op0, 1))
5658 & ~INTVAL (op1))), op1);
5660 if (GET_CODE (x) != AND)
5661 return x;
5663 op0 = XEXP (x, 0);
5664 op1 = XEXP (x, 1);
5667 /* Convert (A | B) & A to A. */
5668 if (GET_CODE (op0) == IOR
5669 && (rtx_equal_p (XEXP (op0, 0), op1)
5670 || rtx_equal_p (XEXP (op0, 1), op1))
5671 && ! side_effects_p (XEXP (op0, 0))
5672 && ! side_effects_p (XEXP (op0, 1)))
5673 return op1;
5675 /* If we have any of (and (ior A B) C) or (and (xor A B) C),
5676 apply the distributive law and then the inverse distributive
5677 law to see if things simplify. */
5678 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5680 rtx result = distribute_and_simplify_rtx (x, 0);
5681 if (result)
5682 return result;
5684 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5686 rtx result = distribute_and_simplify_rtx (x, 1);
5687 if (result)
5688 return result;
5690 break;
5692 case IOR:
5693 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
5694 if (GET_CODE (op1) == CONST_INT
5695 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5696 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5697 return op1;
5699 /* Convert (A & B) | A to A. */
5700 if (GET_CODE (op0) == AND
5701 && (rtx_equal_p (XEXP (op0, 0), op1)
5702 || rtx_equal_p (XEXP (op0, 1), op1))
5703 && ! side_effects_p (XEXP (op0, 0))
5704 && ! side_effects_p (XEXP (op0, 1)))
5705 return op1;
5707 /* If we have (ior (and A B) C), apply the distributive law and then
5708 the inverse distributive law to see if things simplify. */
5710 if (GET_CODE (op0) == AND)
5712 rtx result = distribute_and_simplify_rtx (x, 0);
5713 if (result)
5714 return result;
5717 if (GET_CODE (op1) == AND)
5719 rtx result = distribute_and_simplify_rtx (x, 1);
5720 if (result)
5721 return result;
5724 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5725 mode size to (rotate A CX). */
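/* E.g. in SImode,
   (ior (ashift A (const_int 24)) (lshiftrt A (const_int 8)))
   becomes (rotate A (const_int 24)).  */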
5727 if (GET_CODE (op1) == ASHIFT
5728 || GET_CODE (op1) == SUBREG)
5729 tmp = op1, op1 = op0, op0 = tmp;
5731 if (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT
5732 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5733 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5734 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5735 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5736 == GET_MODE_BITSIZE (mode)))
5737 return gen_rtx_ROTATE (mode, XEXP (op1, 0), XEXP (op0, 1));
5739 /* Same, but for ashift that has been "simplified" to a wider mode
5740 by simplify_shift_const. */
5742 if (GET_CODE (op0) == SUBREG
5743 && GET_CODE (SUBREG_REG (op0)) == ASHIFT
5744 && GET_CODE (op1) == LSHIFTRT
5745 && GET_CODE (XEXP (op1, 0)) == SUBREG
5746 && GET_MODE (op0) == GET_MODE (XEXP (op1, 0))
5747 && SUBREG_BYTE (op0) == SUBREG_BYTE (XEXP (op1, 0))
5748 && (GET_MODE_SIZE (GET_MODE (op0))
5749 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
5750 && rtx_equal_p (XEXP (SUBREG_REG (op0), 0),
5751 SUBREG_REG (XEXP (op1, 0)))
5752 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
5753 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5754 && (INTVAL (XEXP (SUBREG_REG (op0), 1)) + INTVAL (XEXP (op1, 1))
5755 == GET_MODE_BITSIZE (mode)))
5756 return gen_rtx_ROTATE (mode, XEXP (op1, 0),
5757 XEXP (SUBREG_REG (op0), 1));
5759 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5760 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5761 does not affect any of the bits in OP1, it can really be done
5762 as a PLUS and we can associate. We do this by seeing if OP1
5763 can be safely shifted left C bits. */
5764 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5765 && GET_CODE (XEXP (op0, 0)) == PLUS
5766 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5767 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5768 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5770 int count = INTVAL (XEXP (op0, 1));
5771 HOST_WIDE_INT mask = INTVAL (op1) << count;
5773 if (mask >> count == INTVAL (op1)
5774 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5776 SUBST (XEXP (XEXP (op0, 0), 1),
5777 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5778 return op0;
5781 break;
5783 case XOR:
5784 /* If we are XORing two things that have no bits in common,
5785 convert them into an IOR. This helps to detect rotation encoded
5786 using those methods and possibly other simplifications. */
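/* E.g. if nonzero_bits shows OP0 fits in the low byte while OP1 can
   have only higher bits set, no bit position is set in both operands,
   so (xor OP0 OP1) and (ior OP0 OP1) agree bit-for-bit.  */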
5788 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5789 && (nonzero_bits (op0, mode)
5790 & nonzero_bits (op1, mode)) == 0)
5791 return (simplify_gen_binary (IOR, mode, op0, op1));
5793 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5794 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5795 (NOT y). */
5797 int num_negated = 0;
5799 if (GET_CODE (op0) == NOT)
5800 num_negated++, op0 = XEXP (op0, 0);
5801 if (GET_CODE (op1) == NOT)
5802 num_negated++, op1 = XEXP (op1, 0);
5804 if (num_negated == 2)
5806 SUBST (XEXP (x, 0), op0);
5807 SUBST (XEXP (x, 1), op1);
5809 else if (num_negated == 1)
5810 return
5811 simplify_gen_unary (NOT, mode,
5812 simplify_gen_binary (XOR, mode, op0, op1),
5813 mode);
5816 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5817 correspond to a machine insn or result in further simplifications
5818 if B is a constant. */
5820 if (GET_CODE (op0) == AND
5821 && rtx_equal_p (XEXP (op0, 1), op1)
5822 && ! side_effects_p (op1))
5823 return simplify_gen_binary (AND, mode,
5824 simplify_gen_unary (NOT, mode,
5825 XEXP (op0, 0), mode),
5826 op1);
5828 else if (GET_CODE (op0) == AND
5829 && rtx_equal_p (XEXP (op0, 0), op1)
5830 && ! side_effects_p (op1))
5831 return simplify_gen_binary (AND, mode,
5832 simplify_gen_unary (NOT, mode,
5833 XEXP (op0, 1), mode),
5834 op1);
5836 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5837 comparison if STORE_FLAG_VALUE is 1. */
5838 if (STORE_FLAG_VALUE == 1
5839 && op1 == const1_rtx
5840 && COMPARISON_P (op0)
5841 && (reversed = reversed_comparison (op0, mode)))
5842 return reversed;
5844 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5845 is (lt foo (const_int 0)), so we can perform the above
5846 simplification if STORE_FLAG_VALUE is 1. */
5848 if (STORE_FLAG_VALUE == 1
5849 && op1 == const1_rtx
5850 && GET_CODE (op0) == LSHIFTRT
5851 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5852 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5853 return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5855 /* (xor (comparison foo bar) (const_int sign-bit))
5856 when STORE_FLAG_VALUE is the sign bit. */
5857 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5858 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5859 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5860 && op1 == const_true_rtx
5861 && COMPARISON_P (op0)
5862 && (reversed = reversed_comparison (op0, mode)))
5863 return reversed;
5865 break;
5867 default:
5868 gcc_unreachable ();
5871 return x;
5874 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5875 operations" because they can be replaced with two more basic operations.
5876 ZERO_EXTEND is also considered "compound" because it can be replaced with
5877 an AND operation, which is simpler, though only one operation.
5879 The function expand_compound_operation is called with an rtx expression
5880 and will convert it to the appropriate shifts and AND operations,
5881 simplifying at each stage.
5883 The function make_compound_operation is called to convert an expression
5884 consisting of shifts and ANDs into the equivalent compound expression.
5885 It is the inverse of this function, loosely speaking. */
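/* For example, (zero_extend:SI (reg:QI X)) is expanded, in effect, to
   (and:SI (subreg:SI (reg:QI X) 0) (const_int 255)), while
   make_compound_operation performs the reverse rewrite when it sees
   an AND whose mask is a power of two minus one.  */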
5887 static rtx
5888 expand_compound_operation (rtx x)
5890 unsigned HOST_WIDE_INT pos = 0, len;
5891 int unsignedp = 0;
5892 unsigned int modewidth;
5893 rtx tem;
5895 switch (GET_CODE (x))
5897 case ZERO_EXTEND:
5898 unsignedp = 1;
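/* ... fall through ... */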
5899 case SIGN_EXTEND:
5900 /* We can't necessarily use a const_int for a multiword mode;
5901 it depends on implicitly extending the value.
5902 Since we don't know the right way to extend it,
5903 we can't tell whether the implicit way is right.
5905 Even for a mode that is no wider than a const_int,
5906 we can't win, because we need to sign extend one of its bits through
5907 the rest of it, and we don't know which bit. */
5908 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5909 return x;
5911 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5912 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5913 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5914 reloaded. If not for that, MEM's would very rarely be safe.
5916 Reject MODEs bigger than a word, because we might not be able
5917 to reference a two-register group starting with an arbitrary register
5918 (and currently gen_lowpart might crash for a SUBREG). */
5920 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5921 return x;
5923 /* Reject MODEs that aren't scalar integers because turning vector
5924 or complex modes into shifts causes problems. */
5926 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5927 return x;
5929 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5930 /* If the inner object has VOIDmode (the only way this can happen
5931 is if it is an ASM_OPERANDS), we can't do anything since we don't
5932 know how much masking to do. */
5933 if (len == 0)
5934 return x;
5936 break;
5938 case ZERO_EXTRACT:
5939 unsignedp = 1;
5941 /* ... fall through ... */
5943 case SIGN_EXTRACT:
5944 /* If the operand is a CLOBBER, just return it. */
5945 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5946 return XEXP (x, 0);
5948 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5949 || GET_CODE (XEXP (x, 2)) != CONST_INT
5950 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5951 return x;
5953 /* Reject MODEs that aren't scalar integers because turning vector
5954 or complex modes into shifts causes problems. */
5956 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5957 return x;
5959 len = INTVAL (XEXP (x, 1));
5960 pos = INTVAL (XEXP (x, 2));
5962 /* If this goes outside the object being extracted, replace the object
5963 with a (use (mem ...)) construct that only combine understands
5964 and is used only for this purpose. */
5965 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5966 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5968 if (BITS_BIG_ENDIAN)
5969 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5971 break;
5973 default:
5974 return x;
5976 /* Convert sign extension to zero extension, if we know that the high
5977 bit is not set, as this is easier to optimize. It will be converted
5978 back to the cheaper alternative in make_extraction. */
5979 if (GET_CODE (x) == SIGN_EXTEND
5980 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5981 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5982 & ~(((unsigned HOST_WIDE_INT)
5983 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5984 >> 1))
5985 == 0)))
5987 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5988 rtx temp2 = expand_compound_operation (temp);
5990 /* Make sure this is a profitable operation. */
5991 if (rtx_cost (x, SET) > rtx_cost (temp2, SET))
5992 return temp2;
5993 else if (rtx_cost (x, SET) > rtx_cost (temp, SET))
5994 return temp;
5995 else
5996 return x;
5999 /* We can optimize some special cases of ZERO_EXTEND. */
6000 if (GET_CODE (x) == ZERO_EXTEND)
6002 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
6003 know that the last value didn't have any inappropriate bits
6004 set. */
6005 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
6006 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
6007 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6008 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
6009 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6010 return XEXP (XEXP (x, 0), 0);
6012 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
6013 if (GET_CODE (XEXP (x, 0)) == SUBREG
6014 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
6015 && subreg_lowpart_p (XEXP (x, 0))
6016 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6017 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
6018 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6019 return SUBREG_REG (XEXP (x, 0));
6021 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
6022 is a comparison and STORE_FLAG_VALUE permits. This is like
6023 the first case, but it works even when GET_MODE (x) is larger
6024 than HOST_WIDE_INT. */
6025 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
6026 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
6027 && COMPARISON_P (XEXP (XEXP (x, 0), 0))
6028 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6029 <= HOST_BITS_PER_WIDE_INT)
6030 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
6031 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6032 return XEXP (XEXP (x, 0), 0);
6034 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
6035 if (GET_CODE (XEXP (x, 0)) == SUBREG
6036 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
6037 && subreg_lowpart_p (XEXP (x, 0))
6038 && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
6039 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6040 <= HOST_BITS_PER_WIDE_INT)
6041 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
6042 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6043 return SUBREG_REG (XEXP (x, 0));
6047 /* If we reach here, we want to return a pair of shifts. The inner
6048 shift is a left shift of BITSIZE - POS - LEN bits. The outer
6049 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
6050 logical depending on the value of UNSIGNEDP.
6052 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
6053 converted into an AND of a shift.
6055 We must check for the case where the left shift would have a negative
6056 count. This can happen in a case like (x >> 31) & 255 on machines
6057 that can't shift by a constant. On those machines, we would first
6058 combine the shift with the AND to produce a variable-position
6059 extraction. Then the constant of 31 would be substituted in to produce
6060 such a position. */
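/* As an example, assuming BITS_BIG_ENDIAN is zero:
   (sign_extract:SI X (const_int 8) (const_int 8)) has LEN == 8 and
   POS == 8 in a 32-bit mode, and becomes
   (ashiftrt:SI (ashift:SI X (const_int 16)) (const_int 24)).  */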
6062 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
6063 if (modewidth + len >= pos)
6064 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
6065 GET_MODE (x),
6066 simplify_shift_const (NULL_RTX, ASHIFT,
6067 GET_MODE (x),
6068 XEXP (x, 0),
6069 modewidth - pos - len),
6070 modewidth - len);
6072 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
6073 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
6074 simplify_shift_const (NULL_RTX, LSHIFTRT,
6075 GET_MODE (x),
6076 XEXP (x, 0), pos),
6077 ((HOST_WIDE_INT) 1 << len) - 1);
6078 else
6079 /* Any other cases we can't handle. */
6080 return x;
6082 /* If we couldn't do this for some reason, return the original
6083 expression. */
6084 if (GET_CODE (tem) == CLOBBER)
6085 return x;
6087 return tem;
6090 /* X is a SET which contains an assignment of one object into
6091 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
6092 or certain SUBREGS). If possible, convert it into a series of
6093 logical operations.
6095 We half-heartedly support variable positions, but do not at all
6096 support variable lengths. */
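/* In C terms, for a LEN-bit field at bit POS the loop below ends by
   building
     inner = (inner & ~(MASK << POS)) | ((src & MASK) << POS);
   where MASK is ((HOST_WIDE_INT) 1 << LEN) - 1; these are the CLEARED
   and MASKED terms computed at the bottom of the loop.  */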
6098 static rtx
6099 expand_field_assignment (rtx x)
6101 rtx inner;
6102 rtx pos; /* Always counts from low bit. */
6103 int len;
6104 rtx mask, cleared, masked;
6105 enum machine_mode compute_mode;
6107 /* Loop until we find something we can't simplify. */
6108 while (1)
6110 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6111 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
6113 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
6114 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
6115 pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
6117 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
6118 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
6120 inner = XEXP (SET_DEST (x), 0);
6121 len = INTVAL (XEXP (SET_DEST (x), 1));
6122 pos = XEXP (SET_DEST (x), 2);
6124 /* If the position is constant and spans the width of INNER,
6125 surround INNER with a USE to indicate this. */
6126 if (GET_CODE (pos) == CONST_INT
6127 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
6128 inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
6130 if (BITS_BIG_ENDIAN)
6132 if (GET_CODE (pos) == CONST_INT)
6133 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
6134 - INTVAL (pos));
6135 else if (GET_CODE (pos) == MINUS
6136 && GET_CODE (XEXP (pos, 1)) == CONST_INT
6137 && (INTVAL (XEXP (pos, 1))
6138 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
6139 /* If position is ADJUST - X, new position is X. */
6140 pos = XEXP (pos, 0);
6141 else
6142 pos = simplify_gen_binary (MINUS, GET_MODE (pos),
6143 GEN_INT (GET_MODE_BITSIZE (
6144 GET_MODE (inner))
6145 - len),
6146 pos);
6150 /* A SUBREG between two modes that occupy the same numbers of words
6151 can be done by moving the SUBREG to the source. */
6152 else if (GET_CODE (SET_DEST (x)) == SUBREG
6153 /* We need SUBREGs to compute nonzero_bits properly. */
6154 && nonzero_sign_valid
6155 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
6156 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6157 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
6158 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
6160 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
6161 gen_lowpart
6162 (GET_MODE (SUBREG_REG (SET_DEST (x))),
6163 SET_SRC (x)));
6164 continue;
6166 else
6167 break;
6169 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6170 inner = SUBREG_REG (inner);
6172 compute_mode = GET_MODE (inner);
6174 /* Don't attempt bitwise arithmetic on non scalar integer modes. */
6175 if (! SCALAR_INT_MODE_P (compute_mode))
6177 enum machine_mode imode;
6179 /* Don't do anything for vector or complex integral types. */
6180 if (! FLOAT_MODE_P (compute_mode))
6181 break;
6183 /* Try to find an integral mode to pun with. */
6184 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
6185 if (imode == BLKmode)
6186 break;
6188 compute_mode = imode;
6189 inner = gen_lowpart (imode, inner);
6192 /* Compute a mask of LEN bits, if we can do this on the host machine. */
6193 if (len >= HOST_BITS_PER_WIDE_INT)
6194 break;
6196 /* Now compute the equivalent expression. Make a copy of INNER
6197 for the SET_DEST in case it is a MEM into which we will substitute;
6198 we don't want shared RTL in that case. */
6199 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
6200 cleared = simplify_gen_binary (AND, compute_mode,
6201 simplify_gen_unary (NOT, compute_mode,
6202 simplify_gen_binary (ASHIFT,
6203 compute_mode,
6204 mask, pos),
6205 compute_mode),
6206 inner);
6207 masked = simplify_gen_binary (ASHIFT, compute_mode,
6208 simplify_gen_binary (
6209 AND, compute_mode,
6210 gen_lowpart (compute_mode, SET_SRC (x)),
6211 mask),
6212 pos);
6214 x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
6215 simplify_gen_binary (IOR, compute_mode,
6216 cleared, masked));
6219 return x;
6222 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
6223 it is an RTX that represents a variable starting position; otherwise,
6224 POS is the (constant) starting bit position (counted from the LSB).
6226 INNER may be a USE. This will occur when we started with a bitfield
6227 that went outside the boundary of the object in memory, which is
6228 allowed on most machines. To isolate this case, we produce a USE
6229 whose mode is wide enough and surround the MEM with it. The only
6230 code that understands the USE is this routine. If it is not removed,
6231 it will cause the resulting insn not to match.
6233 UNSIGNEDP is nonzero for an unsigned reference and zero for a
6234 signed reference.
6236 IN_DEST is nonzero if this is a reference in the destination of a
6237 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero,
6238 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
6239 be used.
6241 IN_COMPARE is nonzero if we are in a COMPARE. This means that a
6242 ZERO_EXTRACT should be built even for bits starting at bit 0.
6244 MODE is the desired mode of the result (if IN_DEST == 0).
6246 The result is an RTX for the extraction or NULL_RTX if the target
6247 can't handle it. */
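/* For instance, extracting 8 unsigned bits at bit 16 of (reg:SI X)
   for use in a comparison may yield
   (zero_extract:SI (reg:SI X) (const_int 8) (const_int 16)),
   assuming the target provides an extzv pattern.  */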
6249 static rtx
6250 make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
6251 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
6252 int in_dest, int in_compare)
6254 /* This mode describes the size of the storage area
6255 to fetch the overall value from. Within that, we
6256 ignore the POS lowest bits, etc. */
6257 enum machine_mode is_mode = GET_MODE (inner);
6258 enum machine_mode inner_mode;
6259 enum machine_mode wanted_inner_mode = byte_mode;
6260 enum machine_mode wanted_inner_reg_mode = word_mode;
6261 enum machine_mode pos_mode = word_mode;
6262 enum machine_mode extraction_mode = word_mode;
6263 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
6264 int spans_byte = 0;
6265 rtx new = 0;
6266 rtx orig_pos_rtx = pos_rtx;
6267 HOST_WIDE_INT orig_pos;
6269 /* Get some information about INNER and get the innermost object. */
6270 if (GET_CODE (inner) == USE)
6271 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
6272 /* We don't need to adjust the position because we set up the USE
6273 to pretend that it was a full-word object. */
6274 spans_byte = 1, inner = XEXP (inner, 0);
6275 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6277 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
6278 consider just the QI as the memory to extract from.
6279 The subreg adds or removes high bits; its mode is
6280 irrelevant to the meaning of this extraction,
6281 since POS and LEN count from the lsb. */
6282 if (MEM_P (SUBREG_REG (inner)))
6283 is_mode = GET_MODE (SUBREG_REG (inner));
6284 inner = SUBREG_REG (inner);
6286 else if (GET_CODE (inner) == ASHIFT
6287 && GET_CODE (XEXP (inner, 1)) == CONST_INT
6288 && pos_rtx == 0 && pos == 0
6289 && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
6291 /* We're extracting the least significant bits of an rtx
6292 (ashift X (const_int C)), where LEN > C. Extract the
6293 least significant (LEN - C) bits of X, giving an rtx
6294 whose mode is MODE, then shift it left C times. */
6295 new = make_extraction (mode, XEXP (inner, 0),
6296 0, 0, len - INTVAL (XEXP (inner, 1)),
6297 unsignedp, in_dest, in_compare);
6298 if (new != 0)
6299 return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
6302 inner_mode = GET_MODE (inner);
6304 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
6305 pos = INTVAL (pos_rtx), pos_rtx = 0;
6307 /* See if this can be done without an extraction. We never can if the
6308 width of the field is not the same as that of some integer mode. For
6309 registers, we can only avoid the extraction if the position is at the
6310 low-order bit and this is either not in the destination or we have the
6311 appropriate STRICT_LOW_PART operation available.
6313 For MEM, we can avoid an extract if the field starts on an appropriate
6314 boundary and we can change the mode of the memory reference. However,
6315 we cannot directly access the MEM if we have a USE and the underlying
6316 MEM is not TMODE. This combination means that MEM was being used in a
6317 context where bits outside its mode were being referenced; that is only
6318 valid in bit-field insns. */
6320 if (tmode != BLKmode
6321 && ! (spans_byte && inner_mode != tmode)
6322 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6323 && !MEM_P (inner)
6324 && (! in_dest
6325 || (REG_P (inner)
6326 && have_insn_for (STRICT_LOW_PART, tmode))))
6327 || (MEM_P (inner) && pos_rtx == 0
6328 && (pos
6329 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6330 : BITS_PER_UNIT)) == 0
6331 /* We can't do this if we are widening INNER_MODE (it
6332 may not be aligned, for one thing). */
6333 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6334 && (inner_mode == tmode
6335 || (! mode_dependent_address_p (XEXP (inner, 0))
6336 && ! MEM_VOLATILE_P (inner))))))
6338 /* If INNER is a MEM, make a new MEM that encompasses just the desired
6339 field. If the original and current mode are the same, we need not
6340 adjust the offset. Otherwise, we do if bytes big endian.
6342 If INNER is not a MEM, get a piece consisting of just the field
6343 of interest (in this case POS % BITS_PER_WORD must be 0). */
6345 if (MEM_P (inner))
6347 HOST_WIDE_INT offset;
6349 /* POS counts from lsb, but make OFFSET count in memory order. */
6350 if (BYTES_BIG_ENDIAN)
6351 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6352 else
6353 offset = pos / BITS_PER_UNIT;
6355 new = adjust_address_nv (inner, tmode, offset);
6357 else if (REG_P (inner))
6359 if (tmode != inner_mode)
6361 /* We can't call gen_lowpart in a DEST since we
6362 always want a SUBREG (see below) and it would sometimes
6363 return a new hard register. */
6364 if (pos || in_dest)
6366 HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6368 if (WORDS_BIG_ENDIAN
6369 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6370 final_word = ((GET_MODE_SIZE (inner_mode)
6371 - GET_MODE_SIZE (tmode))
6372 / UNITS_PER_WORD) - final_word;
6374 final_word *= UNITS_PER_WORD;
6375 if (BYTES_BIG_ENDIAN
6376 && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6377 final_word += (GET_MODE_SIZE (inner_mode)
6378 - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6380 /* Avoid creating invalid subregs, for example when
6381 simplifying (x>>32)&255. */
6382 if (!validate_subreg (tmode, inner_mode, inner, final_word))
6383 return NULL_RTX;
6385 new = gen_rtx_SUBREG (tmode, inner, final_word);
6387 else
6388 new = gen_lowpart (tmode, inner);
6390 else
6391 new = inner;
6393 else
6394 new = force_to_mode (inner, tmode,
6395 len >= HOST_BITS_PER_WIDE_INT
6396 ? ~(unsigned HOST_WIDE_INT) 0
6397 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6398 NULL_RTX, 0);
6400 /* If this extraction is going into the destination of a SET,
6401 make a STRICT_LOW_PART unless we made a MEM. */
6403 if (in_dest)
6404 return (MEM_P (new) ? new
6405 : (GET_CODE (new) != SUBREG
6406 ? gen_rtx_CLOBBER (tmode, const0_rtx)
6407 : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
6409 if (mode == tmode)
6410 return new;
6412 if (GET_CODE (new) == CONST_INT)
6413 return gen_int_mode (INTVAL (new), mode);
6415 /* If we know that no extraneous bits are set, and that the high
6416 bit is not set, convert the extraction to the cheaper of
6417 sign and zero extension, which are equivalent in these cases. */
6418 if (flag_expensive_optimizations
6419 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6420 && ((nonzero_bits (new, tmode)
6421 & ~(((unsigned HOST_WIDE_INT)
6422 GET_MODE_MASK (tmode))
6423 >> 1))
6424 == 0)))
6426 rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6427 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6429 /* Prefer ZERO_EXTENSION, since it gives more information to
6430 backends. */
6431 if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6432 return temp;
6433 return temp1;
6436 /* Otherwise, sign- or zero-extend unless we already are in the
6437 proper mode. */
6439 return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6440 mode, new));
6443 /* Unless this is a COMPARE or we have a funny memory reference,
6444 don't do anything with zero-extending field extracts starting at
6445 the low-order bit since they are simple AND operations. */
6446 if (pos_rtx == 0 && pos == 0 && ! in_dest
6447 && ! in_compare && ! spans_byte && unsignedp)
6448 return 0;
6450 /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
6451 we would be spanning bytes or if the position is not a constant and the
6452 length is not 1. In all other cases, we would only be going outside
6453 our object in cases when an original shift would have been
6454 undefined. */
6455 if (! spans_byte && MEM_P (inner)
6456 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6457 || (pos_rtx != 0 && len != 1)))
6458 return 0;
6460 /* Get the mode to use should INNER not be a MEM, the mode for the position,
6461 and the mode for the result. */
6462 if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6464 wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6465 pos_mode = mode_for_extraction (EP_insv, 2);
6466 extraction_mode = mode_for_extraction (EP_insv, 3);
6469 if (! in_dest && unsignedp
6470 && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6472 wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6473 pos_mode = mode_for_extraction (EP_extzv, 3);
6474 extraction_mode = mode_for_extraction (EP_extzv, 0);
6477 if (! in_dest && ! unsignedp
6478 && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6480 wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6481 pos_mode = mode_for_extraction (EP_extv, 3);
6482 extraction_mode = mode_for_extraction (EP_extv, 0);
6485 /* Never narrow an object, since that might not be safe. */
6487 if (mode != VOIDmode
6488 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6489 extraction_mode = mode;
6491 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6492 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6493 pos_mode = GET_MODE (pos_rtx);
6495 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6496 if we have to change the mode of memory and cannot, the desired mode is
6497 EXTRACTION_MODE. */
6498 if (!MEM_P (inner))
6499 wanted_inner_mode = wanted_inner_reg_mode;
6500 else if (inner_mode != wanted_inner_mode
6501 && (mode_dependent_address_p (XEXP (inner, 0))
6502 || MEM_VOLATILE_P (inner)))
6503 wanted_inner_mode = extraction_mode;
6505 orig_pos = pos;
6507 if (BITS_BIG_ENDIAN)
6509 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6510 BITS_BIG_ENDIAN style. If position is constant, compute new
6511 position. Otherwise, build subtraction.
6512 Note that POS is relative to the mode of the original argument.
6513 If it's a MEM we need to recompute POS relative to that.
6514 However, if we're extracting from (or inserting into) a register,
6515 we want to recompute POS relative to wanted_inner_mode. */
6516 int width = (MEM_P (inner)
6517 ? GET_MODE_BITSIZE (is_mode)
6518 : GET_MODE_BITSIZE (wanted_inner_mode));
6520 if (pos_rtx == 0)
6521 pos = width - len - pos;
6522 else
6523 pos_rtx
6524 = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6525 /* POS may be less than 0 now, but we check for that below.
6526 Note that it can only be less than 0 if !MEM_P (inner). */
6529 /* If INNER has a wider mode, make it smaller. If this is a constant
6530 extract, try to adjust the byte to point to the byte containing
6531 the value. */
6532 if (wanted_inner_mode != VOIDmode
6533 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6534 && ((MEM_P (inner)
6535 && (inner_mode == wanted_inner_mode
6536 || (! mode_dependent_address_p (XEXP (inner, 0))
6537 && ! MEM_VOLATILE_P (inner))))))
6539 int offset = 0;
6541 /* The computations below will be correct if the machine is big
6542 endian in both bits and bytes or little endian in bits and bytes.
6543 If it is mixed, we must adjust. */
6545 /* If bytes are big endian and we had a paradoxical SUBREG, we must
6546 adjust OFFSET to compensate. */
6547 if (BYTES_BIG_ENDIAN
6548 && ! spans_byte
6549 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6550 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6552 /* If this is a constant position, we can move to the desired byte.
6553 Be careful not to go beyond the original object and maintain the
6554 natural alignment of the memory. */
6555 if (pos_rtx == 0)
6557 enum machine_mode bfmode = smallest_mode_for_size (len, MODE_INT);
6558 offset += (pos / GET_MODE_BITSIZE (bfmode)) * GET_MODE_SIZE (bfmode);
6559 pos %= GET_MODE_BITSIZE (bfmode);
6562 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6563 && ! spans_byte
6564 && is_mode != wanted_inner_mode)
6565 offset = (GET_MODE_SIZE (is_mode)
6566 - GET_MODE_SIZE (wanted_inner_mode) - offset);
6568 if (offset != 0 || inner_mode != wanted_inner_mode)
6569 inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6572 /* If INNER is not memory, we can always get it into the proper mode. If we
6573 are changing its mode, POS must be a constant and smaller than the size
6574 of the new mode. */
6575 else if (!MEM_P (inner))
6577 if (GET_MODE (inner) != wanted_inner_mode
6578 && (pos_rtx != 0
6579 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6580 return 0;
6582 inner = force_to_mode (inner, wanted_inner_mode,
6583 pos_rtx
6584 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6585 ? ~(unsigned HOST_WIDE_INT) 0
6586 : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6587 << orig_pos),
6588 NULL_RTX, 0);
6591 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
6592 have to zero extend. Otherwise, we can just use a SUBREG. */
6593 if (pos_rtx != 0
6594 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6596 rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6598 /* If we know that no extraneous bits are set, and that the high
6599 bit is not set, convert the extraction to the cheaper one - either
6600 SIGN_EXTENSION or ZERO_EXTENSION, which are equivalent in these
6601 cases. */
6602 if (flag_expensive_optimizations
6603 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6604 && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6605 & ~(((unsigned HOST_WIDE_INT)
6606 GET_MODE_MASK (GET_MODE (pos_rtx)))
6607 >> 1))
6608 == 0)))
6610 rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6612 /* Prefer ZERO_EXTENSION, since it gives more information to
6613 backends. */
6614 if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6615 temp = temp1;
6617 pos_rtx = temp;
6619 else if (pos_rtx != 0
6620 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6621 pos_rtx = gen_lowpart (pos_mode, pos_rtx);
6623 /* Make POS_RTX unless we already have it and it is correct. If we don't
6624 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6625 be a CONST_INT. */
6626 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6627 pos_rtx = orig_pos_rtx;
6629 else if (pos_rtx == 0)
6630 pos_rtx = GEN_INT (pos);
6632 /* Make the required operation. See if we can use existing rtx. */
6633 new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6634 extraction_mode, inner, GEN_INT (len), pos_rtx);
6635 if (! in_dest)
6636 new = gen_lowpart (mode, new);
6638 return new;
6641 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6642 with any other operations in X. Return X without that shift if so. */
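/* E.g. with COUNT == 2,
   (plus (ashift X (const_int 2)) (const_int 20))
   becomes (plus X (const_int 5)); the caller is expected to reapply
   the shift to the result.  */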
6644 static rtx
6645 extract_left_shift (rtx x, int count)
6647 enum rtx_code code = GET_CODE (x);
6648 enum machine_mode mode = GET_MODE (x);
6649 rtx tem;
6651 switch (code)
6653 case ASHIFT:
6654 /* This is the shift itself. If it is wide enough, we will return
6655 either the value being shifted if the shift count is equal to
6656 COUNT or a shift for the difference. */
6657 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6658 && INTVAL (XEXP (x, 1)) >= count)
6659 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6660 INTVAL (XEXP (x, 1)) - count);
6661 break;
6663 case NEG: case NOT:
6664 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6665 return simplify_gen_unary (code, mode, tem, mode);
6667 break;
6669 case PLUS: case IOR: case XOR: case AND:
6670 /* If we can safely shift this constant and we find the inner shift,
6671 make a new operation. */
6672 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6673 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6674 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6675 return simplify_gen_binary (code, mode, tem,
6676 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6678 break;
6680 default:
6681 break;
6684 return 0;
6687 /* Look at the expression rooted at X. Look for expressions
6688 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6689 Form these expressions.
6691 Return the new rtx, usually just X.
6693 Also, for machines like the VAX that don't have logical shift insns,
6694 try to convert logical to arithmetic shift operations in cases where
6695 they are equivalent. This undoes the canonicalizations to logical
6696 shifts done elsewhere.
6698 We try, as much as possible, to re-use rtl expressions to save memory.
6700 IN_CODE says what kind of expression we are processing. Normally, it is
6701 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
6702 being kludges), it is MEM. When processing the arguments of a comparison
6703 or a COMPARE against zero, it is COMPARE. */
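/* An illustrative example (added; not from the original sources): given
   (and (lshiftrt X (const_int 8)) (const_int 255)) with IN_CODE == SET,
   the AND case below typically yields
   (zero_extract X (const_int 8) (const_int 8)), an extraction of eight
   bits starting at bit 8; make_extraction may instead choose an
   equivalent form such as a ZERO_EXTEND of a SUBREG.  */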
6705 static rtx
6706 make_compound_operation (rtx x, enum rtx_code in_code)
6708 enum rtx_code code = GET_CODE (x);
6709 enum machine_mode mode = GET_MODE (x);
6710 int mode_width = GET_MODE_BITSIZE (mode);
6711 rtx rhs, lhs;
6712 enum rtx_code next_code;
6713 int i;
6714 rtx new = 0;
6715 rtx tem;
6716 const char *fmt;
6718 /* Select the code to be used in recursive calls. Once we are inside an
6719 address, we stay there. If we have a comparison, set to COMPARE,
6720 but once inside, go back to our default of SET. */
6722 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6723 : ((code == COMPARE || COMPARISON_P (x))
6724 && XEXP (x, 1) == const0_rtx) ? COMPARE
6725 : in_code == COMPARE ? SET : in_code);
6727 /* Process depending on the code of this operation. If NEW is set
6728 nonzero, it will be returned. */
6730 switch (code)
6732 case ASHIFT:
6733 /* Convert shifts by constants into multiplications if inside
6734 an address. */
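/* Example (illustrative): inside a MEM, (ashift X (const_int 2))
   becomes (mult X (const_int 4)), the canonical form in addresses.  */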
6735 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6736 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6737 && INTVAL (XEXP (x, 1)) >= 0)
6739 new = make_compound_operation (XEXP (x, 0), next_code);
6740 new = gen_rtx_MULT (mode, new,
6741 GEN_INT ((HOST_WIDE_INT) 1
6742 << INTVAL (XEXP (x, 1))));
6744 break;
6746 case AND:
6747 /* If the second operand is not a constant, we can't do anything
6748 with it. */
6749 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6750 break;
6752 /* If the constant is a power of two minus one and the first operand
6753 is a logical right shift, make an extraction. */
6754 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6755 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6757 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6758 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6759 0, in_code == COMPARE);
6762 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
6763 else if (GET_CODE (XEXP (x, 0)) == SUBREG
6764 && subreg_lowpart_p (XEXP (x, 0))
6765 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6766 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6768 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6769 next_code);
6770 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6771 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6772 0, in_code == COMPARE);
6774 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
6775 else if ((GET_CODE (XEXP (x, 0)) == XOR
6776 || GET_CODE (XEXP (x, 0)) == IOR)
6777 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6778 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6779 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6781 /* Apply the distributive law, and then try to make extractions. */
6782 new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6783 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6784 XEXP (x, 1)),
6785 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6786 XEXP (x, 1)));
6787 new = make_compound_operation (new, in_code);
6790 /* If we have (and (rotate X C) M) and C is larger than the number
6791 of bits in M, this is an extraction. */
6793 else if (GET_CODE (XEXP (x, 0)) == ROTATE
6794 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6795 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6796 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6798 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6799 new = make_extraction (mode, new,
6800 (GET_MODE_BITSIZE (mode)
6801 - INTVAL (XEXP (XEXP (x, 0), 1))),
6802 NULL_RTX, i, 1, 0, in_code == COMPARE);
6805 /* On machines without logical shifts, if the operand of the AND is
6806 a logical shift and our mask turns off all the propagated sign
6807 bits, we can replace the logical shift with an arithmetic shift. */
6808 else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6809 && !have_insn_for (LSHIFTRT, mode)
6810 && have_insn_for (ASHIFTRT, mode)
6811 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6812 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6813 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6814 && mode_width <= HOST_BITS_PER_WIDE_INT)
6816 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6818 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6819 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6820 SUBST (XEXP (x, 0),
6821 gen_rtx_ASHIFTRT (mode,
6822 make_compound_operation
6823 (XEXP (XEXP (x, 0), 0), next_code),
6824 XEXP (XEXP (x, 0), 1)));
6827 /* If the constant is one less than a power of two, this might be
6828 representable by an extraction even if no shift is present.
6829 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6830 we are in a COMPARE. */
6831 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6832 new = make_extraction (mode,
6833 make_compound_operation (XEXP (x, 0),
6834 next_code),
6835 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6837 /* If we are in a comparison and this is an AND with a power of two,
6838 convert this into the appropriate bit extract. */
6839 else if (in_code == COMPARE
6840 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6841 new = make_extraction (mode,
6842 make_compound_operation (XEXP (x, 0),
6843 next_code),
6844 i, NULL_RTX, 1, 1, 0, 1);
6846 break;
6848 case LSHIFTRT:
6849 /* If the sign bit is known to be zero, replace this with an
6850 arithmetic shift. */
6851 if (have_insn_for (ASHIFTRT, mode)
6852 && ! have_insn_for (LSHIFTRT, mode)
6853 && mode_width <= HOST_BITS_PER_WIDE_INT
6854 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6856 new = gen_rtx_ASHIFTRT (mode,
6857 make_compound_operation (XEXP (x, 0),
6858 next_code),
6859 XEXP (x, 1));
6860 break;
6863 /* ... fall through ... */
6865 case ASHIFTRT:
6866 lhs = XEXP (x, 0);
6867 rhs = XEXP (x, 1);
6869 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6870 this is a SIGN_EXTRACT. */
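/* Example (illustrative, assuming 32-bit SImode): the pair
   (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24))
   extracts the low byte of X with sign extension, i.e. an extraction
   with POS == 0 and LEN == 8; make_extraction may render it as
   (sign_extend:SI (subreg:QI X 0)) or as a SIGN_EXTRACT.  */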
6871 if (GET_CODE (rhs) == CONST_INT
6872 && GET_CODE (lhs) == ASHIFT
6873 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6874 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6876 new = make_compound_operation (XEXP (lhs, 0), next_code);
6877 new = make_extraction (mode, new,
6878 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6879 NULL_RTX, mode_width - INTVAL (rhs),
6880 code == LSHIFTRT, 0, in_code == COMPARE);
6881 break;
6884 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6885 If so, try to merge the shifts into a SIGN_EXTEND. We could
6886 also do this for some cases of SIGN_EXTRACT, but it doesn't
6887 seem worth the effort; the case checked for occurs on Alpha. */
6889 if (!OBJECT_P (lhs)
6890 && ! (GET_CODE (lhs) == SUBREG
6891 && (OBJECT_P (SUBREG_REG (lhs))))
6892 && GET_CODE (rhs) == CONST_INT
6893 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6894 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6895 new = make_extraction (mode, make_compound_operation (new, next_code),
6896 0, NULL_RTX, mode_width - INTVAL (rhs),
6897 code == LSHIFTRT, 0, in_code == COMPARE);
6899 break;
6901 case SUBREG:
6902 /* Call ourselves recursively on the inner expression. If we are
6903 narrowing the object and it has a different RTL code from
6904 what it originally had, do this SUBREG as a force_to_mode.
6906 tem = make_compound_operation (SUBREG_REG (x), in_code);
6909 rtx simplified;
6910 simplified = simplify_subreg (GET_MODE (x), tem, GET_MODE (tem),
6911 SUBREG_BYTE (x));
6913 if (simplified)
6914 tem = simplified;
6916 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6917 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6918 && subreg_lowpart_p (x))
6920 rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6921 NULL_RTX, 0);
6923 /* If we have something other than a SUBREG, we might have
6924 done an expansion, so rerun ourselves. */
6925 if (GET_CODE (newer) != SUBREG)
6926 newer = make_compound_operation (newer, in_code);
6928 return newer;
6931 if (simplified)
6932 return tem;
6934 break;
6936 default:
6937 break;
6940 if (new)
6942 x = gen_lowpart (mode, new);
6943 code = GET_CODE (x);
6946 /* Now recursively process each operand of this operation. */
6947 fmt = GET_RTX_FORMAT (code);
6948 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6949 if (fmt[i] == 'e')
6951 new = make_compound_operation (XEXP (x, i), next_code);
6952 SUBST (XEXP (x, i), new);
6955 /* If this is a commutative operation, the changes to the operands
6956 may have made it noncanonical. */
6957 if (COMMUTATIVE_ARITH_P (x)
6958 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
6960 tem = XEXP (x, 0);
6961 SUBST (XEXP (x, 0), XEXP (x, 1));
6962 SUBST (XEXP (x, 1), tem);
6965 return x;
6968 /* Given M, see if it is a value that would select a field of bits
6969 within an item, but not the entire word. Return -1 if not.
6970 Otherwise, return the starting position of the field, where 0 is the
6971 low-order bit.
6973 *PLEN is set to the length of the field. */
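/* Worked example (added for illustration): for M == 0x70
   (binary 1110000), M & -M == 0x10, so POS == 4; (M >> 4) + 1 == 8,
   so *PLEN == 3.  A mask such as 0x50, whose one bits are not
   contiguous, fails the second test and yields -1.  */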
6975 static int
6976 get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
6978 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6979 int pos = exact_log2 (m & -m);
6980 int len = 0;
6982 if (pos >= 0)
6983 /* Now shift off the low-order zero bits and see if we have a
6984 power of two minus 1. */
6985 len = exact_log2 ((m >> pos) + 1);
6987 if (len <= 0)
6988 pos = -1;
6990 *plen = len;
6991 return pos;
6994 /* See if X can be simplified knowing that we will only refer to it in
6995 MODE and will only refer to those bits that are nonzero in MASK.
6996 If other bits are being computed or if masking operations are done
6997 that select a superset of the bits in MASK, they can sometimes be
6998 ignored.
7000 Return a possibly simplified expression, but always convert X to
7001 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
7003 Also, if REG is nonzero and X is a register equal in value to REG,
7004 replace X with REG.
7006 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
7007 are all off in X. This is used when X will be complemented, by either
7008 NOT, NEG, or XOR. */
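/* Example (illustrative; assumes nothing further is known about X):
   with MASK == 0xff, (and X (const_int 0xfff)) reduces to just X,
   since the AND cannot affect any bit selected by MASK.  */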
7010 static rtx
7011 force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
7012 rtx reg, int just_select)
7014 enum rtx_code code = GET_CODE (x);
7015 int next_select = just_select || code == XOR || code == NOT || code == NEG;
7016 enum machine_mode op_mode;
7017 unsigned HOST_WIDE_INT fuller_mask, nonzero;
7018 rtx op0, op1, temp;
7020 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
7021 code below will do the wrong thing since the mode of such an
7022 expression is VOIDmode.
7024 Also do nothing if X is a CLOBBER; this can happen if X was
7025 the return value from a call to gen_lowpart. */
7026 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
7027 return x;
7029 /* We want to perform the operation in its present mode unless we know
7030 that the operation is valid in MODE, in which case we do the operation
7031 in MODE. */
7032 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
7033 && have_insn_for (code, mode))
7034 ? mode : GET_MODE (x));
7036 /* It is not valid to do a right-shift in a narrower mode
7037 than the one it came in with. */
7038 if ((code == LSHIFTRT || code == ASHIFTRT)
7039 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
7040 op_mode = GET_MODE (x);
7042 /* Truncate MASK to fit OP_MODE. */
7043 if (op_mode)
7044 mask &= GET_MODE_MASK (op_mode);
7046 /* When we have an arithmetic operation, or a shift whose count we
7047 do not know, we need to assume that all bits up to the highest-order
7048 bit in MASK will be needed. This is how we form such a mask. */
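/* For instance, MASK == 0x14 (binary 10100) gives
   FULLER_MASK == 0x1f: every bit up to the highest one set.  */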
7049 if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
7050 fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
7051 else
7052 fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
7053 - 1);
7055 /* Determine what bits of X are guaranteed to be (non)zero. */
7056 nonzero = nonzero_bits (x, mode);
7058 /* If none of the bits in X are needed, return a zero. */
7059 if (! just_select && (nonzero & mask) == 0)
7060 x = const0_rtx;
7062 /* If X is a CONST_INT, return a new one. Do this here since the
7063 test below will fail. */
7064 if (GET_CODE (x) == CONST_INT)
7066 if (SCALAR_INT_MODE_P (mode))
7067 return gen_int_mode (INTVAL (x) & mask, mode);
7068 else
7070 x = GEN_INT (INTVAL (x) & mask);
7071 return gen_lowpart_common (mode, x);
7075 /* If X is narrower than MODE and we want all the bits in X's mode, just
7076 get X in the proper mode. */
7077 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
7078 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
7079 return gen_lowpart (mode, x);
7081 switch (code)
7083 case CLOBBER:
7084 /* If X is a (clobber (const_int)), return it since we know we are
7085 generating something that won't match. */
7086 return x;
7088 case USE:
7089 /* X is a (use (mem ..)) that was made from a bit-field extraction that
7090 spanned the boundary of the MEM. If we are now masking so it is
7091 within that boundary, we don't need the USE any more. */
7092 if (! BITS_BIG_ENDIAN
7093 && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
7094 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7095 break;
7097 case SIGN_EXTEND:
7098 case ZERO_EXTEND:
7099 case ZERO_EXTRACT:
7100 case SIGN_EXTRACT:
7101 x = expand_compound_operation (x);
7102 if (GET_CODE (x) != code)
7103 return force_to_mode (x, mode, mask, reg, next_select);
7104 break;
7106 case REG:
7107 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
7108 || rtx_equal_p (reg, get_last_value (x))))
7109 x = reg;
7110 break;
7112 case SUBREG:
7113 if (subreg_lowpart_p (x)
7114 /* We can ignore the effect of this SUBREG if it narrows the mode or
7115 if the constant masks to zero all the bits the mode doesn't
7116 have. */
7117 && ((GET_MODE_SIZE (GET_MODE (x))
7118 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7119 || (0 == (mask
7120 & GET_MODE_MASK (GET_MODE (x))
7121 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
7122 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
7123 break;
7125 case AND:
7126 /* If this is an AND with a constant, convert it into an AND
7127 whose constant is the AND of that constant with MASK. If it
7128 remains an AND of MASK, delete it since it is redundant. */
7130 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7132 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
7133 mask & INTVAL (XEXP (x, 1)));
7135 /* If X is still an AND, see if it is an AND with a mask that
7136 is just some low-order bits. If so, and it is MASK, we don't
7137 need it. */
7139 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
7140 && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
7141 == mask))
7142 x = XEXP (x, 0);
7144 /* If it remains an AND, try making another AND with the bits
7145 in the mode mask that aren't in MASK turned on. If the
7146 constant in the AND is wide enough, this might make a
7147 cheaper constant. */
7149 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
7150 && GET_MODE_MASK (GET_MODE (x)) != mask
7151 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
7153 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
7154 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
7155 int width = GET_MODE_BITSIZE (GET_MODE (x));
7156 rtx y;
7158 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
7159 number, sign extend it. */
7160 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
7161 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7162 cval |= (HOST_WIDE_INT) -1 << width;
7164 y = simplify_gen_binary (AND, GET_MODE (x),
7165 XEXP (x, 0), GEN_INT (cval));
7166 if (rtx_cost (y, SET) < rtx_cost (x, SET))
7167 x = y;
7170 break;
7173 goto binop;
7175 case PLUS:
7176 /* In (and (plus FOO C1) M), if M is a mask that just turns off
7177 low-order bits (as in an alignment operation) and FOO is already
7178 aligned to that boundary, mask C1 to that boundary as well.
7179 This may eliminate that PLUS and, later, the AND. */
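/* Example (illustrative): with MASK == ~7 and FOO known to have its
   low three bits clear, (plus FOO (const_int 9)) is rewritten as
   (plus FOO (const_int 8)); the two agree in every bit MASK keeps.  */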
7182 unsigned int width = GET_MODE_BITSIZE (mode);
7183 unsigned HOST_WIDE_INT smask = mask;
7185 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
7186 number, sign extend it. */
7188 if (width < HOST_BITS_PER_WIDE_INT
7189 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7190 smask |= (HOST_WIDE_INT) -1 << width;
7192 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7193 && exact_log2 (- smask) >= 0
7194 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
7195 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
7196 return force_to_mode (plus_constant (XEXP (x, 0),
7197 (INTVAL (XEXP (x, 1)) & smask)),
7198 mode, smask, reg, next_select);
7201 /* ... fall through ... */
7203 case MULT:
7204 /* For PLUS, MINUS and MULT, we need any bits less significant than the
7205 most significant bit in MASK since carries from those bits will
7206 affect the bits we are interested in. */
7207 mask = fuller_mask;
7208 goto binop;
7210 case MINUS:
7211 /* If X is (minus C Y) where C's least set bit is larger than any bit
7212 in the mask, then we may replace with (neg Y). */
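/* Example (illustrative): with MASK == 0xff,
   (minus (const_int 0x100) Y) becomes (neg Y), since
   0x100 - Y and -Y agree modulo 0x100, i.e. in all the masked bits.  */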
7213 if (GET_CODE (XEXP (x, 0)) == CONST_INT
7214 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
7215 & -INTVAL (XEXP (x, 0))))
7216 > mask))
7218 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
7219 GET_MODE (x));
7220 return force_to_mode (x, mode, mask, reg, next_select);
7223 /* Similarly, if C contains every bit in the fuller_mask, then we may
7224 replace with (not Y). */
7225 if (GET_CODE (XEXP (x, 0)) == CONST_INT
7226 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
7227 == INTVAL (XEXP (x, 0))))
7229 x = simplify_gen_unary (NOT, GET_MODE (x),
7230 XEXP (x, 1), GET_MODE (x));
7231 return force_to_mode (x, mode, mask, reg, next_select);
7234 mask = fuller_mask;
7235 goto binop;
7237 case IOR:
7238 case XOR:
7239 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7240 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7241 operation which may be a bitfield extraction. Ensure that the
7242 constant we form is not wider than the mode of X. */
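/* Example (illustrative; assumes the conditions below hold):
   (ior (lshiftrt FOO (const_int 2)) (const_int 3)) is commuted into
   (lshiftrt (ior FOO (const_int 12)) (const_int 2)), moving the
   constant inside the shift where an extraction may be recognized.  */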
7244 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7245 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7246 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7247 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7248 && GET_CODE (XEXP (x, 1)) == CONST_INT
7249 && ((INTVAL (XEXP (XEXP (x, 0), 1))
7250 + floor_log2 (INTVAL (XEXP (x, 1))))
7251 < GET_MODE_BITSIZE (GET_MODE (x)))
7252 && (INTVAL (XEXP (x, 1))
7253 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
7255 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
7256 << INTVAL (XEXP (XEXP (x, 0), 1)));
7257 temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
7258 XEXP (XEXP (x, 0), 0), temp);
7259 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
7260 XEXP (XEXP (x, 0), 1));
7261 return force_to_mode (x, mode, mask, reg, next_select);
7264 binop:
7265 /* For most binary operations, just propagate into the operation and
7266 change the mode if we have an operation of that mode. */
7268 op0 = gen_lowpart (op_mode,
7269 force_to_mode (XEXP (x, 0), mode, mask,
7270 reg, next_select));
7271 op1 = gen_lowpart (op_mode,
7272 force_to_mode (XEXP (x, 1), mode, mask,
7273 reg, next_select));
7275 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7276 x = simplify_gen_binary (code, op_mode, op0, op1);
7277 break;
7279 case ASHIFT:
7280 /* For left shifts, do the same, but just for the first operand.
7281 However, we cannot do anything with shifts where we cannot
7282 guarantee that the counts are smaller than the size of the mode
7283 because such a count will have a different meaning in a
7284 wider mode. */
7286 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
7287 && INTVAL (XEXP (x, 1)) >= 0
7288 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7289 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7290 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7291 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7292 break;
7294 /* If the shift count is a constant and we can do arithmetic in
7295 the mode of the shift, refine which bits we need. Otherwise, use the
7296 conservative form of the mask. */
7297 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7298 && INTVAL (XEXP (x, 1)) >= 0
7299 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7300 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7301 mask >>= INTVAL (XEXP (x, 1));
7302 else
7303 mask = fuller_mask;
7305 op0 = gen_lowpart (op_mode,
7306 force_to_mode (XEXP (x, 0), op_mode,
7307 mask, reg, next_select));
7309 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7310 x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
7311 break;
7313 case LSHIFTRT:
7314 /* Here we can only do something if the shift count is a constant,
7315 this shift constant is valid for the host, and we can do arithmetic
7316 in OP_MODE. */
7318 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7319 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7320 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7322 rtx inner = XEXP (x, 0);
7323 unsigned HOST_WIDE_INT inner_mask;
7325 /* Select the mask of the bits we need for the shift operand. */
7326 inner_mask = mask << INTVAL (XEXP (x, 1));
7328 /* We can only change the mode of the shift if we can do arithmetic
7329 in the mode of the shift and INNER_MASK is no wider than the
7330 width of X's mode. */
7331 if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
7332 op_mode = GET_MODE (x);
7334 inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
7336 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7337 x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7340 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7341 shift and AND produces only copies of the sign bit (C2 is one less
7342 than a power of two), we can do this with just a shift. */
7344 if (GET_CODE (x) == LSHIFTRT
7345 && GET_CODE (XEXP (x, 1)) == CONST_INT
7346 /* The shift puts one of the sign bit copies in the least significant
7347 bit. */
7348 && ((INTVAL (XEXP (x, 1))
7349 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7350 >= GET_MODE_BITSIZE (GET_MODE (x)))
7351 && exact_log2 (mask + 1) >= 0
7352 /* Number of bits left after the shift must be more than the mask
7353 needs. */
7354 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7355 <= GET_MODE_BITSIZE (GET_MODE (x)))
7356 /* Must be more sign bit copies than the mask needs. */
7357 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7358 >= exact_log2 (mask + 1)))
7359 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7360 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7361 - exact_log2 (mask + 1)));
7363 goto shiftrt;
7365 case ASHIFTRT:
7366 /* If we are just looking for the sign bit, we don't need this shift at
7367 all, even if it has a variable count. */
7368 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7369 && (mask == ((unsigned HOST_WIDE_INT) 1
7370 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7371 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7373 /* If this is a shift by a constant, get a mask that contains those bits
7374 that are not copies of the sign bit. We then have two cases: If
7375 MASK only includes those bits, this can be a logical shift, which may
7376 allow simplifications. If MASK is a single-bit field not within
7377 those bits, we are requesting a copy of the sign bit and hence can
7378 shift the sign bit to the appropriate location. */
7380 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7381 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7383 int i = -1;
7385 /* If the considered data is wider than HOST_WIDE_INT, we can't
7386 represent a mask for all its bits in a single scalar.
7387 But we only care about the lower bits, so calculate these. */
7389 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7391 nonzero = ~(HOST_WIDE_INT) 0;
7393 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7394 is the number of bits a full-width mask would have set.
7395 We need only shift if these are fewer than nonzero can
7396 hold. If not, we must keep all bits set in nonzero. */
7398 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7399 < HOST_BITS_PER_WIDE_INT)
7400 nonzero >>= INTVAL (XEXP (x, 1))
7401 + HOST_BITS_PER_WIDE_INT
7402 - GET_MODE_BITSIZE (GET_MODE (x));
7404 else
7406 nonzero = GET_MODE_MASK (GET_MODE (x));
7407 nonzero >>= INTVAL (XEXP (x, 1));
7410 if ((mask & ~nonzero) == 0
7411 || (i = exact_log2 (mask)) >= 0)
7413 x = simplify_shift_const
7414 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7415 i < 0 ? INTVAL (XEXP (x, 1))
7416 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7418 if (GET_CODE (x) != ASHIFTRT)
7419 return force_to_mode (x, mode, mask, reg, next_select);
7423 /* If MASK is 1, convert this to an LSHIFTRT. This can be done
7424 even if the shift count isn't a constant. */
7425 if (mask == 1)
7426 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
7427 XEXP (x, 0), XEXP (x, 1));
7429 shiftrt:
7431 /* If this is a zero- or sign-extension operation that just affects bits
7432 we don't care about, remove it. Be sure the call above returned
7433 something that is still a shift. */
7435 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7436 && GET_CODE (XEXP (x, 1)) == CONST_INT
7437 && INTVAL (XEXP (x, 1)) >= 0
7438 && (INTVAL (XEXP (x, 1))
7439 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7440 && GET_CODE (XEXP (x, 0)) == ASHIFT
7441 && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
7442 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7443 reg, next_select);
7445 break;
7447 case ROTATE:
7448 case ROTATERT:
7449 /* If the shift count is constant and we can do computations
7450 in the mode of X, compute where the bits we care about are.
7451 Otherwise, we can't do anything. Don't change the mode of
7452 the shift or propagate MODE into the shift, though. */
7453 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7454 && INTVAL (XEXP (x, 1)) >= 0)
7456 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7457 GET_MODE (x), GEN_INT (mask),
7458 XEXP (x, 1));
7459 if (temp && GET_CODE (temp) == CONST_INT)
7460 SUBST (XEXP (x, 0),
7461 force_to_mode (XEXP (x, 0), GET_MODE (x),
7462 INTVAL (temp), reg, next_select));
7464 break;
7466 case NEG:
7467 /* If we just want the low-order bit, the NEG isn't needed since it
7468 won't change the low-order bit. */
7469 if (mask == 1)
7470 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7472 /* We need any bits less significant than the most significant bit in
7473 MASK since carries from those bits will affect the bits we are
7474 interested in. */
7475 mask = fuller_mask;
7476 goto unop;
7478 case NOT:
7479 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7480 same as the XOR case above. Ensure that the constant we form is not
7481 wider than the mode of X. */
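/* Example (illustrative): with MASK == 3,
   (not (lshiftrt FOO (const_int 4))) becomes
   (lshiftrt (xor FOO (const_int 0x30)) (const_int 4)); the two agree
   in the two bits selected by MASK.  */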
7483 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7484 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7485 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7486 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7487 < GET_MODE_BITSIZE (GET_MODE (x)))
7488 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7490 temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
7491 GET_MODE (x));
7492 temp = simplify_gen_binary (XOR, GET_MODE (x),
7493 XEXP (XEXP (x, 0), 0), temp);
7494 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
7495 temp, XEXP (XEXP (x, 0), 1));
7497 return force_to_mode (x, mode, mask, reg, next_select);
7500 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7501 use the full mask inside the NOT. */
7502 mask = fuller_mask;
7504 unop:
7505 op0 = gen_lowpart (op_mode,
7506 force_to_mode (XEXP (x, 0), mode, mask,
7507 reg, next_select));
7508 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7509 x = simplify_gen_unary (code, op_mode, op0, op_mode);
7510 break;
7512 case NE:
7513 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7514 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7515 and that bit is equal to STORE_FLAG_VALUE. */
7516 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7517 && GET_MODE (XEXP (x, 0)) == mode
7518 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7519 && (nonzero_bits (XEXP (x, 0), mode)
7520 == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
7521 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7523 break;
7525 case IF_THEN_ELSE:
7526 /* We have no way of knowing if the IF_THEN_ELSE can itself be
7527 written in a narrower mode. We play it safe and do not do so. */
7529 SUBST (XEXP (x, 1),
7530 gen_lowpart (GET_MODE (x),
7531 force_to_mode (XEXP (x, 1), mode,
7532 mask, reg, next_select)));
7533 SUBST (XEXP (x, 2),
7534 gen_lowpart (GET_MODE (x),
7535 force_to_mode (XEXP (x, 2), mode,
7536 mask, reg, next_select)));
7537 break;
7539 default:
7540 break;
7543 /* Ensure we return a value of the proper mode. */
7544 return gen_lowpart (mode, x);
7547 /* Return nonzero if X is an expression that has one of two values depending on
7548 whether some other value is zero or nonzero. In that case, we return the
7549 value that is being tested, *PTRUE is set to the value if the rtx being
7550 returned has a nonzero value, and *PFALSE is set to the other alternative.
7552 If we return zero, we set *PTRUE and *PFALSE to X. */
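/* The simplest case (shown for illustration): for
   X == (ne A (const_int 0)) we return A, with *PTRUE == const_true_rtx
   and *PFALSE == const0_rtx.  */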
7554 static rtx
7555 if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
7557 enum machine_mode mode = GET_MODE (x);
7558 enum rtx_code code = GET_CODE (x);
7559 rtx cond0, cond1, true0, true1, false0, false1;
7560 unsigned HOST_WIDE_INT nz;
7562 /* If we are comparing a value against zero, we are done. */
7563 if ((code == NE || code == EQ)
7564 && XEXP (x, 1) == const0_rtx)
7566 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7567 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7568 return XEXP (x, 0);
7571 /* If this is a unary operation whose operand has one of two values, apply
7572 our opcode to compute those values. */
7573 else if (UNARY_P (x)
7574 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7576 *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7577 *pfalse = simplify_gen_unary (code, mode, false0,
7578 GET_MODE (XEXP (x, 0)));
7579 return cond0;
7582 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7583 make can't possibly match and would suppress other optimizations. */
7584 else if (code == COMPARE)
7587 /* If this is a binary operation, see if either side has only one of two
7588 values. If either one does or if both do and they are conditional on
7589 the same value, compute the new true and false values. */
7590 else if (BINARY_P (x))
7592 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7593 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7595 if ((cond0 != 0 || cond1 != 0)
7596 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7598 /* If if_then_else_cond returned zero, then true/false are the
7599 same rtl. We must copy one of them to prevent invalid rtl
7600 sharing. */
7601 if (cond0 == 0)
7602 true0 = copy_rtx (true0);
7603 else if (cond1 == 0)
7604 true1 = copy_rtx (true1);
7606 if (COMPARISON_P (x))
7608 *ptrue = simplify_gen_relational (code, mode, VOIDmode,
7609 true0, true1);
7610 *pfalse = simplify_gen_relational (code, mode, VOIDmode,
7611 false0, false1);
7613 else
7615 *ptrue = simplify_gen_binary (code, mode, true0, true1);
7616 *pfalse = simplify_gen_binary (code, mode, false0, false1);
7619 return cond0 ? cond0 : cond1;
7622 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7623 operands is zero when the other is nonzero, and vice-versa,
7624 and STORE_FLAG_VALUE is 1 or -1. */
7626 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7627 && (code == PLUS || code == IOR || code == XOR || code == MINUS
7628 || code == UMAX)
7629 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7631 rtx op0 = XEXP (XEXP (x, 0), 1);
7632 rtx op1 = XEXP (XEXP (x, 1), 1);
7634 cond0 = XEXP (XEXP (x, 0), 0);
7635 cond1 = XEXP (XEXP (x, 1), 0);
7637 if (COMPARISON_P (cond0)
7638 && COMPARISON_P (cond1)
7639 && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
7640 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7641 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7642 || ((swap_condition (GET_CODE (cond0))
7643 == reversed_comparison_code (cond1, NULL))
7644 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7645 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7646 && ! side_effects_p (x))
7648 *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
7649 *pfalse = simplify_gen_binary (MULT, mode,
7650 (code == MINUS
7651 ? simplify_gen_unary (NEG, mode,
7652 op1, mode)
7653 : op1),
7654 const_true_rtx);
7655 return cond0;
7659 /* Similarly for MULT, AND and UMIN, except that for these the result
7660 is always zero. */
7661 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7662 && (code == MULT || code == AND || code == UMIN)
7663 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7665 cond0 = XEXP (XEXP (x, 0), 0);
7666 cond1 = XEXP (XEXP (x, 1), 0);
7668 if (COMPARISON_P (cond0)
7669 && COMPARISON_P (cond1)
7670 && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
7671 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7672 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7673 || ((swap_condition (GET_CODE (cond0))
7674 == reversed_comparison_code (cond1, NULL))
7675 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7676 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7677 && ! side_effects_p (x))
7679 *ptrue = *pfalse = const0_rtx;
7680 return cond0;
7685 else if (code == IF_THEN_ELSE)
7687 /* If we have IF_THEN_ELSE already, extract the condition and
7688 canonicalize it if it is NE or EQ. */
7689 cond0 = XEXP (x, 0);
7690 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7691 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7692 return XEXP (cond0, 0);
7693 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7695 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7696 return XEXP (cond0, 0);
7698 else
7699 return cond0;
7702 /* If X is a SUBREG, we can narrow both the true and false values
7703 of the inner expression, if there is a condition. */
7704 else if (code == SUBREG
7705 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7706 &true0, &false0)))
7708 true0 = simplify_gen_subreg (mode, true0,
7709 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7710 false0 = simplify_gen_subreg (mode, false0,
7711 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7712 if (true0 && false0)
7714 *ptrue = true0;
7715 *pfalse = false0;
7716 return cond0;
7720 /* If X is a constant, this isn't special and will cause confusion
7721 if we treat it as such. Likewise if it is equivalent to a constant. */
7722 else if (CONSTANT_P (x)
7723 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7726 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7727 will be least confusing to the rest of the compiler. */
7728 else if (mode == BImode)
7730 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7731 return x;
7734 /* If X is known to be either 0 or -1, those are the true and
7735 false values when testing X. */
7736 else if (x == constm1_rtx || x == const0_rtx
7737 || (mode != VOIDmode
7738 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7740 *ptrue = constm1_rtx, *pfalse = const0_rtx;
7741 return x;
7744 /* Likewise for 0 or a single bit. */
7745 else if (SCALAR_INT_MODE_P (mode)
7746 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7747 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7749 *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7750 return x;
7753 /* Otherwise fail; show no condition with true and false values the same. */
7754 *ptrue = *pfalse = x;
7755 return 0;
7758 /* Return the value of expression X given the fact that condition COND
7759 is known to be true when applied to REG as its first operand and VAL
7760 as its second. X is known to not be shared and so can be modified in
7761 place.
7763 We only handle the simplest cases, and specifically those cases that
7764 arise with IF_THEN_ELSE expressions. */
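/* Example (illustrative): if COND is GE, REG is A and VAL is
   (const_int 0), i.e. we know A >= 0 holds, then X == (abs A)
   simplifies to just A.  */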
7766 static rtx
7767 known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
7769 enum rtx_code code = GET_CODE (x);
7770 rtx temp;
7771 const char *fmt;
7772 int i, j;
7774 if (side_effects_p (x))
7775 return x;
7777 /* If either operand of the condition is a floating point value,
7778 then we have to avoid collapsing an EQ comparison. */
7779 if (cond == EQ
7780 && rtx_equal_p (x, reg)
7781 && ! FLOAT_MODE_P (GET_MODE (x))
7782 && ! FLOAT_MODE_P (GET_MODE (val)))
7783 return val;
7785 if (cond == UNEQ && rtx_equal_p (x, reg))
7786 return val;
7788 /* If X is (abs REG) and we know something about REG's relationship
7789 with zero, we may be able to simplify this. */
7791 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7792 switch (cond)
7794 case GE: case GT: case EQ:
7795 return XEXP (x, 0);
7796 case LT: case LE:
7797 return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7798 XEXP (x, 0),
7799 GET_MODE (XEXP (x, 0)));
7800 default:
7801 break;
7804 /* The only other cases we handle are MIN, MAX, and comparisons if the
7805 operands are the same as REG and VAL. */
7807 else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
7809 if (rtx_equal_p (XEXP (x, 0), val))
7810 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7812 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7814 if (COMPARISON_P (x))
7816 if (comparison_dominates_p (cond, code))
7817 return const_true_rtx;
7819 code = reversed_comparison_code (x, NULL);
7820 if (code != UNKNOWN
7821 && comparison_dominates_p (cond, code))
7822 return const0_rtx;
7823 else
7824 return x;
7826 else if (code == SMAX || code == SMIN
7827 || code == UMIN || code == UMAX)
7829 int unsignedp = (code == UMIN || code == UMAX);
7831 /* Do not reverse the condition when it is NE or EQ.
7832 This is because we cannot conclude anything about
7833 the value of 'SMAX (x, y)' when x is not equal to y,
7834 but we can when x equals y. */
7835 if ((code == SMAX || code == UMAX)
7836 && ! (cond == EQ || cond == NE))
7837 cond = reverse_condition (cond);
7839 switch (cond)
7841 case GE: case GT:
7842 return unsignedp ? x : XEXP (x, 1);
7843 case LE: case LT:
7844 return unsignedp ? x : XEXP (x, 0);
7845 case GEU: case GTU:
7846 return unsignedp ? XEXP (x, 1) : x;
7847 case LEU: case LTU:
7848 return unsignedp ? XEXP (x, 0) : x;
7849 default:
7850 break;
7855 else if (code == SUBREG)
7857 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7858 rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7860 if (SUBREG_REG (x) != r)
7862 /* We must simplify subreg here, before we lose track of the
7863 original inner_mode. */
7864 new = simplify_subreg (GET_MODE (x), r,
7865 inner_mode, SUBREG_BYTE (x));
7866 if (new)
7867 return new;
7868 else
7869 SUBST (SUBREG_REG (x), r);
7872 return x;
7874 /* We don't have to handle SIGN_EXTEND here, because even in the
7875 case of replacing something with a modeless CONST_INT, a
7876 CONST_INT is already (supposed to be) a valid sign extension for
7877 its narrower mode, which implies it's already properly
7878 sign-extended for the wider mode. Now, for ZERO_EXTEND, the
7879 story is different. */
7880 else if (code == ZERO_EXTEND)
7882 enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7883 rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7885 if (XEXP (x, 0) != r)
7887 /* We must simplify the zero_extend here, before we lose
7888 track of the original inner_mode. */
7889 new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7890 r, inner_mode);
7891 if (new)
7892 return new;
7893 else
7894 SUBST (XEXP (x, 0), r);
7897 return x;
7900 fmt = GET_RTX_FORMAT (code);
7901 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7903 if (fmt[i] == 'e')
7904 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7905 else if (fmt[i] == 'E')
7906 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7907 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7908 cond, reg, val));
7911 return x;
7914 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
7915 assignment as a field assignment. */
7917 static int
7918 rtx_equal_for_field_assignment_p (rtx x, rtx y)
7920 if (x == y || rtx_equal_p (x, y))
7921 return 1;
7923 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7924 return 0;
7926 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7927 Note that all SUBREGs of MEM are paradoxical; otherwise they
7928 would have been rewritten. */
7929 if (MEM_P (x) && GET_CODE (y) == SUBREG
7930 && MEM_P (SUBREG_REG (y))
7931 && rtx_equal_p (SUBREG_REG (y),
7932 gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
7933 return 1;
7935 if (MEM_P (y) && GET_CODE (x) == SUBREG
7936 && MEM_P (SUBREG_REG (x))
7937 && rtx_equal_p (SUBREG_REG (x),
7938 gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
7939 return 1;
7941 /* We used to see if get_last_value of X and Y were the same but that's
7942 not correct. In one direction, we'll cause the assignment to have
7943 the wrong destination and in the other, we'll import a register into this
7944 insn that might already have been dead. So fail if none of the
7945 above cases are true. */
7946 return 0;
7949 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
7950 Return that assignment if so.
7952 We only handle the most common cases. */
7954 static rtx
7955 make_field_assignment (rtx x)
7957 rtx dest = SET_DEST (x);
7958 rtx src = SET_SRC (x);
7959 rtx assign;
7960 rtx rhs, lhs;
7961 HOST_WIDE_INT c1;
7962 HOST_WIDE_INT pos;
7963 unsigned HOST_WIDE_INT len;
7964 rtx other;
7965 enum machine_mode mode;
7967 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7968 a clear of a one-bit field. We will have changed it to
7969 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7970 for a SUBREG. */
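/* Example (illustrative): (set DEST (and (rotate (const_int -2) POS)
   DEST)) typically becomes
   (set (zero_extract DEST (const_int 1) POS) (const_int 0)),
   provided make_extraction succeeds.  */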
7972 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7973 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7974 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7975 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7977 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7978 1, 1, 1, 0);
7979 if (assign != 0)
7980 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7981 return x;
7984 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7985 && subreg_lowpart_p (XEXP (src, 0))
7986 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7987 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7988 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7989 && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
7990 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7991 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7993 assign = make_extraction (VOIDmode, dest, 0,
7994 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7995 1, 1, 1, 0);
7996 if (assign != 0)
7997 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7998 return x;
8001 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
8002 one-bit field. */
8003 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
8004 && XEXP (XEXP (src, 0), 0) == const1_rtx
8005 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8007 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
8008 1, 1, 1, 0);
8009 if (assign != 0)
8010 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
8011 return x;
8014 /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
8015 SRC is an AND with all bits of that field set, then we can discard
8016 the AND. */
8017 if (GET_CODE (dest) == ZERO_EXTRACT
8018 && GET_CODE (XEXP (dest, 1)) == CONST_INT
8019 && GET_CODE (src) == AND
8020 && GET_CODE (XEXP (src, 1)) == CONST_INT)
8022 HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
8023 unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
8024 unsigned HOST_WIDE_INT ze_mask;
8026 if (width >= HOST_BITS_PER_WIDE_INT)
8027 ze_mask = -1;
8028 else
8029 ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;
8031 /* Complete overlap. We can remove the source AND. */
8032 if ((and_mask & ze_mask) == ze_mask)
8033 return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
8035 /* Partial overlap. We can reduce the source AND. */
8036 if ((and_mask & ze_mask) != and_mask)
8038 mode = GET_MODE (src);
8039 src = gen_rtx_AND (mode, XEXP (src, 0),
8040 gen_int_mode (and_mask & ze_mask, mode));
8041 return gen_rtx_SET (VOIDmode, dest, src);
8045 /* The other case we handle is assignments into a constant-position
8046 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
8047 a mask that has all one bits except for a group of zero bits and
8048 OTHER is known to have zeros where C1 has ones, this is such an
8049 assignment. Compute the position and length from C1. Shift OTHER
8050 to the appropriate position, force it to the required mode, and
8051 make the extraction. Check for the AND in both operands. */
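/* Example (illustrative, SImode): for
   (set DEST (ior (and DEST (const_int -256)) OTHER)), where OTHER is
   known to be nonzero only in its low byte, C1 == -256 gives POS == 0
   and LEN == 8, and OTHER is stored into an eight-bit field of DEST
   (via a ZERO_EXTRACT or STRICT_LOW_PART destination, whichever
   make_extraction produces).  */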
8053 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
8054 return x;
8056 rhs = expand_compound_operation (XEXP (src, 0));
8057 lhs = expand_compound_operation (XEXP (src, 1));
8059 if (GET_CODE (rhs) == AND
8060 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
8061 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
8062 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
8063 else if (GET_CODE (lhs) == AND
8064 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
8065 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
8066 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
8067 else
8068 return x;
8070 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
8071 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
8072 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
8073 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
8074 return x;
8076 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
8077 if (assign == 0)
8078 return x;
8080 /* The mode to use for the source is the mode of the assignment, or of
8081 what is inside a possible STRICT_LOW_PART. */
8082 mode = (GET_CODE (assign) == STRICT_LOW_PART
8083 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
8085 /* Shift OTHER right POS places and make it the source, restricting it
8086 to the proper length and mode. */
8088 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
8089 GET_MODE (src), other, pos),
8090 mode,
8091 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
8092 ? ~(unsigned HOST_WIDE_INT) 0
8093 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
8094 dest, 0);
8096 /* If SRC is masked by an AND that does not make a difference in
8097 the value being stored, strip it. */
8098 if (GET_CODE (assign) == ZERO_EXTRACT
8099 && GET_CODE (XEXP (assign, 1)) == CONST_INT
8100 && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
8101 && GET_CODE (src) == AND
8102 && GET_CODE (XEXP (src, 1)) == CONST_INT
8103 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
8104 == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
8105 src = XEXP (src, 0);
8107 return gen_rtx_SET (VOIDmode, assign, src);
8110 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
8111 if so. */
8113 static rtx
8114 apply_distributive_law (rtx x)
8116 enum rtx_code code = GET_CODE (x);
8117 enum rtx_code inner_code;
8118 rtx lhs, rhs, other;
8119 rtx tem;
8121 /* Distributivity is not true for floating point as it can change the
8122 value. So we don't do it unless -funsafe-math-optimizations is enabled.
8123 if (FLOAT_MODE_P (GET_MODE (x))
8124 && ! flag_unsafe_math_optimizations)
8125 return x;
8127 /* The outer operation can only be one of the following: */
8128 if (code != IOR && code != AND && code != XOR
8129 && code != PLUS && code != MINUS)
8130 return x;
8132 lhs = XEXP (x, 0);
8133 rhs = XEXP (x, 1);
8135 /* If either operand is a primitive we can't do anything, so get out
8136 fast. */
8137 if (OBJECT_P (lhs) || OBJECT_P (rhs))
8138 return x;
8140 lhs = expand_compound_operation (lhs);
8141 rhs = expand_compound_operation (rhs);
8142 inner_code = GET_CODE (lhs);
8143 if (inner_code != GET_CODE (rhs))
8144 return x;
8146 /* See if the inner and outer operations distribute. */
8147 switch (inner_code)
8149 case LSHIFTRT:
8150 case ASHIFTRT:
8151 case AND:
8152 case IOR:
8153 /* These all distribute except over PLUS. */
8154 if (code == PLUS || code == MINUS)
8155 return x;
8156 break;
8158 case MULT:
8159 if (code != PLUS && code != MINUS)
8160 return x;
8161 break;
8163 case ASHIFT:
8164 /* This is also a multiply, so it distributes over everything. */
8165 break;
8167 case SUBREG:
8168 /* Non-paradoxical SUBREGs distribute over all operations,
8169 provided the inner modes and byte offsets are the same, this
8170 is an extraction of a low-order part, we don't convert an fp
8171 operation to int or vice versa, this is not a vector mode,
8172 and we would not be converting a single-word operation into a
8173 multi-word operation. The latter test is not required, but
8174 it prevents generating unneeded multi-word operations. Some
8175 of the previous tests are redundant given the latter test,
8176 but are retained because they are required for correctness.
8178 We produce the result slightly differently in this case. */
8180 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
8181 || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
8182 || ! subreg_lowpart_p (lhs)
8183 || (GET_MODE_CLASS (GET_MODE (lhs))
8184 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
8185 || (GET_MODE_SIZE (GET_MODE (lhs))
8186 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
8187 || VECTOR_MODE_P (GET_MODE (lhs))
8188 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
8189 return x;
8191 tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8192 SUBREG_REG (lhs), SUBREG_REG (rhs));
8193 return gen_lowpart (GET_MODE (x), tem);
8195 default:
8196 return x;
8199 /* Set LHS and RHS to the inner operands (A and B in the example
8200 above) and set OTHER to the common operand (C in the example).
8201 There is only one way to do this unless the inner operation is
8202 commutative. */
8203 if (COMMUTATIVE_ARITH_P (lhs)
8204 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8205 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
8206 else if (COMMUTATIVE_ARITH_P (lhs)
8207 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8208 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
8209 else if (COMMUTATIVE_ARITH_P (lhs)
8210 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8211 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8212 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8213 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8214 else
8215 return x;
8217 /* Form the new inner operation, seeing if it simplifies first. */
8218 tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
8220 /* There is one exception to the general way of distributing:
8221 (a | c) ^ (b | c) -> (a ^ b) & ~c */
8222 if (code == XOR && inner_code == IOR)
8224 inner_code = AND;
8225 other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
8228 /* We may be able to continue distributing the result, so call
8229 ourselves recursively on the inner operation before forming the
8230 outer operation, which we return. */
8231 return simplify_gen_binary (inner_code, GET_MODE (x),
8232 apply_distributive_law (tem), other);
8235 /* See if X is of the form (* (+ A B) C), and if so convert to
8236 (+ (* A C) (* B C)) and try to simplify.
8238 Most of the time, this results in no change. However, if some of
8239 the operands are the same or inverses of each other, simplifications
8240 will result.
8242 For example, (and (ior A B) (not B)) can occur as the result of
8243 expanding a bit field assignment. When we apply the distributive
8244 law to this, we get (ior (and A (not B)) (and B (not B))),
8245 which then simplifies to (and A (not B)).
8247 Note that no checks happen on the validity of applying the inverse
8248 distributive law. Checking here would be pointless, since we can
8249 do it in the few places where this routine is called.
8251 N is the index of the term that is decomposed (the arithmetic operation,
8252 i.e. (+ A B) in the first example above). !N is the index of the term that
8253 is distributed, i.e. of C in the first example above. */
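/* Example (illustrative): for X == (and (ior A B) C) and N == 0,
   DECOMPOSED is (ior A B) and DISTRIBUTED is C; we form
   (ior (and A C) (and B C)), let apply_distributive_law simplify it,
   and keep the result only if it is cheaper than X.  */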
8254 static rtx
8255 distribute_and_simplify_rtx (rtx x, int n)
8257 enum machine_mode mode;
8258 enum rtx_code outer_code, inner_code;
8259 rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
8261 decomposed = XEXP (x, n);
8262 if (!ARITHMETIC_P (decomposed))
8263 return NULL_RTX;
8265 mode = GET_MODE (x);
8266 outer_code = GET_CODE (x);
8267 distributed = XEXP (x, !n);
8269 inner_code = GET_CODE (decomposed);
8270 inner_op0 = XEXP (decomposed, 0);
8271 inner_op1 = XEXP (decomposed, 1);
8273 /* Special case (and (xor B C) (not A)), which is equivalent to
8274 (xor (ior A B) (ior A C)) */
8275 if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
8277 distributed = XEXP (distributed, 0);
8278 outer_code = IOR;
8281 if (n == 0)
8283 /* Distribute the second term. */
8284 new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
8285 new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
8287 else
8289 /* Distribute the first term. */
8290 new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
8291 new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
8294 tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
8295 new_op0, new_op1));
8296 if (GET_CODE (tmp) != outer_code
8297 && rtx_cost (tmp, SET) < rtx_cost (x, SET))
8298 return tmp;
8300 return NULL_RTX;
8303 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8304 in MODE.
8306 Return an equivalent form, if different from X. Otherwise, return X. If
8307 X is zero, we are to always construct the equivalent form. */
8309 static rtx
8310 simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
8311 unsigned HOST_WIDE_INT constop)
8313 unsigned HOST_WIDE_INT nonzero;
8314 int i;
8316 /* Simplify VAROP knowing that we will only be looking at some of the
8317 bits in it.
8319 Note by passing in CONSTOP, we guarantee that the bits not set in
8320 CONSTOP are not significant and will never be examined. We must
8321 ensure that is the case by explicitly masking out those bits
8322 before returning. */
8323 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
8325 /* If VAROP is a CLOBBER, we will fail so return it. */
8326 if (GET_CODE (varop) == CLOBBER)
8327 return varop;
8329 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8330 to VAROP and return the new constant. */
8331 if (GET_CODE (varop) == CONST_INT)
8332 return gen_int_mode (INTVAL (varop) & constop, mode);
8334 /* See what bits may be nonzero in VAROP. Unlike the general case of
8335 a call to nonzero_bits, here we don't care about bits outside
8336 MODE. */
8338 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
8340 /* Turn off all bits in the constant that are known to already be zero.
8341 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8342 which is tested below. */
8344 constop &= nonzero;
8346 /* If we don't have any bits left, return zero. */
8347 if (constop == 0)
8348 return const0_rtx;
8350 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8351 a power of two, we can replace this with an ASHIFT. */
8352 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8353 && (i = exact_log2 (constop)) >= 0)
8354 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
8356 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8357 or XOR, then try to apply the distributive law. This may eliminate
8358 operations if either branch can be simplified because of the AND.
8359 It may also make some cases more complex, but those cases probably
8360 won't match a pattern either with or without this. */
8362 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8363 return
8364 gen_lowpart
8365 (mode,
8366 apply_distributive_law
8367 (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
8368 simplify_and_const_int (NULL_RTX,
8369 GET_MODE (varop),
8370 XEXP (varop, 0),
8371 constop),
8372 simplify_and_const_int (NULL_RTX,
8373 GET_MODE (varop),
8374 XEXP (varop, 1),
8375 constop))));
8377 /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8378 the AND and see if one of the operands simplifies to zero. If so, we
8379 may eliminate it. */
8381 if (GET_CODE (varop) == PLUS
8382 && exact_log2 (constop + 1) >= 0)
8384 rtx o0, o1;
8386 o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8387 o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8388 if (o0 == const0_rtx)
8389 return o1;
8390 if (o1 == const0_rtx)
8391 return o0;
8394 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
8395 if we already had one (just check for the simplest cases). */
8396 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8397 && GET_MODE (XEXP (x, 0)) == mode
8398 && SUBREG_REG (XEXP (x, 0)) == varop)
8399 varop = XEXP (x, 0);
8400 else
8401 varop = gen_lowpart (mode, varop);
8403 /* If we can't make the SUBREG, try to return what we were given. */
8404 if (GET_CODE (varop) == CLOBBER)
8405 return x ? x : varop;
8407 /* If we are only masking insignificant bits, return VAROP. */
8408 if (constop == nonzero)
8409 x = varop;
8410 else
8412 /* Otherwise, return an AND. */
8413 constop = trunc_int_for_mode (constop, mode);
8414 /* See how much, if any, of X we can use. */
8415 if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
8416 x = simplify_gen_binary (AND, mode, varop, GEN_INT (constop));
8418 else
8420 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8421 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
8422 SUBST (XEXP (x, 1), GEN_INT (constop));
8424 SUBST (XEXP (x, 0), varop);
8428 return x;
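/* A standalone sketch (illustrative, not combine's API) checking three
   identities this routine relies on, over 8-bit values: dropping mask
   bits already known to be zero, rewriting (and (neg B) (1 << k)) as
   (ashift B k) when B is 0 or 1, and distributing a low-bits mask over
   PLUS.  */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (unsigned x = 0; x < 256; x++)
    for (unsigned y = 0; y < 256; y++)
      {
	/* Bits known zero in (x & 0x0f) can be cleared in CONSTOP:
	   masking with 0xfc is really masking with 0x0c.  */
	assert (((x & 0x0f) & 0xfc) == ((x & 0x0f) & 0x0c));

	/* For B in {0, 1}: (and (neg B) (1 << 4)) == (ashift B 4).  */
	uint8_t b = x & 1;
	assert ((uint8_t) (-b & 0x10) == (uint8_t) (b << 4));

	/* A mask of 2**k - 1 distributes over PLUS; if one masked
	   operand is zero, only the other survives.  */
	assert (((x + y) & 0x07) == (((x & 0x07) + (y & 0x07)) & 0x07));
	if ((x & 0x07) == 0)
	  assert (((x + y) & 0x07) == (y & 0x07));
      }
  return 0;
}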
8431 /* Given a REG, X, compute which bits in X can be nonzero.
8432 We don't care about bits outside of those defined in MODE.
8434 For most X this is simply GET_MODE_MASK (MODE), but if X is
8435 a shift, AND, or zero_extract, we can do better. */
8437 static rtx
8438 reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode,
8439 rtx known_x ATTRIBUTE_UNUSED,
8440 enum machine_mode known_mode ATTRIBUTE_UNUSED,
8441 unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
8442 unsigned HOST_WIDE_INT *nonzero)
8444 rtx tem;
8446 /* If X is a register whose nonzero bits value is current, use it.
8447 Otherwise, if X is a register whose value we can find, use that
8448 value. Otherwise, use the previously-computed global nonzero bits
8449 for this register. */
8451 if (reg_stat[REGNO (x)].last_set_value != 0
8452 && (reg_stat[REGNO (x)].last_set_mode == mode
8453 || (GET_MODE_CLASS (reg_stat[REGNO (x)].last_set_mode) == MODE_INT
8454 && GET_MODE_CLASS (mode) == MODE_INT))
8455 && (reg_stat[REGNO (x)].last_set_label == label_tick
8456 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8457 && REG_N_SETS (REGNO (x)) == 1
8458 && ! REGNO_REG_SET_P
8459 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
8460 REGNO (x))))
8461 && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
8463 *nonzero &= reg_stat[REGNO (x)].last_set_nonzero_bits;
8464 return NULL;
8467 tem = get_last_value (x);
8469 if (tem)
8471 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8472 /* If X is narrower than MODE and TEM is a non-negative
8473 constant that would appear negative in the mode of X,
8474 sign-extend it for use in reg_nonzero_bits because some
8475 machines (maybe most) will actually do the sign-extension
8476 and this is the conservative approach.
8478 ??? For 2.5, try to tighten up the MD files in this regard
8479 instead of this kludge. */
8481 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
8482 && GET_CODE (tem) == CONST_INT
8483 && INTVAL (tem) > 0
8484 && 0 != (INTVAL (tem)
8485 & ((HOST_WIDE_INT) 1
8486 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8487 tem = GEN_INT (INTVAL (tem)
8488 | ((HOST_WIDE_INT) (-1)
8489 << GET_MODE_BITSIZE (GET_MODE (x))));
8490 #endif
8491 return tem;
8493 else if (nonzero_sign_valid && reg_stat[REGNO (x)].nonzero_bits)
8495 unsigned HOST_WIDE_INT mask = reg_stat[REGNO (x)].nonzero_bits;
8497 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
8498 /* We don't know anything about the upper bits. */
8499 mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8500 *nonzero &= mask;
8503 return NULL;
8506 /* Return the number of bits at the high-order end of X that are known to
8507 be equal to the sign bit. X will be used in mode MODE; if MODE is
8508 VOIDmode, X will be used in its own mode. The returned value will always
8509 be between 1 and the number of bits in MODE. */
8511 static rtx
8512 reg_num_sign_bit_copies_for_combine (rtx x, enum machine_mode mode,
8513 rtx known_x ATTRIBUTE_UNUSED,
8514 enum machine_mode known_mode
8515 ATTRIBUTE_UNUSED,
8516 unsigned int known_ret ATTRIBUTE_UNUSED,
8517 unsigned int *result)
8519 rtx tem;
8521 if (reg_stat[REGNO (x)].last_set_value != 0
8522 && reg_stat[REGNO (x)].last_set_mode == mode
8523 && (reg_stat[REGNO (x)].last_set_label == label_tick
8524 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8525 && REG_N_SETS (REGNO (x)) == 1
8526 && ! REGNO_REG_SET_P
8527 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
8528 REGNO (x))))
8529 && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
8531 *result = reg_stat[REGNO (x)].last_set_sign_bit_copies;
8532 return NULL;
8535 tem = get_last_value (x);
8536 if (tem != 0)
8537 return tem;
8539 if (nonzero_sign_valid && reg_stat[REGNO (x)].sign_bit_copies != 0
8540 && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
8541 *result = reg_stat[REGNO (x)].sign_bit_copies;
8543 return NULL;
8546 /* Return the number of "extended" bits there are in X, when interpreted
8547 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8548 unsigned quantities, this is the number of high-order zero bits.
8549 For signed quantities, this is the number of copies of the sign bit
8550 minus 1. In both cases, this function returns the number of "spare"
8551 bits. For example, if two quantities for which this function returns
8552 at least 1 are added, the addition is known not to overflow.
8554 This function will always return 0 unless called during combine, which
8555 implies that it must be called from a define_split. */
8557 unsigned int
8558 extended_count (rtx x, enum machine_mode mode, int unsignedp)
8560 if (nonzero_sign_valid == 0)
8561 return 0;
8563 return (unsignedp
8564 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8565 ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
8566 - floor_log2 (nonzero_bits (x, mode)))
8567 : 0)
8568 : num_sign_bit_copies (x, mode) - 1);
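/* A standalone sketch of the unsigned arm of the expression above
   (names are illustrative): the count of "spare" bits is
   MODE_BITSIZE - 1 - floor_log2 (nonzero_bits), and one spare bit in
   each addend rules out overflow of the sum.  */

#include <assert.h>

static int
floor_log2_ex (unsigned v)	/* floor (log2 (V)) for V > 0.  */
{
  int l = -1;
  while (v)
    l++, v >>= 1;
  return l;
}

int
main (void)
{
  /* A QImode value known to fit in 7 bits has nonzero mask 0x7f,
     hence 8 - 1 - 6 == 1 spare bit.  */
  assert (8 - 1 - floor_log2_ex (0x7f) == 1);

  /* Two such values always sum without overflowing 8 bits.  */
  for (unsigned x = 0; x <= 0x7f; x++)
    for (unsigned y = 0; y <= 0x7f; y++)
      assert (x + y <= 0xff);
  return 0;
}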
8571 /* This function is called from `simplify_shift_const' to merge two
8572 outer operations. Specifically, we have already found that we need
8573 to perform operation *POP0 with constant *PCONST0 at the outermost
8574 position. We would now like to also perform OP1 with constant CONST1
8575 (with *POP0 being done last).
8577 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8578 the resulting operation. *PCOMP_P is set to 1 if we would need to
8579 complement the innermost operand, otherwise it is unchanged.
8581 MODE is the mode in which the operation will be done. No bits outside
8582 the width of this mode matter. It is assumed that the width of this mode
8583 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8585 If *POP0 or OP1 is UNKNOWN, no operation is required. Only NEG, PLUS,
8586 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8587 result is simply *PCONST0.
8589 If the resulting operation cannot be expressed as one operation, we
8590 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8592 static int
8593 merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1,
8594 HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
8595 enum rtx_code op0 = *pop0;
8596 HOST_WIDE_INT const0 = *pconst0;
8598 const0 &= GET_MODE_MASK (mode);
8599 const1 &= GET_MODE_MASK (mode);
8601 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8602 if (op0 == AND)
8603 const1 &= const0;
8605 /* If OP0 or OP1 is UNKNOWN, this is easy. Similarly if they are the same or
8606 if OP0 is SET. */
8608 if (op1 == UNKNOWN || op0 == SET)
8609 return 1;
8611 else if (op0 == UNKNOWN)
8612 op0 = op1, const0 = const1;
8614 else if (op0 == op1)
8616 switch (op0)
8618 case AND:
8619 const0 &= const1;
8620 break;
8621 case IOR:
8622 const0 |= const1;
8623 break;
8624 case XOR:
8625 const0 ^= const1;
8626 break;
8627 case PLUS:
8628 const0 += const1;
8629 break;
8630 case NEG:
8631 op0 = UNKNOWN;
8632 break;
8633 default:
8634 break;
8638 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8639 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8640 return 0;
8642 /* If the two constants aren't the same, we can't do anything. The
8643 remaining six cases can all be done. */
8644 else if (const0 != const1)
8645 return 0;
8647 else
8648 switch (op0)
8650 case IOR:
8651 if (op1 == AND)
8652 /* (a & b) | b == b */
8653 op0 = SET;
8654 else /* op1 == XOR */
8655 /* (a ^ b) | b == a | b */
8657 break;
8659 case XOR:
8660 if (op1 == AND)
8661 /* (a & b) ^ b == (~a) & b */
8662 op0 = AND, *pcomp_p = 1;
8663 else /* op1 == IOR */
8664 /* (a | b) ^ b == a & ~b */
8665 op0 = AND, const0 = ~const0;
8666 break;
8668 case AND:
8669 if (op1 == IOR)
8670 /* (a | b) & b == b */
8671 op0 = SET;
8672 else /* op1 == XOR */
8673 /* (a ^ b) & b == (~a) & b */
8674 *pcomp_p = 1;
8675 break;
8676 default:
8677 break;
8680 /* Check for NO-OP cases. */
8681 const0 &= GET_MODE_MASK (mode);
8682 if (const0 == 0
8683 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8684 op0 = UNKNOWN;
8685 else if (const0 == 0 && op0 == AND)
8686 op0 = SET;
8687 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8688 && op0 == AND)
8689 op0 = UNKNOWN;
8691 /* ??? Slightly redundant with the above mask, but not entirely.
8692 Moving this above means we'd have to sign-extend the mode mask
8693 for the final test. */
8694 const0 = trunc_int_for_mode (const0, mode);
8696 *pop0 = op0;
8697 *pconst0 = const0;
8699 return 1;
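/* A standalone brute-force check (not combine code) of the algebra
   encoded in the switch above, for all 8-bit A and B.  */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (unsigned a = 0; a < 256; a++)
    for (unsigned b = 0; b < 256; b++)
      {
	uint8_t A = a, B = b;

	assert (((A & B) | B) == B);			/* IOR of AND */
	assert (((A ^ B) | B) == (A | B));		/* IOR of XOR */
	assert (((A & B) ^ B) == ((uint8_t) ~A & B));	/* XOR of AND */
	assert (((A | B) ^ B) == (A & (uint8_t) ~B));	/* XOR of IOR */
	assert (((A | B) & B) == B);			/* AND of IOR */
	assert (((A ^ B) & B) == ((uint8_t) ~A & B));	/* AND of XOR */
      }
  return 0;
}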
8702 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8703 The result of the shift is RESULT_MODE. X, if nonzero, is an expression
8704 that we started with.
8706 The shift is normally computed in the widest mode we find in VAROP, as
8707 long as it isn't a different number of words than RESULT_MODE. Exceptions
8708 are ASHIFTRT and ROTATE, which are always done in their original mode. */
8710 static rtx
8711 simplify_shift_const (rtx x, enum rtx_code code,
8712 enum machine_mode result_mode, rtx varop,
8713 int orig_count)
8715 enum rtx_code orig_code = code;
8716 unsigned int count;
8717 int signed_count;
8718 enum machine_mode mode = result_mode;
8719 enum machine_mode shift_mode, tmode;
8720 unsigned int mode_words
8721 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8722 /* We form (outer_op (code varop count) (outer_const)). */
8723 enum rtx_code outer_op = UNKNOWN;
8724 HOST_WIDE_INT outer_const = 0;
8725 rtx const_rtx;
8726 int complement_p = 0;
8727 rtx new;
8729 /* Make sure to truncate the "natural" shift on the way in. We don't
8730 want to do this inside the loop as it makes it more difficult to
8731 combine shifts. */
8732 if (SHIFT_COUNT_TRUNCATED)
8733 orig_count &= GET_MODE_BITSIZE (mode) - 1;
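/* E.g. on such a target a count of 33 in a 32-bit mode is reduced
   to 1 here.  */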
8735 /* If we were given an invalid count, don't do anything except exactly
8736 what was requested. */
8738 if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
8740 if (x)
8741 return x;
8743 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
8746 count = orig_count;
8748 /* Unless one of the branches of the `if' in this loop does a `continue',
8749 we will `break' the loop after the `if'. */
8751 while (count != 0)
8753 /* If we have an operand of (clobber (const_int 0)), just return that
8754 value. */
8755 if (GET_CODE (varop) == CLOBBER)
8756 return varop;
8758 /* If we discovered we had to complement VAROP, leave. Making a NOT
8759 here would cause an infinite loop. */
8760 if (complement_p)
8761 break;
8763 /* Convert ROTATERT to ROTATE. */
8764 if (code == ROTATERT)
8766 unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
8767 code = ROTATE;
8768 if (VECTOR_MODE_P (result_mode))
8769 count = bitsize / GET_MODE_NUNITS (result_mode) - count;
8770 else
8771 count = bitsize - count;
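/* For example, (rotatert:SI X 8) becomes (rotate:SI X 24); for a
   vector mode the element width, not the full bitsize, is used.  */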
8774 /* We need to determine what mode we will do the shift in. If the
8775 shift is a right shift or a ROTATE, we must always do it in the mode
8776 it was originally done in. Otherwise, we can do it in MODE, the
8777 widest mode encountered. */
8778 shift_mode
8779 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8780 ? result_mode : mode);
8782 /* Handle cases where the count is greater than the size of the mode
8783 minus 1. For ASHIFTRT, use the size minus one as the count (this can
8784 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8785 take the count modulo the size. For other shifts, the result is
8786 zero.
8788 Since these shifts are being produced by the compiler by combining
8789 multiple operations, each of which are defined, we know what the
8790 result is supposed to be. */
8792 if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1))
8794 if (code == ASHIFTRT)
8795 count = GET_MODE_BITSIZE (shift_mode) - 1;
8796 else if (code == ROTATE || code == ROTATERT)
8797 count %= GET_MODE_BITSIZE (shift_mode);
8798 else
8800 /* We can't simply return zero because there may be an
8801 outer op. */
8802 varop = const0_rtx;
8803 count = 0;
8804 break;
8808 /* An arithmetic right shift of a quantity known to be -1 or 0
8809 is a no-op. */
8810 if (code == ASHIFTRT
8811 && (num_sign_bit_copies (varop, shift_mode)
8812 == GET_MODE_BITSIZE (shift_mode)))
8814 count = 0;
8815 break;
8818 /* If we are doing an arithmetic right shift and discarding all but
8819 the sign bit copies, this is equivalent to doing a shift by the
8820 bitsize minus one. Convert it into that shift because it will often
8821 allow other simplifications. */
8823 if (code == ASHIFTRT
8824 && (count + num_sign_bit_copies (varop, shift_mode)
8825 >= GET_MODE_BITSIZE (shift_mode)))
8826 count = GET_MODE_BITSIZE (shift_mode) - 1;
8828 /* We simplify the tests below and elsewhere by converting
8829 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8830 `make_compound_operation' will convert it to an ASHIFTRT for
8831 those machines (such as VAX) that don't have an LSHIFTRT. */
8832 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8833 && code == ASHIFTRT
8834 && ((nonzero_bits (varop, shift_mode)
8835 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8836 == 0))
8837 code = LSHIFTRT;
8839 if (code == LSHIFTRT
8840 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8841 && !(nonzero_bits (varop, shift_mode) >> count))
8842 varop = const0_rtx;
8843 if (code == ASHIFT
8844 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8845 && !((nonzero_bits (varop, shift_mode) << count)
8846 & GET_MODE_MASK (shift_mode)))
8847 varop = const0_rtx;
8849 switch (GET_CODE (varop))
8851 case SIGN_EXTEND:
8852 case ZERO_EXTEND:
8853 case SIGN_EXTRACT:
8854 case ZERO_EXTRACT:
8855 new = expand_compound_operation (varop);
8856 if (new != varop)
8858 varop = new;
8859 continue;
8861 break;
8863 case MEM:
8864 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8865 minus the width of a smaller mode, we can do this with a
8866 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8867 if ((code == ASHIFTRT || code == LSHIFTRT)
8868 && ! mode_dependent_address_p (XEXP (varop, 0))
8869 && ! MEM_VOLATILE_P (varop)
8870 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8871 MODE_INT, 1)) != BLKmode)
8873 new = adjust_address_nv (varop, tmode,
8874 BYTES_BIG_ENDIAN ? 0
8875 : count / BITS_PER_UNIT);
8877 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8878 : ZERO_EXTEND, mode, new);
8879 count = 0;
8880 continue;
8882 break;
8884 case USE:
8885 /* Similar to the case above, except that we can only do this if
8886 the resulting mode is the same as that of the underlying
8887 MEM and adjust the address depending on the *bits* endianness
8888 because of the way that bit-field extract insns are defined. */
8889 if ((code == ASHIFTRT || code == LSHIFTRT)
8890 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8891 MODE_INT, 1)) != BLKmode
8892 && tmode == GET_MODE (XEXP (varop, 0)))
8894 if (BITS_BIG_ENDIAN)
8895 new = XEXP (varop, 0);
8896 else
8898 new = copy_rtx (XEXP (varop, 0));
8899 SUBST (XEXP (new, 0),
8900 plus_constant (XEXP (new, 0),
8901 count / BITS_PER_UNIT));
8904 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8905 : ZERO_EXTEND, mode, new);
8906 count = 0;
8907 continue;
8909 break;
8911 case SUBREG:
8912 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8913 the same number of words as what we've seen so far. Then store
8914 the widest mode in MODE. */
8915 if (subreg_lowpart_p (varop)
8916 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8917 > GET_MODE_SIZE (GET_MODE (varop)))
8918 && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8919 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8920 == mode_words)
8922 varop = SUBREG_REG (varop);
8923 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8924 mode = GET_MODE (varop);
8925 continue;
8927 break;
8929 case MULT:
8930 /* Some machines use MULT instead of ASHIFT because MULT
8931 is cheaper. But it is still better on those machines to
8932 merge two shifts into one. */
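/* E.g. (mult X (const_int 8)) is rewritten as (ashift X 3) so that
   it can take part in the nested-shift merging below.  */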
8933 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8934 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8936 varop
8937 = simplify_gen_binary (ASHIFT, GET_MODE (varop),
8938 XEXP (varop, 0),
8939 GEN_INT (exact_log2 (
8940 INTVAL (XEXP (varop, 1)))));
8941 continue;
8943 break;
8945 case UDIV:
8946 /* Similar, for when divides are cheaper. */
8947 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8948 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8950 varop
8951 = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
8952 XEXP (varop, 0),
8953 GEN_INT (exact_log2 (
8954 INTVAL (XEXP (varop, 1)))));
8955 continue;
8957 break;
8959 case ASHIFTRT:
8960 /* If we are extracting just the sign bit of an arithmetic
8961 right shift, that shift is not needed. However, the sign
8962 bit of a wider mode may be different from what would be
8963 interpreted as the sign bit in a narrower mode, so, if
8964 the result is narrower, don't discard the shift. */
8965 if (code == LSHIFTRT
8966 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
8967 && (GET_MODE_BITSIZE (result_mode)
8968 >= GET_MODE_BITSIZE (GET_MODE (varop))))
8970 varop = XEXP (varop, 0);
8971 continue;
8974 /* ... fall through ... */
8976 case LSHIFTRT:
8977 case ASHIFT:
8978 case ROTATE:
8979 /* Here we have two nested shifts. The result is usually the
8980 AND of a new shift with a mask. We compute the result below. */
8981 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8982 && INTVAL (XEXP (varop, 1)) >= 0
8983 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8984 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8985 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8987 enum rtx_code first_code = GET_CODE (varop);
8988 unsigned int first_count = INTVAL (XEXP (varop, 1));
8989 unsigned HOST_WIDE_INT mask;
8990 rtx mask_rtx;
8992 /* We have one common special case. We can't do any merging if
8993 the inner code is an ASHIFTRT of a smaller mode. However, if
8994 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8995 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8996 we can convert it to
8997 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
8998 This simplifies certain SIGN_EXTEND operations. */
8999 if (code == ASHIFT && first_code == ASHIFTRT
9000 && count == (unsigned int)
9001 (GET_MODE_BITSIZE (result_mode)
9002 - GET_MODE_BITSIZE (GET_MODE (varop))))
9004 /* C3 has the low-order C1 bits zero. */
9006 mask = (GET_MODE_MASK (mode)
9007 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9009 varop = simplify_and_const_int (NULL_RTX, result_mode,
9010 XEXP (varop, 0), mask);
9011 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9012 varop, count);
9013 count = first_count;
9014 code = ASHIFTRT;
9015 continue;
9018 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9019 than C1 high-order bits equal to the sign bit, we can convert
9020 this to either an ASHIFT or an ASHIFTRT depending on the
9021 two counts.
9023 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9025 if (code == ASHIFTRT && first_code == ASHIFT
9026 && GET_MODE (varop) == shift_mode
9027 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9028 > first_count))
9030 varop = XEXP (varop, 0);
9032 signed_count = count - first_count;
9033 if (signed_count < 0)
9034 count = -signed_count, code = ASHIFT;
9035 else
9036 count = signed_count;
9038 continue;
9041 /* There are some cases we can't do. If CODE is ASHIFTRT,
9042 we can only do this if FIRST_CODE is also ASHIFTRT.
9044 We can't do the case when CODE is ROTATE and FIRST_CODE is
9045 ASHIFTRT.
9047 If the mode of this shift is not the mode of the outer shift,
9048 we can't do this if either shift is a right shift or ROTATE.
9050 Finally, we can't do any of these if the mode is too wide
9051 unless the codes are the same.
9053 Handle the case where the shift codes are the same
9054 first. */
9056 if (code == first_code)
9058 if (GET_MODE (varop) != result_mode
9059 && (code == ASHIFTRT || code == LSHIFTRT
9060 || code == ROTATE))
9061 break;
9063 count += first_count;
9064 varop = XEXP (varop, 0);
9065 continue;
9068 if (code == ASHIFTRT
9069 || (code == ROTATE && first_code == ASHIFTRT)
9070 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9071 || (GET_MODE (varop) != result_mode
9072 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9073 || first_code == ROTATE
9074 || code == ROTATE)))
9075 break;
9077 /* To compute the mask to apply after the shift, shift the
9078 nonzero bits of the inner shift the same way the
9079 outer shift will. */
9081 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9083 mask_rtx
9084 = simplify_binary_operation (code, result_mode, mask_rtx,
9085 GEN_INT (count));
9087 /* Give up if we can't compute an outer operation to use. */
9088 if (mask_rtx == 0
9089 || GET_CODE (mask_rtx) != CONST_INT
9090 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9091 INTVAL (mask_rtx),
9092 result_mode, &complement_p))
9093 break;
9095 /* If the shifts are in the same direction, we add the
9096 counts. Otherwise, we subtract them. */
9097 signed_count = count;
9098 if ((code == ASHIFTRT || code == LSHIFTRT)
9099 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9100 signed_count += first_count;
9101 else
9102 signed_count -= first_count;
9104 /* If COUNT is positive, the new shift is usually CODE,
9105 except for the two exceptions below, in which case it is
9106 FIRST_CODE. If the count is negative, FIRST_CODE should
9107 always be used. */
9108 if (signed_count > 0
9109 && ((first_code == ROTATE && code == ASHIFT)
9110 || (first_code == ASHIFTRT && code == LSHIFTRT)))
9111 code = first_code, count = signed_count;
9112 else if (signed_count < 0)
9113 code = first_code, count = -signed_count;
9114 else
9115 count = signed_count;
9117 varop = XEXP (varop, 0);
9118 continue;
9121 /* If we have (A << B << C) for any shift, we can convert this to
9122 (A << C << B). This wins if A is a constant. Only try this if
9123 B is not a constant. */
9125 else if (GET_CODE (varop) == code
9126 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9127 && 0 != (new
9128 = simplify_binary_operation (code, mode,
9129 XEXP (varop, 0),
9130 GEN_INT (count))))
9132 varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9133 count = 0;
9134 continue;
9136 break;
9138 case NOT:
9139 /* Make this fit the case below. */
9140 varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9141 GEN_INT (GET_MODE_MASK (mode)));
9142 continue;
9144 case IOR:
9145 case AND:
9146 case XOR:
9147 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9148 with C the size of VAROP - 1 and the shift is logical if
9149 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9150 we have an (le X 0) operation. If we have an arithmetic shift
9151 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9152 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9154 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9155 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9156 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9157 && (code == LSHIFTRT || code == ASHIFTRT)
9158 && count == (unsigned int)
9159 (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9160 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9162 count = 0;
9163 varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9164 const0_rtx);
9166 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9167 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9169 continue;
9172 /* If we have (shift (logical)), move the logical to the outside
9173 to allow it to possibly combine with another logical and the
9174 shift to combine with another shift. This also canonicalizes to
9175 what a ZERO_EXTRACT looks like. Also, some machines have
9176 (and (shift)) insns. */
9178 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9179 /* We can't do this if we have (ashiftrt (xor)) and the
9180 constant has its sign bit set in shift_mode. */
9181 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9182 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9183 shift_mode))
9184 && (new = simplify_binary_operation (code, result_mode,
9185 XEXP (varop, 1),
9186 GEN_INT (count))) != 0
9187 && GET_CODE (new) == CONST_INT
9188 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9189 INTVAL (new), result_mode, &complement_p))
9191 varop = XEXP (varop, 0);
9192 continue;
9195 /* If we can't do that, try to simplify the shift in each arm of the
9196 logical expression, make a new logical expression, and apply
9197 the inverse distributive law. This also can't be done
9198 for some (ashiftrt (xor)). */
9199 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9200 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9201 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9202 shift_mode)))
9204 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9205 XEXP (varop, 0), count);
9206 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9207 XEXP (varop, 1), count);
9209 varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
9210 lhs, rhs);
9211 varop = apply_distributive_law (varop);
9213 count = 0;
9214 continue;
9216 break;
9218 case EQ:
9219 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9220 says that the sign bit can be tested, FOO has mode MODE, C is
9221 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9222 that may be nonzero. */
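/* E.g. with STORE_FLAG_VALUE == -1 in SImode,
   (lshiftrt (eq FOO 0) 31) becomes (xor FOO 1) when only FOO's
   low-order bit can be nonzero.  */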
9223 if (code == LSHIFTRT
9224 && XEXP (varop, 1) == const0_rtx
9225 && GET_MODE (XEXP (varop, 0)) == result_mode
9226 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9227 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9228 && ((STORE_FLAG_VALUE
9229 & ((HOST_WIDE_INT) 1
9230 << (GET_MODE_BITSIZE (result_mode) - 1))))
9231 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9232 && merge_outer_ops (&outer_op, &outer_const, XOR,
9233 (HOST_WIDE_INT) 1, result_mode,
9234 &complement_p))
9236 varop = XEXP (varop, 0);
9237 count = 0;
9238 continue;
9240 break;
9242 case NEG:
9243 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9244 than the number of bits in the mode is equivalent to A. */
9245 if (code == LSHIFTRT
9246 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9247 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9249 varop = XEXP (varop, 0);
9250 count = 0;
9251 continue;
9254 /* NEG commutes with ASHIFT since it is multiplication. Move the
9255 NEG outside to allow shifts to combine. */
9256 if (code == ASHIFT
9257 && merge_outer_ops (&outer_op, &outer_const, NEG,
9258 (HOST_WIDE_INT) 0, result_mode,
9259 &complement_p))
9261 varop = XEXP (varop, 0);
9262 continue;
9264 break;
9266 case PLUS:
9267 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9268 is one less than the number of bits in the mode is
9269 equivalent to (xor A 1). */
9270 if (code == LSHIFTRT
9271 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9272 && XEXP (varop, 1) == constm1_rtx
9273 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9274 && merge_outer_ops (&outer_op, &outer_const, XOR,
9275 (HOST_WIDE_INT) 1, result_mode,
9276 &complement_p))
9278 count = 0;
9279 varop = XEXP (varop, 0);
9280 continue;
9283 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9284 that might be nonzero in BAR are those being shifted out and those
9285 bits are known zero in FOO, we can replace the PLUS with FOO.
9286 Similarly in the other operand order. This code occurs when
9287 we are computing the size of a variable-size array. */
9289 if ((code == ASHIFTRT || code == LSHIFTRT)
9290 && count < HOST_BITS_PER_WIDE_INT
9291 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9292 && (nonzero_bits (XEXP (varop, 1), result_mode)
9293 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9295 varop = XEXP (varop, 0);
9296 continue;
9298 else if ((code == ASHIFTRT || code == LSHIFTRT)
9299 && count < HOST_BITS_PER_WIDE_INT
9300 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9301 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9302 >> count)
9303 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9304 & nonzero_bits (XEXP (varop, 1),
9305 result_mode)))
9307 varop = XEXP (varop, 1);
9308 continue;
9311 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9312 if (code == ASHIFT
9313 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9314 && (new = simplify_binary_operation (ASHIFT, result_mode,
9315 XEXP (varop, 1),
9316 GEN_INT (count))) != 0
9317 && GET_CODE (new) == CONST_INT
9318 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9319 INTVAL (new), result_mode, &complement_p))
9321 varop = XEXP (varop, 0);
9322 continue;
9325 /* Check for 'PLUS signbit', which is the canonical form of 'XOR
9326 signbit', and attempt to change the PLUS to an XOR and move it to
9327 the outer operation as is done above in the AND/IOR/XOR case
9328 leg for shift(logical). See details in logical handling above
9329 for reasoning in doing so. */
9330 if (code == LSHIFTRT
9331 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9332 && mode_signbit_p (result_mode, XEXP (varop, 1))
9333 && (new = simplify_binary_operation (code, result_mode,
9334 XEXP (varop, 1),
9335 GEN_INT (count))) != 0
9336 && GET_CODE (new) == CONST_INT
9337 && merge_outer_ops (&outer_op, &outer_const, XOR,
9338 INTVAL (new), result_mode, &complement_p))
9340 varop = XEXP (varop, 0);
9341 continue;
9344 break;
9346 case MINUS:
9347 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9348 with C the size of VAROP - 1 and the shift is logical if
9349 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9350 we have a (gt X 0) operation. If the shift is arithmetic with
9351 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9352 we have a (neg (gt X 0)) operation. */
9354 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9355 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9356 && count == (unsigned int)
9357 (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9358 && (code == LSHIFTRT || code == ASHIFTRT)
9359 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9360 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1))
9361 == count
9362 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9364 count = 0;
9365 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9366 const0_rtx);
9368 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9369 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9371 continue;
9373 break;
9375 case TRUNCATE:
9376 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9377 if the truncate does not affect the value. */
9378 if (code == LSHIFTRT
9379 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9380 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9381 && (INTVAL (XEXP (XEXP (varop, 0), 1))
9382 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9383 - GET_MODE_BITSIZE (GET_MODE (varop)))))
9385 rtx varop_inner = XEXP (varop, 0);
9387 varop_inner
9388 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9389 XEXP (varop_inner, 0),
9390 GEN_INT
9391 (count + INTVAL (XEXP (varop_inner, 1))));
9392 varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9393 count = 0;
9394 continue;
9396 break;
9398 default:
9399 break;
9402 break;
9405 /* We need to determine what mode to do the shift in. If the shift is
9406 a right shift or ROTATE, we must always do it in the mode it was
9407 originally done in. Otherwise, we can do it in MODE, the widest mode
9408 encountered. The code we care about is that of the shift that will
9409 actually be done, not the shift that was originally requested. */
9410 shift_mode
9411 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9412 ? result_mode : mode);
9414 /* We have now finished analyzing the shift. The result should be
9415 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9416 OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
9417 to the result of the shift. OUTER_CONST is the relevant constant,
9418 but we must turn off all bits turned off in the shift.
9420 If we were passed a value for X, see if we can use any pieces of
9421 it. If not, make a new rtx. */
9423 if (x && GET_RTX_CLASS (GET_CODE (x)) == RTX_BIN_ARITH
9424 && GET_CODE (XEXP (x, 1)) == CONST_INT
9425 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count)
9426 const_rtx = XEXP (x, 1);
9427 else
9428 const_rtx = GEN_INT (count);
9430 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9431 && GET_MODE (XEXP (x, 0)) == shift_mode
9432 && SUBREG_REG (XEXP (x, 0)) == varop)
9433 varop = XEXP (x, 0);
9434 else if (GET_MODE (varop) != shift_mode)
9435 varop = gen_lowpart (shift_mode, varop);
9437 /* If we can't make the SUBREG, try to return what we were given. */
9438 if (GET_CODE (varop) == CLOBBER)
9439 return x ? x : varop;
9441 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9442 if (new != 0)
9443 x = new;
9444 else
9445 x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9447 /* If we have an outer operation and we just made a shift, it is
9448 possible that we could have simplified the shift were it not
9449 for the outer operation. So try to do the simplification
9450 recursively. */
9452 if (outer_op != UNKNOWN && GET_CODE (x) == code
9453 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9454 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9455 INTVAL (XEXP (x, 1)));
9457 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9458 turn off all the bits that the shift would have turned off. */
9459 if (orig_code == LSHIFTRT && result_mode != shift_mode)
9460 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9461 GET_MODE_MASK (result_mode) >> orig_count);
9463 /* Do the remainder of the processing in RESULT_MODE. */
9464 x = gen_lowpart (result_mode, x);
9466 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9467 operation. */
9468 if (complement_p)
9469 x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9471 if (outer_op != UNKNOWN)
9473 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9474 outer_const = trunc_int_for_mode (outer_const, result_mode);
9476 if (outer_op == AND)
9477 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9478 else if (outer_op == SET)
9479 /* This means that we have determined that the result is
9480 equivalent to a constant. This should be rare. */
9481 x = GEN_INT (outer_const);
9482 else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
9483 x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9484 else
9485 x = simplify_gen_binary (outer_op, result_mode, x,
9486 GEN_INT (outer_const));
9489 return x;
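/* A standalone sketch (illustrative, not combine code) of the
   nested-shift algebra handled above, checked for 8-bit values:
   same-direction counts add, and an opposing round trip becomes an
   AND with the mask of the surviving bits.  */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (unsigned x = 0; x < 256; x++)
    {
      uint8_t v = x;

      /* (lshiftrt (lshiftrt V 3) 2) == (lshiftrt V 5).  */
      assert (((v >> 3) >> 2) == (v >> 5));

      /* (lshiftrt (ashift V 2) 2) == (and V 0x3f).  */
      assert ((((uint8_t) (v << 2)) >> 2) == (v & 0x3f));

      /* (ashift (lshiftrt V 2) 2) == (and V 0xfc).  */
      assert (((v >> 2) << 2) == (v & 0xfc));
    }
  return 0;
}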
9492 /* Like recog, but we receive the address of a pointer to a new pattern.
9493 We try to match the rtx that the pointer points to.
9494 If that fails, we may try to modify or replace the pattern,
9495 storing the replacement into the same pointer object.
9497 Modifications include deletion or addition of CLOBBERs.
9499 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9500 the CLOBBERs are placed.
9502 The value is the final insn code from the pattern ultimately matched,
9503 or -1. */
9505 static int
9506 recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
9508 rtx pat = *pnewpat;
9509 int insn_code_number;
9510 int num_clobbers_to_add = 0;
9511 int i;
9512 rtx notes = 0;
9513 rtx old_notes, old_pat;
9515 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9516 we use to indicate that something didn't match. If we find such a
9517 thing, force rejection. */
9518 if (GET_CODE (pat) == PARALLEL)
9519 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9520 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9521 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9522 return -1;
9524 old_pat = PATTERN (insn);
9525 old_notes = REG_NOTES (insn);
9526 PATTERN (insn) = pat;
9527 REG_NOTES (insn) = 0;
9529 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9531 /* If it isn't recognized, there is the possibility that we previously had an insn
9532 that clobbered some register as a side effect, but the combined
9533 insn doesn't need to do that. So try once more without the clobbers
9534 unless this represents an ASM insn. */
9536 if (insn_code_number < 0 && ! check_asm_operands (pat)
9537 && GET_CODE (pat) == PARALLEL)
9539 int pos;
9541 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9542 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9544 if (i != pos)
9545 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9546 pos++;
9549 SUBST_INT (XVECLEN (pat, 0), pos);
9551 if (pos == 1)
9552 pat = XVECEXP (pat, 0, 0);
9554 PATTERN (insn) = pat;
9555 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9557 PATTERN (insn) = old_pat;
9558 REG_NOTES (insn) = old_notes;
9560 /* Recognize all noop sets; these will be killed by a followup pass. */
9561 if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9562 insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9564 /* If we had any clobbers to add, make a new pattern that contains
9565 them. Then check to make sure that all of them are dead. */
9566 if (num_clobbers_to_add)
9568 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9569 rtvec_alloc (GET_CODE (pat) == PARALLEL
9570 ? (XVECLEN (pat, 0)
9571 + num_clobbers_to_add)
9572 : num_clobbers_to_add + 1));
9574 if (GET_CODE (pat) == PARALLEL)
9575 for (i = 0; i < XVECLEN (pat, 0); i++)
9576 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9577 else
9578 XVECEXP (newpat, 0, 0) = pat;
9580 add_clobbers (newpat, insn_code_number);
9582 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9583 i < XVECLEN (newpat, 0); i++)
9585 if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
9586 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9587 return -1;
9588 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9589 XEXP (XVECEXP (newpat, 0, i), 0), notes);
9591 pat = newpat;
9594 *pnewpat = pat;
9595 *pnotes = notes;
9597 return insn_code_number;
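/* Callers treat this result like recog's: -1 means the (possibly
   modified) pattern in *PNEWPAT still did not match and the
   combination must be abandoned; otherwise *PNEWPAT can be installed,
   with the REG_UNUSED notes in *PNOTES attached to the resulting
   insn.  */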
9600 /* Like gen_lowpart_general but for use by combine. In combine it
9601 is not possible to create any new pseudoregs. However, it is
9602 safe to create invalid memory addresses, because combine will
9603 try to recognize them and all they will do is make the combine
9604 attempt fail.
9606 If for some reason this cannot do its job, an rtx
9607 (clobber (const_int 0)) is returned.
9608 An insn containing that will not be recognized. */
9610 static rtx
9611 gen_lowpart_for_combine (enum machine_mode omode, rtx x)
9613 enum machine_mode imode = GET_MODE (x);
9614 unsigned int osize = GET_MODE_SIZE (omode);
9615 unsigned int isize = GET_MODE_SIZE (imode);
9616 rtx result;
9618 if (omode == imode)
9619 return x;
9621 /* Return identity if this is a CONST or symbolic reference. */
9622 if (omode == Pmode
9623 && (GET_CODE (x) == CONST
9624 || GET_CODE (x) == SYMBOL_REF
9625 || GET_CODE (x) == LABEL_REF))
9626 return x;
9628 /* We can only support MODE being wider than a word if X is a
9629 constant integer or has a mode the same size. */
9630 if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
9631 && ! ((imode == VOIDmode
9632 && (GET_CODE (x) == CONST_INT
9633 || GET_CODE (x) == CONST_DOUBLE))
9634 || isize == osize))
9635 goto fail;
9637 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9638 won't know what to do. So we will strip off the SUBREG here and
9639 process normally. */
9640 if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
9642 x = SUBREG_REG (x);
9644 /* In case we fall down into the address adjustments further
9645 below, we need to update the known mode and size of X, i.e.
9646 IMODE and ISIZE, since we just stripped the SUBREG from X. */
9647 imode = GET_MODE (x);
9649 if (imode == omode)
9650 return x;
9652 isize = GET_MODE_SIZE (imode);
9655 result = gen_lowpart_common (omode, x);
9657 #ifdef CANNOT_CHANGE_MODE_CLASS
9658 if (result != 0 && GET_CODE (result) == SUBREG)
9659 record_subregs_of_mode (result);
9660 #endif
9662 if (result)
9663 return result;
9665 if (MEM_P (x))
9667 int offset = 0;
9669 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9670 address. */
9671 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9672 goto fail;
9674 /* If we want to refer to something bigger than the original memref,
9675 generate a paradoxical subreg instead. That will force a reload
9676 of the original memref X. */
9677 if (isize < osize)
9678 return gen_rtx_SUBREG (omode, x, 0);
9680 if (WORDS_BIG_ENDIAN)
9681 offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
9683 /* Adjust the address so that the address-after-the-data is
9684 unchanged. */
9685 if (BYTES_BIG_ENDIAN)
9686 offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
9688 return adjust_address_nv (x, omode, offset);
9691 /* If X is a comparison operator, rewrite it in a new mode. This
9692 probably won't match, but may allow further simplifications. */
9693 else if (COMPARISON_P (x))
9694 return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
9696 /* If we couldn't simplify X any other way, just enclose it in a
9697 SUBREG. Normally, this SUBREG won't match, but some patterns may
9698 include an explicit SUBREG or we may simplify it further in combine. */
9699 else
9701 int offset = 0;
9702 rtx res;
9704 offset = subreg_lowpart_offset (omode, imode);
9705 if (imode == VOIDmode)
9707 imode = int_mode_for_mode (omode);
9708 x = gen_lowpart_common (imode, x);
9709 if (x == NULL)
9710 goto fail;
9712 res = simplify_gen_subreg (omode, x, imode, offset);
9713 if (res)
9714 return res;
9717 fail:
9718 return gen_rtx_CLOBBER (imode, const0_rtx);
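/* A standalone sketch of the offset computation above for a
   hypothetical fully big-endian target with 4-byte words (the macro
   and function names are illustrative only).  */

#include <assert.h>

#define UNITS_PER_WORD_EX 4
#define MAX_EX(a, b) ((a) > (b) ? (a) : (b))
#define MIN_EX(a, b) ((a) < (b) ? (a) : (b))

static int
lowpart_offset_big_endian (int isize, int osize)
{
  /* Step over whole words first (WORDS_BIG_ENDIAN)...  */
  int offset = (MAX_EX (isize, UNITS_PER_WORD_EX)
		- MAX_EX (osize, UNITS_PER_WORD_EX));
  /* ... then correct within the word (BYTES_BIG_ENDIAN).  */
  offset -= (MIN_EX (UNITS_PER_WORD_EX, osize)
	     - MIN_EX (UNITS_PER_WORD_EX, isize));
  return offset;
}

int
main (void)
{
  /* The SImode lowpart of a DImode mem is its second word.  */
  assert (lowpart_offset_big_endian (8, 4) == 4);
  /* The QImode lowpart of an SImode mem is its last byte.  */
  assert (lowpart_offset_big_endian (4, 1) == 3);
  return 0;
}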
9721 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
9722 comparison code that will be tested.
9724 The result is a possibly different comparison code to use. *POP0 and
9725 *POP1 may be updated.
9727 It is possible that we might detect that a comparison is either always
9728 true or always false. However, we do not perform general constant
9729 folding in combine, so this knowledge isn't useful. Such tautologies
9730 should have been detected earlier. Hence we ignore all such cases. */
9732 static enum rtx_code
9733 simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
9735 rtx op0 = *pop0;
9736 rtx op1 = *pop1;
9737 rtx tem, tem1;
9738 int i;
9739 enum machine_mode mode, tmode;
9741 /* Try a few ways of applying the same transformation to both operands. */
9742 while (1)
9744 #ifndef WORD_REGISTER_OPERATIONS
9745 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9746 so check specially. */
9747 if (code != GTU && code != GEU && code != LTU && code != LEU
9748 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9749 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9750 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9751 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9752 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9753 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9754 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9755 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9756 && XEXP (op0, 1) == XEXP (op1, 1)
9757 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9758 && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
9759 && (INTVAL (XEXP (op0, 1))
9760 == (GET_MODE_BITSIZE (GET_MODE (op0))
9761 - (GET_MODE_BITSIZE
9762 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9764 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9765 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9767 #endif
9769 /* If both operands are the same constant shift, see if we can ignore the
9770 shift. We can if the shift is a rotate or if the bits shifted out of
9771 this shift are known to be zero for both inputs and if the type of
9772 comparison is compatible with the shift. */
9773 if (GET_CODE (op0) == GET_CODE (op1)
9774 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9775 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9776 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9777 && (code != GT && code != LT && code != GE && code != LE))
9778 || (GET_CODE (op0) == ASHIFTRT
9779 && (code != GTU && code != LTU
9780 && code != GEU && code != LEU)))
9781 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9782 && INTVAL (XEXP (op0, 1)) >= 0
9783 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9784 && XEXP (op0, 1) == XEXP (op1, 1))
9786 enum machine_mode mode = GET_MODE (op0);
9787 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9788 int shift_count = INTVAL (XEXP (op0, 1));
9790 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9791 mask &= (mask >> shift_count) << shift_count;
9792 else if (GET_CODE (op0) == ASHIFT)
9793 mask = (mask & (mask << shift_count)) >> shift_count;
9795 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
9796 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
9797 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9798 else
9799 break;
9802 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9803 SUBREGs are of the same mode, and, in both cases, the AND would
9804 be redundant if the comparison was done in the narrower mode,
9805 do the comparison in the narrower mode (e.g., we are AND'ing with 1
9806 and the operand's possibly nonzero bits are 0xffffff01; in that case
9807 if we only care about QImode, we don't need the AND). This case
9808 occurs if the output mode of an scc insn is not SImode and
9809 STORE_FLAG_VALUE == 1 (e.g., the 386).
9811 Similarly, check for a case where the AND's are ZERO_EXTEND
9812 operations from some narrower mode even though a SUBREG is not
9813 present. */
9815 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9816 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9817 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
9819 rtx inner_op0 = XEXP (op0, 0);
9820 rtx inner_op1 = XEXP (op1, 0);
9821 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9822 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9823 int changed = 0;
9825 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9826 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9827 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9828 && (GET_MODE (SUBREG_REG (inner_op0))
9829 == GET_MODE (SUBREG_REG (inner_op1)))
9830 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
9831 <= HOST_BITS_PER_WIDE_INT)
9832 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9833 GET_MODE (SUBREG_REG (inner_op0)))))
9834 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9835 GET_MODE (SUBREG_REG (inner_op1))))))
9837 op0 = SUBREG_REG (inner_op0);
9838 op1 = SUBREG_REG (inner_op1);
9840 /* The resulting comparison is always unsigned since we masked
9841 off the original sign bit. */
9842 code = unsigned_condition (code);
9844 changed = 1;
9847 else if (c0 == c1)
9848 for (tmode = GET_CLASS_NARROWEST_MODE
9849 (GET_MODE_CLASS (GET_MODE (op0)));
9850 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9851 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
9853 op0 = gen_lowpart (tmode, inner_op0);
9854 op1 = gen_lowpart (tmode, inner_op1);
9855 code = unsigned_condition (code);
9856 changed = 1;
9857 break;
9860 if (! changed)
9861 break;
9864 /* If both operands are NOT, we can strip off the outer operation
9865 and adjust the comparison code for swapped operands; similarly for
9866 NEG, except that this must be an equality comparison. */
9867 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9868 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9869 && (code == EQ || code == NE)))
9870 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
9872 else
9873 break;
9876 /* If the first operand is a constant, swap the operands and adjust the
9877 comparison code appropriately, but don't do this if the second operand
9878 is already a constant integer. */
9879 if (swap_commutative_operands_p (op0, op1))
9881 tem = op0, op0 = op1, op1 = tem;
9882 code = swap_condition (code);
9885 /* We now enter a loop during which we will try to simplify the comparison.
9886 For the most part, we only are concerned with comparisons with zero,
9887 but some things may really be comparisons with zero but not start
9888 out looking that way. */
9890 while (GET_CODE (op1) == CONST_INT)
9892 enum machine_mode mode = GET_MODE (op0);
9893 unsigned int mode_width = GET_MODE_BITSIZE (mode);
9894 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9895 int equality_comparison_p;
9896 int sign_bit_comparison_p;
9897 int unsigned_comparison_p;
9898 HOST_WIDE_INT const_op;
9900 /* We only want to handle integral modes. This catches VOIDmode,
9901 CCmode, and the floating-point modes. An exception is that we
9902 can handle VOIDmode if OP0 is a COMPARE or a comparison
9903 operation. */
9905 if (GET_MODE_CLASS (mode) != MODE_INT
9906 && ! (mode == VOIDmode
9907 && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
9908 break;
9910 /* Get the constant we are comparing against and turn off all bits
9911 not on in our mode. */
9912 const_op = INTVAL (op1);
9913 if (mode != VOIDmode)
9914 const_op = trunc_int_for_mode (const_op, mode);
9915 op1 = GEN_INT (const_op);
9917 /* If we are comparing against a constant power of two and the value
9918 being compared can only have that single bit nonzero (e.g., it was
9919 `and'ed with that bit), we can replace this with a comparison
9920 with zero. */
9921 if (const_op
9922 && (code == EQ || code == NE || code == GE || code == GEU
9923 || code == LT || code == LTU)
9924 && mode_width <= HOST_BITS_PER_WIDE_INT
9925 && exact_log2 (const_op) >= 0
9926 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
9928 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9929 op1 = const0_rtx, const_op = 0;
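/* E.g. (eq (and X 8) (const_int 8)) becomes
   (ne (and X 8) (const_int 0)).  */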
9932 /* Similarly, if we are comparing a value known to be either -1 or
9933 0 with -1, change it to the opposite comparison against zero. */
9935 if (const_op == -1
9936 && (code == EQ || code == NE || code == GT || code == LE
9937 || code == GEU || code == LTU)
9938 && num_sign_bit_copies (op0, mode) == mode_width)
9940 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9941 op1 = const0_rtx, const_op = 0;
9944 /* Do some canonicalizations based on the comparison code. We prefer
9945 comparisons against zero and then prefer equality comparisons.
9946 If we can reduce the size of a constant, we will do that too. */
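/* For instance, (lt X (const_int 5)) becomes (le X (const_int 4)),
   (geu X (const_int 2)) becomes (gtu X (const_int 1)), and
   (leu X (const_int 0)) becomes (eq X (const_int 0)).  */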
9948 switch (code)
9950 case LT:
9951 /* < C is equivalent to <= (C - 1) */
9952 if (const_op > 0)
9954 const_op -= 1;
9955 op1 = GEN_INT (const_op);
9956 code = LE;
9957 /* ... fall through to LE case below. */
9959 else
9960 break;
9962 case LE:
9963 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9964 if (const_op < 0)
9966 const_op += 1;
9967 op1 = GEN_INT (const_op);
9968 code = LT;
9971 /* If we are doing a <= 0 comparison on a value known to have
9972 a zero sign bit, we can replace this with == 0. */
9973 else if (const_op == 0
9974 && mode_width <= HOST_BITS_PER_WIDE_INT
9975 && (nonzero_bits (op0, mode)
9976 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9977 code = EQ;
9978 break;
9980 case GE:
9981 /* >= C is equivalent to > (C - 1). */
9982 if (const_op > 0)
9984 const_op -= 1;
9985 op1 = GEN_INT (const_op);
9986 code = GT;
9987 /* ... fall through to GT below. */
9989 else
9990 break;
9992 case GT:
9993 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
9994 if (const_op < 0)
9996 const_op += 1;
9997 op1 = GEN_INT (const_op);
9998 code = GE;
10001 /* If we are doing a > 0 comparison on a value known to have
10002 a zero sign bit, we can replace this with != 0. */
10003 else if (const_op == 0
10004 && mode_width <= HOST_BITS_PER_WIDE_INT
10005 && (nonzero_bits (op0, mode)
10006 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10007 code = NE;
10008 break;
10010 case LTU:
10011 /* < C is equivalent to <= (C - 1). */
10012 if (const_op > 0)
10014 const_op -= 1;
10015 op1 = GEN_INT (const_op);
10016 code = LEU;
10017 /* ... fall through ... */
10020 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
10021 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10022 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10024 const_op = 0, op1 = const0_rtx;
10025 code = GE;
10026 break;
10028 else
10029 break;
10031 case LEU:
10032 /* unsigned <= 0 is equivalent to == 0 */
10033 if (const_op == 0)
10034 code = EQ;
10036 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
10037 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10038 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10040 const_op = 0, op1 = const0_rtx;
10041 code = GE;
10043 break;
10045 case GEU:
10046 /* >= C is equivalent to > (C - 1). */
10047 if (const_op > 1)
10049 const_op -= 1;
10050 op1 = GEN_INT (const_op);
10051 code = GTU;
10052 /* ... fall through ... */
10055 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
10056 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10057 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10059 const_op = 0, op1 = const0_rtx;
10060 code = LT;
10061 break;
10063 else
10064 break;
10066 case GTU:
10067 /* unsigned > 0 is equivalent to != 0 */
10068 if (const_op == 0)
10069 code = NE;
10071 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
10072 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10073 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10075 const_op = 0, op1 = const0_rtx;
10076 code = LT;
10078 break;
10080 default:
10081 break;
10084 /* Compute some predicates to simplify code below. */
10086 equality_comparison_p = (code == EQ || code == NE);
10087 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10088 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10089 || code == GEU);
10091 /* If this is a sign bit comparison and we can do arithmetic in
10092 MODE, say that we will only be needing the sign bit of OP0. */
10093 if (sign_bit_comparison_p
10094 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10095 op0 = force_to_mode (op0, mode,
10096 ((HOST_WIDE_INT) 1
10097 << (GET_MODE_BITSIZE (mode) - 1)),
10098 NULL_RTX, 0);
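/* Illustrative instance: if OP0 is (ior:SI Y (const_int 1)) and only
   the sign bit is needed, the IOR of the low-order bit cannot affect
   that bit, so force_to_mode can reduce OP0 to just Y.  */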
10100 /* Now try cases based on the opcode of OP0. If none of the cases
10101 does a "continue", we exit this loop immediately after the
10102 switch. */
10104 switch (GET_CODE (op0))
10106 case ZERO_EXTRACT:
10107 /* If we are extracting a single bit from a variable position in
10108 a constant that has only a single bit set and are comparing it
10109 with zero, we can convert this into an equality comparison
10110 between the position and the location of the single bit. */
10111 /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10112 have already reduced the shift count modulo the word size. */
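/* Worked example (illustrative, ignoring the BITS_BIG_ENDIAN
   adjustment below): (eq (zero_extract (const_int 4) (const_int 1) X)
   (const_int 0)) tests bit X of the constant 4 = 0b100, which is set
   only when X == 2, so it becomes (ne X (const_int 2)).  */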
10113 if (!SHIFT_COUNT_TRUNCATED
10114 && GET_CODE (XEXP (op0, 0)) == CONST_INT
10115 && XEXP (op0, 1) == const1_rtx
10116 && equality_comparison_p && const_op == 0
10117 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10119 if (BITS_BIG_ENDIAN)
10121 enum machine_mode new_mode
10122 = mode_for_extraction (EP_extzv, 1);
10123 if (new_mode == MAX_MACHINE_MODE)
10124 i = BITS_PER_WORD - 1 - i;
10125 else
10127 mode = new_mode;
10128 i = (GET_MODE_BITSIZE (mode) - 1 - i);
10132 op0 = XEXP (op0, 2);
10133 op1 = GEN_INT (i);
10134 const_op = i;
10136 /* Result is nonzero iff shift count is equal to I. */
10137 code = reverse_condition (code);
10138 continue;
10141 /* ... fall through ... */
10143 case SIGN_EXTRACT:
10144 tem = expand_compound_operation (op0);
10145 if (tem != op0)
10147 op0 = tem;
10148 continue;
10150 break;
10152 case NOT:
10153 /* If testing for equality, we can take the NOT of the constant. */
10154 if (equality_comparison_p
10155 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10157 op0 = XEXP (op0, 0);
10158 op1 = tem;
10159 continue;
10162 /* If just looking at the sign bit, reverse the sense of the
10163 comparison. */
10164 if (sign_bit_comparison_p)
10166 op0 = XEXP (op0, 0);
10167 code = (code == GE ? LT : GE);
10168 continue;
10170 break;
10172 case NEG:
10173 /* If testing for equality, we can take the NEG of the constant. */
10174 if (equality_comparison_p
10175 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10177 op0 = XEXP (op0, 0);
10178 op1 = tem;
10179 continue;
10182 /* The remaining cases only apply to comparisons with zero. */
10183 if (const_op != 0)
10184 break;
10186 /* When X is ABS or is known positive,
10187 (neg X) is < 0 if and only if X != 0. */
10189 if (sign_bit_comparison_p
10190 && (GET_CODE (XEXP (op0, 0)) == ABS
10191 || (mode_width <= HOST_BITS_PER_WIDE_INT
10192 && (nonzero_bits (XEXP (op0, 0), mode)
10193 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10195 op0 = XEXP (op0, 0);
10196 code = (code == LT ? NE : EQ);
10197 continue;
10200 /* If we have NEG of something whose two high-order bits are the
10201 same, we know that "(-a) < 0" is equivalent to "a > 0". */
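/* For example, if the two high-order bits of X are known to agree,
   (lt (neg X) (const_int 0)) becomes (gt X (const_int 0)) via the
   swap below.  */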
10202 if (num_sign_bit_copies (op0, mode) >= 2)
10204 op0 = XEXP (op0, 0);
10205 code = swap_condition (code);
10206 continue;
10208 break;
10210 case ROTATE:
10211 /* If we are testing equality and our count is a constant, we
10212 can perform the inverse operation on our RHS. */
10213 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10214 && (tem = simplify_binary_operation (ROTATERT, mode,
10215 op1, XEXP (op0, 1))) != 0)
10217 op0 = XEXP (op0, 0);
10218 op1 = tem;
10219 continue;
10222 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10223 a particular bit. Convert it to an AND of a constant of that
10224 bit. This will be converted into a ZERO_EXTRACT. */
10225 if (const_op == 0 && sign_bit_comparison_p
10226 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10227 && mode_width <= HOST_BITS_PER_WIDE_INT)
10229 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10230 ((HOST_WIDE_INT) 1
10231 << (mode_width - 1
10232 - INTVAL (XEXP (op0, 1)))));
10233 code = (code == LT ? NE : EQ);
10234 continue;
10237 /* Fall through. */
10239 case ABS:
10240 /* ABS is ignorable inside an equality comparison with zero. */
10241 if (const_op == 0 && equality_comparison_p)
10243 op0 = XEXP (op0, 0);
10244 continue;
10246 break;
10248 case SIGN_EXTEND:
10249 /* Can simplify (compare (zero/sign_extend FOO) CONST) to
10250 (compare FOO CONST) if CONST fits in FOO's mode and we
10251 are either testing inequality or have an unsigned
10252 comparison with ZERO_EXTEND or a signed comparison with
10253 SIGN_EXTEND. But don't do it if we don't have a compare
10254 insn of the given mode, since we'd have to revert it
10255 later on, and then we wouldn't know whether to sign- or
10256 zero-extend. */
10257 mode = GET_MODE (XEXP (op0, 0));
10258 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10259 && ! unsigned_comparison_p
10260 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10261 && ((unsigned HOST_WIDE_INT) const_op
10262 < (((unsigned HOST_WIDE_INT) 1
10263 << (GET_MODE_BITSIZE (mode) - 1))))
10264 && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
10266 op0 = XEXP (op0, 0);
10267 continue;
10269 break;
10271 case SUBREG:
10272 /* Check for the case where we are comparing A - C1 with C2, that is
10274 (subreg:MODE (plus (A) (-C1))) op (C2)
10276 with C1 a constant, and try to lift the SUBREG, i.e. to do the
10277 comparison in the wider mode. One of the following two conditions
10278 must be true in order for this to be valid:
10280 1. The mode extension results in the same bit pattern being added
10281 on both sides and the comparison is equality or unsigned. As
10282 C2 has been truncated to fit in MODE, the pattern can only be
10283 all 0s or all 1s.
10285 2. The mode extension results in the sign bit being copied on
10286 each side.
10288 The difficulty here is that we have predicates for A but not for
10289 (A - C1) so we need to check that C1 is within proper bounds so
10290 as to perturb A as little as possible. */
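/* Illustrative instance of condition 1, with hypothetical modes:
   (eq (subreg:QI (plus:SI A (const_int -1)) 0) (const_int 5)),
   where the nonzero bits of A fit in QImode, can be done as
   (eq (plus:SI A (const_int -1)) (const_int 5)) in SImode, since
   both sides then extend to the same SImode bit patterns.  */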
10292 if (mode_width <= HOST_BITS_PER_WIDE_INT
10293 && subreg_lowpart_p (op0)
10294 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
10295 && GET_CODE (SUBREG_REG (op0)) == PLUS
10296 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
10298 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
10299 rtx a = XEXP (SUBREG_REG (op0), 0);
10300 HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
10302 if ((c1 > 0
10303 && (unsigned HOST_WIDE_INT) c1
10304 < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
10305 && (equality_comparison_p || unsigned_comparison_p)
10306 /* (A - C1) zero-extends if it is positive and sign-extends
10307 if it is negative, C2 both zero- and sign-extends. */
10308 && ((0 == (nonzero_bits (a, inner_mode)
10309 & ~GET_MODE_MASK (mode))
10310 && const_op >= 0)
10311 /* (A - C1) sign-extends if it is positive and 1-extends
10312 if it is negative, C2 both sign- and 1-extends. */
10313 || (num_sign_bit_copies (a, inner_mode)
10314 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10315 - mode_width)
10316 && const_op < 0)))
10317 || ((unsigned HOST_WIDE_INT) c1
10318 < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
10319 /* (A - C1) always sign-extends, like C2. */
10320 && num_sign_bit_copies (a, inner_mode)
10321 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10322 - (mode_width - 1))))
10324 op0 = SUBREG_REG (op0);
10325 continue;
10329 /* If the inner mode is narrower and we are extracting the low part,
10330 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10331 if (subreg_lowpart_p (op0)
10332 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10333 /* Fall through */ ;
10334 else
10335 break;
10337 /* ... fall through ... */
10339 case ZERO_EXTEND:
10340 mode = GET_MODE (XEXP (op0, 0));
10341 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10342 && (unsigned_comparison_p || equality_comparison_p)
10343 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10344 && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
10345 && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
10347 op0 = XEXP (op0, 0);
10348 continue;
10350 break;
10352 case PLUS:
10353 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10354 this for equality comparisons due to pathological cases involving
10355 overflows. */
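/* For example, (eq (plus X (const_int 3)) (const_int 10)) becomes
   (eq X (const_int 7)): X + 3 == 10 exactly when X == 7, even in
   the presence of wrap-around.  */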
10356 if (equality_comparison_p
10357 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10358 op1, XEXP (op0, 1))))
10360 op0 = XEXP (op0, 0);
10361 op1 = tem;
10362 continue;
10365 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10366 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10367 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10369 op0 = XEXP (XEXP (op0, 0), 0);
10370 code = (code == LT ? EQ : NE);
10371 continue;
10373 break;
10375 case MINUS:
10376 /* We used to optimize signed comparisons against zero, but that
10377 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10378 arrive here as equality comparisons, while (GEU, LTU) are
10379 optimized away. No need to special-case them. */
10381 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10382 (eq B (minus A C)), whichever simplifies. We can only do
10383 this for equality comparisons due to pathological cases involving
10384 overflows. */
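/* For example, the first rewrite below turns
   (eq (minus A (const_int 4)) (const_int 6)) into
   (eq A (const_int 10)).  */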
10385 if (equality_comparison_p
10386 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10387 XEXP (op0, 1), op1)))
10389 op0 = XEXP (op0, 0);
10390 op1 = tem;
10391 continue;
10394 if (equality_comparison_p
10395 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10396 XEXP (op0, 0), op1)))
10398 op0 = XEXP (op0, 1);
10399 op1 = tem;
10400 continue;
10403 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10404 of bits in X minus 1, is one iff X > 0. */
10405 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10406 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10407 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10408 == mode_width - 1
10409 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10411 op0 = XEXP (op0, 1);
10412 code = (code == GE ? LE : GT);
10413 continue;
10415 break;
10417 case XOR:
10418 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10419 if C is zero or B is a constant. */
10420 if (equality_comparison_p
10421 && 0 != (tem = simplify_binary_operation (XOR, mode,
10422 XEXP (op0, 1), op1)))
10424 op0 = XEXP (op0, 0);
10425 op1 = tem;
10426 continue;
10428 break;
10430 case EQ: case NE:
10431 case UNEQ: case LTGT:
10432 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
10433 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
10434 case UNORDERED: case ORDERED:
10435 /* We can't do anything if OP0 is a condition code value, rather
10436 than an actual data value. */
10437 if (const_op != 0
10438 || CC0_P (XEXP (op0, 0))
10439 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10440 break;
10442 /* Get the two operands being compared. */
10443 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10444 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10445 else
10446 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10448 /* Check for the cases where we simply want the result of the
10449 earlier test or the opposite of that result. */
10450 if (code == NE || code == EQ
10451 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10452 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10453 && (STORE_FLAG_VALUE
10454 & (((HOST_WIDE_INT) 1
10455 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10456 && (code == LT || code == GE)))
10458 enum rtx_code new_code;
10459 if (code == LT || code == NE)
10460 new_code = GET_CODE (op0);
10461 else
10462 new_code = reversed_comparison_code (op0, NULL);
10464 if (new_code != UNKNOWN)
10466 code = new_code;
10467 op0 = tem;
10468 op1 = tem1;
10469 continue;
10472 break;
10474 case IOR:
10475 /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
10476 iff X <= 0. */
10477 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10478 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10479 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10481 op0 = XEXP (op0, 1);
10482 code = (code == GE ? GT : LE);
10483 continue;
10485 break;
10487 case AND:
10488 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10489 will be converted to a ZERO_EXTRACT later. */
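/* For example, (eq (and (ashift (const_int 1) X) Y) (const_int 0))
   becomes (eq (and (lshiftrt Y X) (const_int 1)) (const_int 0));
   both forms test bit X of Y.  */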
10490 if (const_op == 0 && equality_comparison_p
10491 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10492 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10494 op0 = simplify_and_const_int
10495 (op0, mode, gen_rtx_LSHIFTRT (mode,
10496 XEXP (op0, 1),
10497 XEXP (XEXP (op0, 0), 1)),
10498 (HOST_WIDE_INT) 1);
10499 continue;
10502 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10503 zero and X is a comparison and C1 and C2 describe only bits set
10504 in STORE_FLAG_VALUE, we can compare with X. */
10505 if (const_op == 0 && equality_comparison_p
10506 && mode_width <= HOST_BITS_PER_WIDE_INT
10507 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10508 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10509 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10510 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10511 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10513 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10514 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10515 if ((~STORE_FLAG_VALUE & mask) == 0
10516 && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
10517 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10518 && COMPARISON_P (tem))))
10520 op0 = XEXP (XEXP (op0, 0), 0);
10521 continue;
10525 /* If we are doing an equality comparison of an AND of a bit equal
10526 to the sign bit, replace this with a LT or GE comparison of
10527 the underlying value. */
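/* For example, in 32-bit SImode,
   (eq (and X (const_int 0x80000000)) (const_int 0)) asks whether
   the sign bit of X is clear, so it becomes (ge X (const_int 0)).  */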
10528 if (equality_comparison_p
10529 && const_op == 0
10530 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10531 && mode_width <= HOST_BITS_PER_WIDE_INT
10532 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10533 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10535 op0 = XEXP (op0, 0);
10536 code = (code == EQ ? GE : LT);
10537 continue;
10540 /* If this AND operation is really a ZERO_EXTEND from a narrower
10541 mode, the constant fits within that mode, and this is either an
10542 equality or unsigned comparison, try to do this comparison in
10543 the narrower mode. */
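/* Illustrative instance: (ltu (and X (const_int 255)) (const_int 32))
   can be narrowed to (ltu (subreg:QI X 0) (const_int 32)), since the
   AND is just a zero-extension from QImode and 32 fits in QImode.  */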
10544 if ((equality_comparison_p || unsigned_comparison_p)
10545 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10546 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10547 & GET_MODE_MASK (mode))
10548 + 1)) >= 0
10549 && const_op >> i == 0
10550 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10552 op0 = gen_lowpart (tmode, XEXP (op0, 0));
10553 continue;
10556 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
10557 fits in both M1 and M2 and the SUBREG is either paradoxical
10558 or represents the low part, permute the SUBREG and the AND
10559 and try again. */
10560 if (GET_CODE (XEXP (op0, 0)) == SUBREG)
10562 unsigned HOST_WIDE_INT c1;
10563 tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
10564 /* Require an integral mode, to avoid creating something like
10565 (AND:SF ...). */
10566 if (SCALAR_INT_MODE_P (tmode)
10567 /* It is unsafe to commute the AND into the SUBREG if the
10568 SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
10569 not defined. As originally written the upper bits
10570 have a defined value due to the AND operation.
10571 However, if we commute the AND inside the SUBREG then
10572 they no longer have defined values and the meaning of
10573 the code has been changed. */
10574 && (0
10575 #ifdef WORD_REGISTER_OPERATIONS
10576 || (mode_width > GET_MODE_BITSIZE (tmode)
10577 && mode_width <= BITS_PER_WORD)
10578 #endif
10579 || (mode_width <= GET_MODE_BITSIZE (tmode)
10580 && subreg_lowpart_p (XEXP (op0, 0))))
10581 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10582 && mode_width <= HOST_BITS_PER_WIDE_INT
10583 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
10584 && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
10585 && (c1 & ~GET_MODE_MASK (tmode)) == 0
10586 && c1 != mask
10587 && c1 != GET_MODE_MASK (tmode))
10589 op0 = simplify_gen_binary (AND, tmode,
10590 SUBREG_REG (XEXP (op0, 0)),
10591 gen_int_mode (c1, tmode));
10592 op0 = gen_lowpart (mode, op0);
10593 continue;
10597 /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */
10598 if (const_op == 0 && equality_comparison_p
10599 && XEXP (op0, 1) == const1_rtx
10600 && GET_CODE (XEXP (op0, 0)) == NOT)
10602 op0 = simplify_and_const_int
10603 (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
10604 code = (code == NE ? EQ : NE);
10605 continue;
10608 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10609 (eq (and (lshiftrt X) 1) 0).
10610 Also handle the case where (not X) is expressed using xor. */
10611 if (const_op == 0 && equality_comparison_p
10612 && XEXP (op0, 1) == const1_rtx
10613 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
10615 rtx shift_op = XEXP (XEXP (op0, 0), 0);
10616 rtx shift_count = XEXP (XEXP (op0, 0), 1);
10618 if (GET_CODE (shift_op) == NOT
10619 || (GET_CODE (shift_op) == XOR
10620 && GET_CODE (XEXP (shift_op, 1)) == CONST_INT
10621 && GET_CODE (shift_count) == CONST_INT
10622 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
10623 && (INTVAL (XEXP (shift_op, 1))
10624 == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
10626 op0 = simplify_and_const_int
10627 (NULL_RTX, mode,
10628 gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
10629 (HOST_WIDE_INT) 1);
10630 code = (code == NE ? EQ : NE);
10631 continue;
10634 break;
10636 case ASHIFT:
10637 /* If we have (compare (ashift FOO N) (const_int C)) and
10638 the high order N bits of FOO (N+1 if an inequality comparison)
10639 are known to be zero, we can do this by comparing FOO with C
10640 shifted right N bits so long as the low-order N bits of C are
10641 zero. */
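/* For example, assuming the top three bits of X are known to be zero,
   (eq (ashift X (const_int 3)) (const_int 40)) becomes
   (eq X (const_int 5)), 40 having its low three bits clear.  */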
10642 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10643 && INTVAL (XEXP (op0, 1)) >= 0
10644 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10645 < HOST_BITS_PER_WIDE_INT)
10646 && ((const_op
10647 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10648 && mode_width <= HOST_BITS_PER_WIDE_INT
10649 && (nonzero_bits (XEXP (op0, 0), mode)
10650 & ~(mask >> (INTVAL (XEXP (op0, 1))
10651 + ! equality_comparison_p))) == 0)
10653 /* We must perform a logical shift, not an arithmetic one,
10654 as we want the top N bits of C to be zero. */
10655 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10657 temp >>= INTVAL (XEXP (op0, 1));
10658 op1 = gen_int_mode (temp, mode);
10659 op0 = XEXP (op0, 0);
10660 continue;
10663 /* If we are doing a sign bit comparison, it means we are testing
10664 a particular bit. Convert it to the appropriate AND. */
10665 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10666 && mode_width <= HOST_BITS_PER_WIDE_INT)
10668 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10669 ((HOST_WIDE_INT) 1
10670 << (mode_width - 1
10671 - INTVAL (XEXP (op0, 1)))));
10672 code = (code == LT ? NE : EQ);
10673 continue;
10676 /* If this is an equality comparison with zero and we are shifting
10677 the low bit to the sign bit, we can convert this to an AND of the
10678 low-order bit. */
10679 if (const_op == 0 && equality_comparison_p
10680 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10681 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10682 == mode_width - 1)
10684 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10685 (HOST_WIDE_INT) 1);
10686 continue;
10688 break;
10690 case ASHIFTRT:
10691 /* If this is an equality comparison with zero, we can do this
10692 as a logical shift, which might be much simpler. */
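/* For example, (eq (ashiftrt X (const_int 4)) (const_int 0)) holds
   exactly when (eq (lshiftrt X (const_int 4)) (const_int 0)) does:
   both ask whether every bit of X above the low four is zero.  */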
10693 if (equality_comparison_p && const_op == 0
10694 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10696 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10697 XEXP (op0, 0),
10698 INTVAL (XEXP (op0, 1)));
10699 continue;
10702 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10703 do the comparison in a narrower mode. */
10704 if (! unsigned_comparison_p
10705 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10706 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10707 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10708 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10709 MODE_INT, 1)) != BLKmode
10710 && (((unsigned HOST_WIDE_INT) const_op
10711 + (GET_MODE_MASK (tmode) >> 1) + 1)
10712 <= GET_MODE_MASK (tmode)))
10714 op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
10715 continue;
10718 /* Likewise if OP0 is a PLUS of a sign extension with a
10719 constant, which is usually represented with the PLUS
10720 between the shifts. */
10721 if (! unsigned_comparison_p
10722 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10723 && GET_CODE (XEXP (op0, 0)) == PLUS
10724 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10725 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10726 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10727 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10728 MODE_INT, 1)) != BLKmode
10729 && (((unsigned HOST_WIDE_INT) const_op
10730 + (GET_MODE_MASK (tmode) >> 1) + 1)
10731 <= GET_MODE_MASK (tmode)))
10733 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10734 rtx add_const = XEXP (XEXP (op0, 0), 1);
10735 rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
10736 add_const, XEXP (op0, 1));
10738 op0 = simplify_gen_binary (PLUS, tmode,
10739 gen_lowpart (tmode, inner),
10740 new_const);
10741 continue;
10744 /* ... fall through ... */
10745 case LSHIFTRT:
10746 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10747 the low order N bits of FOO are known to be zero, we can do this
10748 by comparing FOO with C shifted left N bits so long as no
10749 overflow occurs. */
10750 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10751 && INTVAL (XEXP (op0, 1)) >= 0
10752 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10753 && mode_width <= HOST_BITS_PER_WIDE_INT
10754 && (nonzero_bits (XEXP (op0, 0), mode)
10755 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10756 && (((unsigned HOST_WIDE_INT) const_op
10757 + (GET_CODE (op0) != LSHIFTRT
10758 ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
10759 + 1)
10760 : 0))
10761 <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
10763 /* If the shift was logical, then we must make the condition
10764 unsigned. */
10765 if (GET_CODE (op0) == LSHIFTRT)
10766 code = unsigned_condition (code);
10768 const_op <<= INTVAL (XEXP (op0, 1));
10769 op1 = GEN_INT (const_op);
10770 op0 = XEXP (op0, 0);
10771 continue;
10774 /* If we are using this shift to extract just the sign bit, we
10775 can replace this with an LT or GE comparison. */
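/* For example, in 32-bit SImode,
   (ne (lshiftrt X (const_int 31)) (const_int 0)) isolates the sign
   bit of X, so it becomes (lt X (const_int 0)).  */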
10776 if (const_op == 0
10777 && (equality_comparison_p || sign_bit_comparison_p)
10778 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10779 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10780 == mode_width - 1)
10782 op0 = XEXP (op0, 0);
10783 code = (code == NE || code == GT ? LT : GE);
10784 continue;
10786 break;
10788 default:
10789 break;
10792 break;
10795 /* Now make any compound operations involved in this comparison. Then,
10796 check for an outermost SUBREG on OP0 that is not doing anything or is
10797 paradoxical. The latter transformation must only be performed when
10798 it is known that the "extra" bits will be the same in op0 and op1 or
10799 that they don't matter. There are three cases to consider:
10801 1. SUBREG_REG (op0) is a register. In this case the bits are don't
10802 care bits and we can assume they have any convenient value. So
10803 making the transformation is safe.
10805 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
10806 In this case the upper bits of op0 are undefined. We should not make
10807 the simplification in that case as we do not know the contents of
10808 those bits.
10810 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
10811 UNKNOWN. In that case we know those bits are zeros or ones. We must
10812 also be sure that they are the same as the upper bits of op1.
10814 We can never remove a SUBREG for a non-equality comparison because
10815 the sign bit is in a different place in the underlying object. */
10817 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10818 op1 = make_compound_operation (op1, SET);
10820 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10821 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10822 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
10823 && (code == NE || code == EQ))
10825 if (GET_MODE_SIZE (GET_MODE (op0))
10826 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
10828 /* For paradoxical subregs, allow case 1 as above. Case 3 isn't
10829 implemented. */
10830 if (REG_P (SUBREG_REG (op0)))
10832 op0 = SUBREG_REG (op0);
10833 op1 = gen_lowpart (GET_MODE (op0), op1);
10836 else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10837 <= HOST_BITS_PER_WIDE_INT)
10838 && (nonzero_bits (SUBREG_REG (op0),
10839 GET_MODE (SUBREG_REG (op0)))
10840 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10842 tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
10844 if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10845 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10846 op0 = SUBREG_REG (op0), op1 = tem;
10850 /* We now do the opposite procedure: Some machines don't have compare
10851 insns in all modes. If OP0's mode is an integer mode smaller than a
10852 word and we can't do a compare in that mode, see if there is a larger
10853 mode for which we can do the compare. There are a number of cases in
10854 which we can use the wider mode. */
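/* Illustrative case: if the target lacks a QImode compare pattern but
   has one for SImode, an equality test of a QImode value whose upper
   SImode bits are known to be zero can simply be widened to and
   performed in SImode.  */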
10856 mode = GET_MODE (op0);
10857 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10858 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10859 && ! have_insn_for (COMPARE, mode))
10860 for (tmode = GET_MODE_WIDER_MODE (mode);
10861 (tmode != VOIDmode
10862 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10863 tmode = GET_MODE_WIDER_MODE (tmode))
10864 if (have_insn_for (COMPARE, tmode))
10866 int zero_extended;
10868 /* If the only nonzero bits in OP0 and OP1 are those in the
10869 narrower mode and this is an equality or unsigned comparison,
10870 we can use the wider mode. Similarly for sign-extended
10871 values, in which case it is true for all comparisons. */
10872 zero_extended = ((code == EQ || code == NE
10873 || code == GEU || code == GTU
10874 || code == LEU || code == LTU)
10875 && (nonzero_bits (op0, tmode)
10876 & ~GET_MODE_MASK (mode)) == 0
10877 && ((GET_CODE (op1) == CONST_INT
10878 || (nonzero_bits (op1, tmode)
10879 & ~GET_MODE_MASK (mode)) == 0)));
10881 if (zero_extended
10882 || ((num_sign_bit_copies (op0, tmode)
10883 > (unsigned int) (GET_MODE_BITSIZE (tmode)
10884 - GET_MODE_BITSIZE (mode)))
10885 && (num_sign_bit_copies (op1, tmode)
10886 > (unsigned int) (GET_MODE_BITSIZE (tmode)
10887 - GET_MODE_BITSIZE (mode)))))
10889 /* If OP0 is an AND and we don't have an AND in MODE either,
10890 make a new AND in the proper mode. */
10891 if (GET_CODE (op0) == AND
10892 && !have_insn_for (AND, mode))
10893 op0 = simplify_gen_binary (AND, tmode,
10894 gen_lowpart (tmode,
10895 XEXP (op0, 0)),
10896 gen_lowpart (tmode,
10897 XEXP (op0, 1)));
10899 op0 = gen_lowpart (tmode, op0);
10900 if (zero_extended && GET_CODE (op1) == CONST_INT)
10901 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
10902 op1 = gen_lowpart (tmode, op1);
10903 break;
10906 /* If this is a test for negative, we can make an explicit
10907 test of the sign bit. */
10909 if (op1 == const0_rtx && (code == LT || code == GE)
10910 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10912 op0 = simplify_gen_binary (AND, tmode,
10913 gen_lowpart (tmode, op0),
10914 GEN_INT ((HOST_WIDE_INT) 1
10915 << (GET_MODE_BITSIZE (mode)
10916 - 1)));
10917 code = (code == LT) ? NE : EQ;
10918 break;
10922 #ifdef CANONICALIZE_COMPARISON
10923 /* If this machine only supports a subset of valid comparisons, see if we
10924 can convert an unsupported one into a supported one. */
10925 CANONICALIZE_COMPARISON (code, op0, op1);
10926 #endif
10928 *pop0 = op0;
10929 *pop1 = op1;
10931 return code;
10934 /* Utility function for record_value_for_reg. Count number of
10935 rtxs in X. */
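/* For example, for (plus (reg A) (reg A)) where both operands are the
   same shared rtx, this returns 1 + 2 * count_rtxs (reg A) = 3,
   deliberately counting the shared operand twice.  */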
10936 static int
10937 count_rtxs (rtx x)
10939 enum rtx_code code = GET_CODE (x);
10940 const char *fmt;
10941 int i, ret = 1;
10943 if (ARITHMETIC_P (x))
10946 rtx x0 = XEXP (x, 0);
10947 rtx x1 = XEXP (x, 1);
10949 if (x0 == x1)
10950 return 1 + 2 * count_rtxs (x0);
10952 if (ARITHMETIC_P (x1)
10954 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
10955 return 2 + 2 * count_rtxs (x0)
10956 + count_rtxs (x0 == XEXP (x1, 0)
10957 ? XEXP (x1, 1) : XEXP (x1, 0));
10959 if (ARITHMETIC_P (x0)
10961 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
10962 return 2 + 2 * count_rtxs (x1)
10963 + count_rtxs (x1 == XEXP (x0, 0)
10964 ? XEXP (x0, 1) : XEXP (x0, 0));
10967 fmt = GET_RTX_FORMAT (code);
10968 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10969 if (fmt[i] == 'e')
10970 ret += count_rtxs (XEXP (x, i));
10972 return ret;
10975 /* Utility function for following routine. Called when X is part of a value
10976 being stored into last_set_value. Sets last_set_table_tick
10977 for each register mentioned. Similar to mention_regs in cse.c */
10979 static void
10980 update_table_tick (rtx x)
10982 enum rtx_code code = GET_CODE (x);
10983 const char *fmt = GET_RTX_FORMAT (code);
10984 int i;
10986 if (code == REG)
10988 unsigned int regno = REGNO (x);
10989 unsigned int endregno
10990 = regno + (regno < FIRST_PSEUDO_REGISTER
10991 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
10992 unsigned int r;
10994 for (r = regno; r < endregno; r++)
10995 reg_stat[r].last_set_table_tick = label_tick;
10997 return;
11000 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11001 /* Note that we can't have an "E" in values stored; see
11002 get_last_value_validate. */
11003 if (fmt[i] == 'e')
11005 /* Check for identical subexpressions. If x contains
11006 identical subexpressions we only have to traverse one of
11007 them. */
11008 if (i == 0 && ARITHMETIC_P (x))
11010 /* Note that at this point x1 has already been
11011 processed. */
11012 rtx x0 = XEXP (x, 0);
11013 rtx x1 = XEXP (x, 1);
11015 /* If x0 and x1 are identical then there is no need to
11016 process x0. */
11017 if (x0 == x1)
11018 break;
11020 /* If x0 is identical to a subexpression of x1 then while
11021 processing x1, x0 has already been processed. Thus we
11022 are done with x. */
11023 if (ARITHMETIC_P (x1)
11024 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11025 break;
11027 /* If x1 is identical to a subexpression of x0 then we
11028 still have to process the rest of x0. */
11029 if (ARITHMETIC_P (x0)
11030 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11032 update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11033 break;
11037 update_table_tick (XEXP (x, i));
11041 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11042 are saying that the register is clobbered and we no longer know its
11043 value. If INSN is zero, don't update reg_stat[].last_set; this is
11044 only permitted with VALUE also zero and is used to invalidate the
11045 register. */
11047 static void
11048 record_value_for_reg (rtx reg, rtx insn, rtx value)
11050 unsigned int regno = REGNO (reg);
11051 unsigned int endregno
11052 = regno + (regno < FIRST_PSEUDO_REGISTER
11053 ? hard_regno_nregs[regno][GET_MODE (reg)] : 1);
11054 unsigned int i;
11056 /* If VALUE contains REG and we have a previous value for REG, substitute
11057 the previous value. */
11058 if (value && insn && reg_overlap_mentioned_p (reg, value))
11060 rtx tem;
11062 /* Set things up so get_last_value is allowed to see anything set up to
11063 our insn. */
11064 subst_low_cuid = INSN_CUID (insn);
11065 tem = get_last_value (reg);
11067 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11068 it isn't going to be useful and will take a lot of time to process,
11069 so just use the CLOBBER. */
11071 if (tem)
11073 if (ARITHMETIC_P (tem)
11074 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11075 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11076 tem = XEXP (tem, 0);
11077 else if (count_occurrences (value, reg, 1) >= 2)
11079 /* If there are two or more occurrences of REG in VALUE,
11080 prevent the value from growing too much. */
11081 if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
11082 tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
11085 value = replace_rtx (copy_rtx (value), reg, tem);
11089 /* For each register modified, show we don't know its value, that
11090 we don't know about its bitwise content, that its value has been
11091 updated, and that we don't know the location of the death of the
11092 register. */
11093 for (i = regno; i < endregno; i++)
11095 if (insn)
11096 reg_stat[i].last_set = insn;
11098 reg_stat[i].last_set_value = 0;
11099 reg_stat[i].last_set_mode = 0;
11100 reg_stat[i].last_set_nonzero_bits = 0;
11101 reg_stat[i].last_set_sign_bit_copies = 0;
11102 reg_stat[i].last_death = 0;
11105 /* Mark registers that are being referenced in this value. */
11106 if (value)
11107 update_table_tick (value);
11109 /* Now update the status of each register being set.
11110 If someone is using this register in this block, mark this register
11111 as invalid, since otherwise we would confuse its two lives within the
11112 basic block. This makes every later use of the register invalid. In
11113 cse, we scan the table to invalidate all entries using this register,
11114 but this is too much work for us. */
11116 for (i = regno; i < endregno; i++)
11118 reg_stat[i].last_set_label = label_tick;
11119 if (value && reg_stat[i].last_set_table_tick == label_tick)
11120 reg_stat[i].last_set_invalid = 1;
11121 else
11122 reg_stat[i].last_set_invalid = 0;
11125 /* The value being assigned might refer to X (like in "x++;"). In that
11126 case, we must replace it with (clobber (const_int 0)) to prevent
11127 infinite loops. */
11128 if (value && ! get_last_value_validate (&value, insn,
11129 reg_stat[regno].last_set_label, 0))
11131 value = copy_rtx (value);
11132 if (! get_last_value_validate (&value, insn,
11133 reg_stat[regno].last_set_label, 1))
11134 value = 0;
11137 /* For the main register being modified, update the value, the mode, the
11138 nonzero bits, and the number of sign bit copies. */
11140 reg_stat[regno].last_set_value = value;
11142 if (value)
11144 enum machine_mode mode = GET_MODE (reg);
11145 subst_low_cuid = INSN_CUID (insn);
11146 reg_stat[regno].last_set_mode = mode;
11147 if (GET_MODE_CLASS (mode) == MODE_INT
11148 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11149 mode = nonzero_bits_mode;
11150 reg_stat[regno].last_set_nonzero_bits = nonzero_bits (value, mode);
11151 reg_stat[regno].last_set_sign_bit_copies
11152 = num_sign_bit_copies (value, GET_MODE (reg));
11156 /* Called via note_stores from record_dead_and_set_regs to handle one
11157 SET or CLOBBER in an insn. DATA is the instruction in which the
11158 set is occurring. */
11160 static void
11161 record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
11163 rtx record_dead_insn = (rtx) data;
11165 if (GET_CODE (dest) == SUBREG)
11166 dest = SUBREG_REG (dest);
11168 if (REG_P (dest))
11170 /* If we are setting the whole register, we know its value. Otherwise
11171 show that we don't know the value. We can handle SUBREG in
11172 some cases. */
11173 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11174 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11175 else if (GET_CODE (setter) == SET
11176 && GET_CODE (SET_DEST (setter)) == SUBREG
11177 && SUBREG_REG (SET_DEST (setter)) == dest
11178 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11179 && subreg_lowpart_p (SET_DEST (setter)))
11180 record_value_for_reg (dest, record_dead_insn,
11181 gen_lowpart (GET_MODE (dest),
11182 SET_SRC (setter)));
11183 else
11184 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11186 else if (MEM_P (dest)
11187 /* Ignore pushes, they clobber nothing. */
11188 && ! push_operand (dest, GET_MODE (dest)))
11189 mem_last_set = INSN_CUID (record_dead_insn);
11192 /* Update the records of when each REG was most recently set or killed
11193 for the things done by INSN. This is the last thing done in processing
11194 INSN in the combiner loop.
11196 We update reg_stat[], in particular fields last_set, last_set_value,
11197 last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
11198 last_death, and also the similar information mem_last_set (which insn
11199 most recently modified memory) and last_call_cuid (which insn was the
11200 most recent subroutine call). */
11202 static void
11203 record_dead_and_set_regs (rtx insn)
11205 rtx link;
11206 unsigned int i;
11208 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11210 if (REG_NOTE_KIND (link) == REG_DEAD
11211 && REG_P (XEXP (link, 0)))
11213 unsigned int regno = REGNO (XEXP (link, 0));
11214 unsigned int endregno
11215 = regno + (regno < FIRST_PSEUDO_REGISTER
11216 ? hard_regno_nregs[regno][GET_MODE (XEXP (link, 0))]
11217 : 1);
11219 for (i = regno; i < endregno; i++)
11220 reg_stat[i].last_death = insn;
11222 else if (REG_NOTE_KIND (link) == REG_INC)
11223 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11226 if (CALL_P (insn))
11228 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11229 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11231 reg_stat[i].last_set_value = 0;
11232 reg_stat[i].last_set_mode = 0;
11233 reg_stat[i].last_set_nonzero_bits = 0;
11234 reg_stat[i].last_set_sign_bit_copies = 0;
11235 reg_stat[i].last_death = 0;
11238 last_call_cuid = mem_last_set = INSN_CUID (insn);
11240 /* Don't bother recording what this insn does. It might set the
11241 return value register, but we can't combine into a call
11242 pattern anyway, so there's no point trying (and it may cause
11243 a crash, if e.g. we wind up asking for last_set_value of a
11244 SUBREG of the return value register). */
11245 return;
11248 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11251 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11252 register present in the SUBREG, so for each such SUBREG go back and
11253 adjust nonzero and sign bit information of the registers that are
11254 known to have some zero/sign bits set.
11256 This is needed because when combine blows the SUBREGs away, the
11257 information on zero/sign bits is lost and further combines can be
11258 missed because of that. */
11260 static void
11261 record_promoted_value (rtx insn, rtx subreg)
11263 rtx links, set;
11264 unsigned int regno = REGNO (SUBREG_REG (subreg));
11265 enum machine_mode mode = GET_MODE (subreg);
11267 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11268 return;
11270 for (links = LOG_LINKS (insn); links;)
11272 insn = XEXP (links, 0);
11273 set = single_set (insn);
11275 if (! set || !REG_P (SET_DEST (set))
11276 || REGNO (SET_DEST (set)) != regno
11277 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11279 links = XEXP (links, 1);
11280 continue;
11283 if (reg_stat[regno].last_set == insn)
11285 if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11286 reg_stat[regno].last_set_nonzero_bits &= GET_MODE_MASK (mode);
11289 if (REG_P (SET_SRC (set)))
11291 regno = REGNO (SET_SRC (set));
11292 links = LOG_LINKS (insn);
11294 else
11295 break;
11299 /* Scan X for promoted SUBREGs. For each one found,
11300 note what it implies to the registers used in it. */
11302 static void
11303 check_promoted_subreg (rtx insn, rtx x)
11305 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11306 && REG_P (SUBREG_REG (x)))
11307 record_promoted_value (insn, x);
11308 else
11310 const char *format = GET_RTX_FORMAT (GET_CODE (x));
11311 int i, j;
11313 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11314 switch (format[i])
11316 case 'e':
11317 check_promoted_subreg (insn, XEXP (x, i));
11318 break;
11319 case 'V':
11320 case 'E':
11321 if (XVEC (x, i) != 0)
11322 for (j = 0; j < XVECLEN (x, i); j++)
11323 check_promoted_subreg (insn, XVECEXP (x, i, j));
11324 break;
11329 /* Utility routine for the following function. Verify that all the registers
11330 mentioned in *LOC are valid when *LOC was part of a value set when
11331 label_tick == TICK. Return 0 if some are not.
11333 If REPLACE is nonzero, replace the invalid reference with
11334 (clobber (const_int 0)) and return 1. This replacement is useful because
11335 we often can get useful information about the form of a value (e.g., if
11336 it was produced by a shift that always produces -1 or 0) even though
11337 we don't know exactly what registers it was produced from. */
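/* For example, if (plus (reg 5) (const_int 1)) was recorded but reg 5
   has since become invalid, calling this with REPLACE nonzero yields
   (plus (clobber (const_int 0)) (const_int 1)), which still shows the
   overall form of the value.  */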
11339 static int
11340 get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
11342 rtx x = *loc;
11343 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11344 int len = GET_RTX_LENGTH (GET_CODE (x));
11345 int i;
11347 if (REG_P (x))
11349 unsigned int regno = REGNO (x);
11350 unsigned int endregno
11351 = regno + (regno < FIRST_PSEUDO_REGISTER
11352 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11353 unsigned int j;
11355 for (j = regno; j < endregno; j++)
11356 if (reg_stat[j].last_set_invalid
11357 /* If this is a pseudo-register that was only set once and not
11358 live at the beginning of the function, it is always valid. */
11359 || (! (regno >= FIRST_PSEUDO_REGISTER
11360 && REG_N_SETS (regno) == 1
11361 && (! REGNO_REG_SET_P
11362 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
11363 regno)))
11364 && reg_stat[j].last_set_label > tick))
11366 if (replace)
11367 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11368 return replace;
11371 return 1;
11373 /* If this is a memory reference, make sure that there were
11374 no stores after it that might have clobbered the value. We don't
11375 have alias info, so we assume any store invalidates it. */
11376 else if (MEM_P (x) && !MEM_READONLY_P (x)
11377 && INSN_CUID (insn) <= mem_last_set)
11379 if (replace)
11380 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11381 return replace;
11384 for (i = 0; i < len; i++)
11386 if (fmt[i] == 'e')
11388 /* Check for identical subexpressions. If x contains
11389 identical subexpressions we only have to traverse one of
11390 them. */
11391 if (i == 1 && ARITHMETIC_P (x))
11393 /* Note that at this point x0 has already been checked
11394 and found valid. */
11395 rtx x0 = XEXP (x, 0);
11396 rtx x1 = XEXP (x, 1);
11398 /* If x0 and x1 are identical then x is also valid. */
11399 if (x0 == x1)
11400 return 1;
11402 /* If x1 is identical to a subexpression of x0 then
11403 while checking x0, x1 has already been checked. Thus
11404 it is valid and so is x. */
11405 if (ARITHMETIC_P (x0)
11406 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11407 return 1;
11409 /* If x0 is identical to a subexpression of x1 then x is
11410 valid iff the rest of x1 is valid. */
11411 if (ARITHMETIC_P (x1)
11412 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11413 return
11414 get_last_value_validate (&XEXP (x1,
11415 x0 == XEXP (x1, 0) ? 1 : 0),
11416 insn, tick, replace);
11419 if (get_last_value_validate (&XEXP (x, i), insn, tick,
11420 replace) == 0)
11421 return 0;
11423 /* Don't bother with these. They shouldn't occur anyway. */
11424 else if (fmt[i] == 'E')
11425 return 0;
11428 /* If we haven't found a reason for it to be invalid, it is valid. */
11429 return 1;
11432 /* Get the last value assigned to X, if known. Some registers
11433 in the value may be replaced with (clobber (const_int 0)) if their value
11434 is no longer known reliably. */
11436 static rtx
11437 get_last_value (rtx x)
11439 unsigned int regno;
11440 rtx value;
11442 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11443 then convert it to the desired mode. If this is a paradoxical SUBREG,
11444 we cannot predict what values the "extra" bits might have. */
11445 if (GET_CODE (x) == SUBREG
11446 && subreg_lowpart_p (x)
11447 && (GET_MODE_SIZE (GET_MODE (x))
11448 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11449 && (value = get_last_value (SUBREG_REG (x))) != 0)
11450 return gen_lowpart (GET_MODE (x), value);
11452 if (!REG_P (x))
11453 return 0;
11455 regno = REGNO (x);
11456 value = reg_stat[regno].last_set_value;
11458 /* If we don't have a value, or if it isn't for this basic block and
11459 it's either a hard register, set more than once, or it's live
11460 at the beginning of the function, return 0.
11462 Because if it's not live at the beginning of the function then the reg
11463 is always set before being used (is never used without being set).
11464 And, if it's set only once, and it's always set before use, then all
11465 uses must have the same last value, even if it's not from this basic
11466 block. */
11468 if (value == 0
11469 || (reg_stat[regno].last_set_label != label_tick
11470 && (regno < FIRST_PSEUDO_REGISTER
11471 || REG_N_SETS (regno) != 1
11472 || (REGNO_REG_SET_P
11473 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
11474 regno)))))
11475 return 0;
11477 /* If the value was set in a later insn than the ones we are processing,
11478 we can't use it even if the register was only set once. */
11479 if (INSN_CUID (reg_stat[regno].last_set) >= subst_low_cuid)
11480 return 0;
11482 /* If the value has all its registers valid, return it. */
11483 if (get_last_value_validate (&value, reg_stat[regno].last_set,
11484 reg_stat[regno].last_set_label, 0))
11485 return value;
11487 /* Otherwise, make a copy and replace any invalid register with
11488 (clobber (const_int 0)). If that fails for some reason, return 0. */
11490 value = copy_rtx (value);
11491 if (get_last_value_validate (&value, reg_stat[regno].last_set,
11492 reg_stat[regno].last_set_label, 1))
11493 return value;
11495 return 0;
11498 /* Return nonzero if expression X refers to a REG or to memory
11499 that is set in an instruction more recent than FROM_CUID. */
11501 static int
11502 use_crosses_set_p (rtx x, int from_cuid)
11504 const char *fmt;
11505 int i;
11506 enum rtx_code code = GET_CODE (x);
11508 if (code == REG)
11510 unsigned int regno = REGNO (x);
11511 unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11512 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11514 #ifdef PUSH_ROUNDING
11515 /* Don't allow uses of the stack pointer to be moved,
11516 because we don't know whether the move crosses a push insn. */
11517 if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11518 return 1;
11519 #endif
11520 for (; regno < endreg; regno++)
11521 if (reg_stat[regno].last_set
11522 && INSN_CUID (reg_stat[regno].last_set) > from_cuid)
11523 return 1;
11524 return 0;
11527 if (code == MEM && mem_last_set > from_cuid)
11528 return 1;
11530 fmt = GET_RTX_FORMAT (code);
11532 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11534 if (fmt[i] == 'E')
11536 int j;
11537 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11538 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11539 return 1;
11541 else if (fmt[i] == 'e'
11542 && use_crosses_set_p (XEXP (x, i), from_cuid))
11543 return 1;
11545 return 0;
11548 /* Define three variables used for communication between the following
11549 routines. */
11551 static unsigned int reg_dead_regno, reg_dead_endregno;
11552 static int reg_dead_flag;
11554 /* Function called via note_stores from reg_dead_at_p.
11556 If DEST is within [reg_dead_regno, reg_dead_endregno), set
11557 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
11559 static void
11560 reg_dead_at_p_1 (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
11562 unsigned int regno, endregno;
11564 if (!REG_P (dest))
11565 return;
11567 regno = REGNO (dest);
11568 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11569 ? hard_regno_nregs[regno][GET_MODE (dest)] : 1);
11571 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11572 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11575 /* Return nonzero if REG is known to be dead at INSN.
11577 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
11578 referencing REG, it is dead. If we hit a SET referencing REG, it is
11579 live. Otherwise, see if it is live or dead at the start of the basic
11580 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
11581 must be assumed to be always live. */
11583 static int
11584 reg_dead_at_p (rtx reg, rtx insn)
11586 basic_block block;
11587 unsigned int i;
11589 /* Set variables for reg_dead_at_p_1. */
11590 reg_dead_regno = REGNO (reg);
11591 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11592 ? hard_regno_nregs[reg_dead_regno]
11593 [GET_MODE (reg)]
11594 : 1);
11596 reg_dead_flag = 0;
11598 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. For fixed registers
11599 we allow the machine description to decide whether use-and-clobber
11600 patterns are OK. */
11601 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11603 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11604 if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
11605 return 0;
11608 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11609 beginning of function. */
11610 for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
11611 insn = prev_nonnote_insn (insn))
11613 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11614 if (reg_dead_flag)
11615 return reg_dead_flag == 1 ? 1 : 0;
11617 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11618 return 1;
11621 /* Get the basic block that we were in. */
11622 if (insn == 0)
11623 block = ENTRY_BLOCK_PTR->next_bb;
11624 else
11626 FOR_EACH_BB (block)
11627 if (insn == BB_HEAD (block))
11628 break;
11630 if (block == EXIT_BLOCK_PTR)
11631 return 0;
11634 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11635 if (REGNO_REG_SET_P (block->il.rtl->global_live_at_start, i))
11636 return 0;
11638 return 1;
11641 /* Note hard registers in X that are used. This code is similar to
11642 that in flow.c, but much simpler since we don't care about pseudos. */
11644 static void
11645 mark_used_regs_combine (rtx x)
11647 RTX_CODE code = GET_CODE (x);
11648 unsigned int regno;
11649 int i;
11651 switch (code)
11653 case LABEL_REF:
11654 case SYMBOL_REF:
11655 case CONST_INT:
11656 case CONST:
11657 case CONST_DOUBLE:
11658 case CONST_VECTOR:
11659 case PC:
11660 case ADDR_VEC:
11661 case ADDR_DIFF_VEC:
11662 case ASM_INPUT:
11663 #ifdef HAVE_cc0
11664 /* CC0 must die in the insn after it is set, so we don't need to take
11665 special note of it here. */
11666 case CC0:
11667 #endif
11668 return;
11670 case CLOBBER:
11671 /* If we are clobbering a MEM, mark any hard registers inside the
11672 address as used. */
11673 if (MEM_P (XEXP (x, 0)))
11674 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11675 return;
11677 case REG:
11678 regno = REGNO (x);
11679 /* A hard reg in a wide mode may really be multiple registers.
11680 If so, mark all of them just like the first. */
11681 if (regno < FIRST_PSEUDO_REGISTER)
11683 unsigned int endregno, r;
11685 /* None of this applies to the stack, frame or arg pointers. */
11686 if (regno == STACK_POINTER_REGNUM
11687 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11688 || regno == HARD_FRAME_POINTER_REGNUM
11689 #endif
11690 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11691 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11692 #endif
11693 || regno == FRAME_POINTER_REGNUM)
11694 return;
11696 endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
11697 for (r = regno; r < endregno; r++)
11698 SET_HARD_REG_BIT (newpat_used_regs, r);
11700 return;
11702 case SET:
11704 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11705 the address. */
11706 rtx testreg = SET_DEST (x);
11708 while (GET_CODE (testreg) == SUBREG
11709 || GET_CODE (testreg) == ZERO_EXTRACT
11710 || GET_CODE (testreg) == STRICT_LOW_PART)
11711 testreg = XEXP (testreg, 0);
11713 if (MEM_P (testreg))
11714 mark_used_regs_combine (XEXP (testreg, 0));
11716 mark_used_regs_combine (SET_SRC (x));
11718 return;
11720 default:
11721 break;
11724 /* Recursively scan the operands of this expression. */
11727 const char *fmt = GET_RTX_FORMAT (code);
11729 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11731 if (fmt[i] == 'e')
11732 mark_used_regs_combine (XEXP (x, i));
11733 else if (fmt[i] == 'E')
11735 int j;
11737 for (j = 0; j < XVECLEN (x, i); j++)
11738 mark_used_regs_combine (XVECEXP (x, i, j));
11744 /* Remove register number REGNO from the dead registers list of INSN.
11746 Return the note used to record the death, if there was one. */
11748 rtx
11749 remove_death (unsigned int regno, rtx insn)
11751 rtx note = find_regno_note (insn, REG_DEAD, regno);
11753 if (note)
11755 REG_N_DEATHS (regno)--;
11756 remove_note (insn, note);
11759 return note;
11762 /* For each register (hardware or pseudo) used within expression X, if its
11763 death is in an instruction with cuid between FROM_CUID (inclusive) and
11764 TO_INSN (exclusive), put a REG_DEAD note for that register in the
11765 list headed by PNOTES.
11767 That said, don't move registers killed by maybe_kill_insn.
11769 This is done when X is being merged by combination into TO_INSN. These
11770 notes will then be distributed as needed. */
static void
move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
	     rtx *pnotes)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      rtx where_dead = reg_stat[regno].last_death;
      rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && ! reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);

      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      unsigned int deadregno = REGNO (XEXP (note, 0));
	      unsigned int deadend
		= (deadregno + hard_regno_nregs[deadregno]
					       [GET_MODE (XEXP (note, 0))]);
	      unsigned int ourend
		= regno + hard_regno_nregs[regno][GET_MODE (x)];
	      unsigned int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 regno_reg_rtx[i],
					 REG_NOTES (where_dead));
	    }

	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
	    {
	      unsigned int ourend
		= regno + hard_regno_nregs[regno][GET_MODE (x)];
	      unsigned int i, offset;
	      rtx oldnotes = 0;

	      if (note)
		offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (regno_reg_rtx[i],
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }
  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some piece of
	 every register in the expression is used by this insn, so
	 remove any old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (MEM_P (dest))
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }
  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */
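/* For example, with a hypothetical pattern such as
   (set (zero_extract:SI (reg:SI 2) (const_int 8) (const_int 0))
	(reg:SI 3))
   the function returns 1 for (reg:SI 2), since only a bit-field of it
   is assigned; for an ordinary (set (reg:SI 2) ...) it returns 0.  */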
static int
reg_bitfield_target_p (rtx x, rtx body)
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      unsigned int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
	target = SUBREG_REG (XEXP (dest, 0));
      else
	return 0;

      if (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (!REG_P (target))
	return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
	return target == x;

      endtregno = tregno + hard_regno_nregs[tregno][GET_MODE (target)];
      endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
	return 1;

  return 0;
}
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */
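/* A sketch of the common case (hypothetical insns): a REG_DEAD note for
   (reg 99) that lived on the old I2 is re-attached to I3 if the combined
   I3 still uses (reg 99).  If no surviving insn uses the register, we
   search backward from I3 for an earlier use to carry the note; failing
   that, the note is dropped and the block is flagged so that global life
   information is refreshed after combine finishes.  */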
static void
distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
		  rtx elim_i1)
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && REG_P (XEXP (note, 0))
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_BR_PROB:
	case REG_BR_PRED:
	  /* Doesn't matter much where we put this, as long as it's somewhere.
	     It is preferable to keep these notes on branches, which is most
	     likely to be i3.  */
	  place = i3;
	  break;

	case REG_VALUE_PROFILE:
	  /* Just get rid of this note, as it is unused later anyway.  */
	  break;

	case REG_NON_LOCAL_GOTO:
	  if (JUMP_P (i3))
	    place = i3;
	  else
	    {
	      gcc_assert (i2 && JUMP_P (i2));
	      place = i2;
	    }
	  break;

	case REG_EH_REGION:
	  /* These notes must remain with the call or trapping instruction.  */
	  if (CALL_P (i3))
	    place = i3;
	  else if (i2 && CALL_P (i2))
	    place = i2;
	  else
	    {
	      gcc_assert (flag_non_call_exceptions);
	      if (may_trap_p (i3))
		place = i3;
	      else if (i2 && may_trap_p (i2))
		place = i2;
	      /* ??? Otherwise assume we've combined things such that we
		 can now prove that the instructions can't trap.  Drop the
		 note in this case.  */
	    }
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	  /* These notes must remain with the call.  It should not be
	     possible for both I2 and I3 to be a call.  */
	  if (CALL_P (i3))
	    place = i3;
	  else
	    {
	      gcc_assert (i2 && CALL_P (i2));
	      place = i2;
	    }
	  break;

	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if such a note is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for registers which were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */
	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (REG_P (XEXP (note, 0))
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (REG_P (XEXP (note, 0))
			 ? find_regno_note (i3, REG_DEAD,
					    REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NOALIAS:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_LABEL:
	  /* This can show up in several ways -- either directly in the
	     pattern, or hidden off in the constant pool with (or without?)
	     a REG_EQUAL note.  */
	  /* ??? Ignore the without-reg_equal-note problem for now.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
	    place = i3;

	  if (i2
	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }

	  /* Don't attach REG_LABEL note to a JUMP_INSN.  Add
	     a JUMP_LABEL instead or decrement LABEL_NUSES.  */
	  if (place && JUMP_P (place))
	    {
	      rtx label = JUMP_LABEL (place);

	      if (!label)
		JUMP_LABEL (place) = XEXP (note, 0);
	      else
		{
		  gcc_assert (label == XEXP (note, 0));
		  if (LABEL_P (label))
		    LABEL_NUSES (label)--;
		}
	      place = 0;
	    }
	  if (place2 && JUMP_P (place2))
	    {
	      rtx label = JUMP_LABEL (place2);

	      if (!label)
		JUMP_LABEL (place2) = XEXP (note, 0);
	      else
		{
		  gcc_assert (label == XEXP (note, 0));
		  if (LABEL_P (label))
		    LABEL_NUSES (label)--;
		}
	      place2 = 0;
	    }
	  break;

	case REG_NONNEG:
	  /* This note says something about the value of a register prior
	     to the execution of an insn.  It is too much trouble to see
	     if the note is still correct in all situations.  It is better
	     to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (!NOTE_P (from_insn))
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	      /* Don't add the dangling REG_RETVAL note.  */
	      else if (! tem)
		place = 0;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (!NOTE_P (from_insn))
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	      /* Don't add the dangling REG_LIBCALL note.  */
	      else if (! tem)
		place = 0;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is nonzero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */
	  if (from_insn
	      && CALL_P (from_insn)
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (place == 0
	      && (rtx_equal_p (XEXP (note, 0), elim_i2)
		  || rtx_equal_p (XEXP (note, 0), elim_i1)))
	    break;

	  if (place == 0)
	    {
	      basic_block bb = this_basic_block;

	      /* You might think you could search back from FROM_INSN
		 rather than from I3, but combine tries to split invalid
		 combined instructions.  This can result in the old I2
		 or I1 moving later in the insn sequence.  */
	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
		{
		  if (! INSN_P (tem))
		    {
		      if (tem == BB_HEAD (bb))
			break;
		      continue;
		    }

		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  Don't delete sets to
		     global register vars.  */
		  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
		       || !global_regs[REGNO (XEXP (note, 0))])
		      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);
		      rtx inner_dest = 0;
#ifdef HAVE_cc0
		      rtx cc0_setter = NULL_RTX;
#endif

		      if (set != 0)
			for (inner_dest = SET_DEST (set);
			     (GET_CODE (inner_dest) == STRICT_LOW_PART
			      || GET_CODE (inner_dest) == SUBREG
			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
			     inner_dest = XEXP (inner_dest, 0))
			  ;

		      /* Verify that it was the set, and not a clobber that
			 modified the register.

			 CC0 targets must be careful to maintain setter/user
			 pairs.  If we cannot delete the setter due to side
			 effects, mark the user with an UNUSED note instead
			 of deleting it.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
			  )
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */
			  rtx old_notes = REG_NOTES (tem);

			  PATTERN (tem) = pc_rtx;
			  REG_NOTES (tem) = NULL;

			  distribute_notes (old_notes, tem, tem, NULL_RTX,
					    NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  SET_INSN_DELETED (tem);

#ifdef HAVE_cc0
			  /* Delete the setter too.  */
			  if (cc0_setter)
			    {
			      PATTERN (cc0_setter) = pc_rtx;
			      old_notes = REG_NOTES (cc0_setter);
			      REG_NOTES (cc0_setter) = NULL;

			      distribute_notes (old_notes, cc0_setter,
						cc0_setter, NULL_RTX,
						NULL_RTX, NULL_RTX);
			      distribute_links (LOG_LINKS (cc0_setter));

			      SET_INSN_DELETED (cc0_setter);
			    }
#endif
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  Do not place a REG_DEAD note, even if
			     the register is also used here; that would not
			     match the algorithm used in lifetime analysis
			     and can cause the consistency check in the
			     scheduler to fail.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (CALL_P (tem)
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      /* This may not be the correct place for the death
			 note if FROM_INSN is before TEM, and the reg is
			 set between FROM_INSN and TEM.  The reg might
			 die two or more times.  An existing death note
			 means we are looking at the wrong live range.  */
		      if (from_insn
			  && INSN_CUID (from_insn) < INSN_CUID (tem)
			  && find_regno_note (tem, REG_DEAD,
					      REGNO (XEXP (note, 0))))
			{
			  tem = from_insn;
			  if (tem == BB_HEAD (bb))
			    break;
			  continue;
			}

		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn
			  && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }

		  if (tem == BB_HEAD (bb))
		    break;
		}

	      /* We haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit the beginning
		 of the block.  If the existing life info says the reg
		 was dead, there's nothing left to do.  Otherwise, we'll
		 need to do a global life update after combine.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
		  && REGNO_REG_SET_P (bb->il.rtl->global_live_at_start,
				      REGNO (XEXP (note, 0))))
		SET_BIT (refresh_blocks, this_basic_block->index);
	    }
	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.
	     We check here if it is set at all, not if it is totally replaced,
	     which is what `dead_or_set_p' checks, so also check for it being
	     set partially.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      unsigned int regno = REGNO (XEXP (note, 0));

	      /* Similarly, if the instruction on which we want to place
		 the note is a noop, we'll need to do a global live update
		 after we remove them in delete_noop_moves.  */
	      if (noop_move_p (place))
		SET_BIT (refresh_blocks, this_basic_block->index);

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     last_death.  [I no longer understand why this is
		     being done.] */
		  if (reg_stat[regno].last_death != place)
		    reg_stat[regno].last_death = 0;
		  place = 0;
		}
	      else
		reg_stat[regno].last_death = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must arrange for an appropriate REG_DEAD
		 note to be added for it.  However, we can't just emit a USE
		 and tag the note to it, since the register might actually
		 be dead; so we recurse, and the recursive call then finds
		 the previous insn that used this register.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
		{
		  unsigned int endregno
		    = regno + hard_regno_nregs[regno]
					      [GET_MODE (XEXP (note, 0))];
		  int all_used = 1;
		  unsigned int i;

		  for (i = regno; i < endregno; i++)
		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			 && ! find_regno_fusage (place, USE, i))
			|| dead_or_set_regno_p (place, i))
		      all_used = 0;

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 not already dead or set.  */

		      for (i = regno; i < endregno;
			   i += hard_regno_nregs[i][reg_raw_mode[i]])
			{
			  rtx piece = regno_reg_rtx[i];
			  basic_block bb = this_basic_block;

			  if (! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    {
			      rtx new_note
				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);

			      distribute_notes (new_note, place, place,
						NULL_RTX, NULL_RTX, NULL_RTX);
			    }
			  else if (! refers_to_regno_p (i, i + 1,
							PATTERN (place), 0)
				   && ! find_regno_fusage (place, USE, i))
			    for (tem = PREV_INSN (place); ;
				 tem = PREV_INSN (tem))
			      {
				if (! INSN_P (tem))
				  {
				    if (tem == BB_HEAD (bb))
				      {
					SET_BIT (refresh_blocks,
						 this_basic_block->index);
					break;
				      }
				    continue;
				  }
				if (dead_or_set_p (tem, piece)
				    || reg_bitfield_target_p (piece,
							      PATTERN (tem)))
				  {
				    REG_NOTES (tem)
				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
							   REG_NOTES (tem));
				    break;
				  }
			      }
			}

		      place = 0;
		    }
		}
	    }
	  break;
	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  gcc_unreachable ();
	}

      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && REG_P (XEXP (note, 0)))
	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && REG_P (XEXP (note, 0)))
	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
					       REG_NOTE_KIND (note),
					       XEXP (note, 0),
					       REG_NOTES (place2));
	}
    }
}
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called to add a link
   pointing at I3 when I3's destination is changed.  */
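/* For instance (hypothetical insns), given
     insn 10: (set (reg 100) ...)   <-- target of a stale link
     insn 11: (set (reg 101) (reg 100))
   the scan below starts just after insn 10, finds that insn 11 uses
   (reg 100), and re-attaches the link there unless insn 11 already has
   an equivalent link to insn 10.  */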
static void
distribute_links (rtx links)
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only way that could happen is if
	 we replaced I3, I2, and I1 by I3 and I2; but in that case the
	 destination of I2 also remains unchanged.  */

      if (NOTE_P (XEXP (link, 0))
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
		     || BB_HEAD (this_basic_block->next_bb) != insn));
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (CALL_P (insn)
		 && find_reg_fusage (insn, USE, reg))
	  {
	    place = insn;
	    break;
	  }
	else if (INSN_P (insn) && reg_set_p (reg, insn))
	  break;

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
		added_links_insn = place;
	    }
	}
    }
}
/* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
   Check whether the expression pointed to by LOC is a register or
   memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
   Otherwise return zero.  */
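/* A hypothetical example: with EXPR (plus (reg 100) (reg 101)) and EQUIV
   (plus (reg 100) (const_int 4)), every register or memory reference in
   EQUIV also appears in EXPR, so unmentioned_reg_p returns false; if
   EQUIV were (plus (reg 102) (const_int 4)) instead, it would return
   true, since (reg 102) never appears in EXPR.  */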
static int
unmentioned_reg_p_1 (rtx *loc, void *expr)
{
  rtx x = *loc;

  if (x != NULL_RTX
      && (REG_P (x) || MEM_P (x))
      && ! reg_mentioned_p (x, (rtx) expr))
    return 1;
  return 0;
}

/* Check for any register or memory mentioned in EQUIV that is not
   mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
   of EXPR where some registers may have been replaced by constants.  */

static bool
unmentioned_reg_p (rtx equiv, rtx expr)
{
  return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
}
/* Compute INSN_CUID for INSN, which is an insn made by combine.  */
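/* Insns created by combine (for instance its USE placeholders) have UIDs
   above MAX_UID_CUID and hence no cuid of their own, so we skip past any
   such USEs and borrow the cuid of the next original insn.  */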
static int
insn_cuid (rtx insn)
{
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
	 && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  gcc_assert (INSN_UID (insn) <= max_uid_cuid);

  return INSN_CUID (insn);
}
void
dump_combine_stats (FILE *file)
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (FILE *file)
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}
static bool
gate_handle_combine (void)
{
  return (optimize > 0);
}

/* Try combining insns through substitution.  */
static void
rest_of_handle_combine (void)
{
  int rebuild_jump_labels_after_combine
    = combine_instructions (get_insns (), max_reg_num ());

  /* Combining insns may have turned an indirect jump into a
     direct jump.  Rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_jump_labels_after_combine)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      timevar_pop (TV_JUMP);

      delete_dead_jumptables ();
      cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
    }
}

struct tree_opt_pass pass_combine =
{
  "combine",                            /* name */
  gate_handle_combine,                  /* gate */
  rest_of_handle_combine,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'c'                                   /* letter */
};