/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0.  None are needed,
   because the insn that sets CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c is not completely updated:

   - reg_live_length is not updated
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
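
/* As a schematic illustration (not taken from any particular target),
   a pair of linked insns such as

	(set (reg 60) (plus (reg 55) (const_int 4)))
	(set (reg 61) (mult (reg 60) (reg 56)))

   where reg 60 dies in the second insn, may be merged by substituting
   the first SET's source into the second, yielding

	(set (reg 61) (mult (plus (reg 55) (const_int 4)) (reg 56)))

   which is kept only if the target's machine description recognizes
   the resulting pattern.  */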
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "target.h"

#ifndef SHIFT_COUNT_TRUNCATED
#define SHIFT_COUNT_TRUNCATED 0
#endif
/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
  (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
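
/* For example, with BITS_PER_WORD == HOST_BITS_PER_WIDE_INT == 64,
   a direct "x << 64" is undefined in C, while the two-step
   "(x << 63) << 1" above is well defined and yields zero, which is
   the value callers expect from a full-word left shift.  */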
#define nonzero_bits(X, M) \
  cached_nonzero_bits (X, M, NULL_RTX, VOIDmode, 0)

#define num_sign_bit_copies(X, M) \
  cached_num_sign_bit_copies (X, M, NULL_RTX, VOIDmode, 0)

/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;
/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;
/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to nonzero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is nonzero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set nonzero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
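
/* As a worked example of these rules: a pseudo that is set exactly once
   (reg_n_sets[i] == 1) keeps a usable reg_last_set_value across labels
   and may appear in the recorded value of any other register, while a
   pseudo set more than once is only usable while label_tick still equals
   its reg_last_set_label entry, i.e. within the current label-free run
   of insns.  (This restates the conditions above.)  */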
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set nonzero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;
/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;
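
/* For instance, in the QImode case described above, every set of the
   register is a byte load that zero extends, so its reg_nonzero_bits
   entry would be 0xff: all bits above the low byte are known to be zero
   no matter which of the two loads executed.  */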
/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};
/* Record a bunch of changes to be undone, as a chain of struct undo.
   undos is the list of currently recorded changes; frees is a list of
   previously allocated entries available for reuse.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;
/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last_arrays (void);
static void setup_incoming_promotions (void);
static void set_nonzero_bits_and_sign_copies (rtx, rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx, int);
static rtx expand_compound_operation (rtx);
static rtx expand_field_assignment (rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
			    rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
			      unsigned HOST_WIDE_INT *);
static rtx force_to_mode (rtx, enum machine_mode,
			  unsigned HOST_WIDE_INT, rtx, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
				   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
						   rtx, enum machine_mode,
						   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
					     enum machine_mode,
					     unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
						enum machine_mode,
						unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
					  enum machine_mode, unsigned int);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
			    HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
				 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static rtx gen_binary (enum rtx_code, enum machine_mode, rtx, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_promoted_subreg (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (rtx);
static int use_crosses_set_p (rtx, int);
static void reg_dead_at_p_1 (rtx, rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static int insn_cuid (rtx);
static void record_promoted_value (rtx, rtx);
static rtx reversed_comparison (rtx, enum machine_mode, rtx, rtx);
static enum rtx_code combine_reversed_comparison_code (rtx);
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */
static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
						 GET_MODE (oldval)))
	abort ();

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      if ((GET_CODE (oldval) == SUBREG
	   && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
	  || (GET_CODE (oldval) == ZERO_EXTEND
	      && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
	abort ();
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
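
/* Typical usage, as a sketch: a simplification routine rewrites part of
   the pattern being built with something like

	SUBST (SET_SRC (pat), new_src);

   and if the final pattern fails to match, undo_all walks undobuf.undos
   restoring every *where from old_contents, leaving the insn stream
   exactly as it was before the attempt.  */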
/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */

int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  /* It is not safe to use ordinary gen_lowpart in combine.
     See comments in gen_lowpart_for_combine.  */
  gen_lowpart = gen_lowpart_for_combine;

  reg_nonzero_bits = xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT));
  reg_sign_bit_copies = xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = xmalloc (nregs * sizeof (rtx));
  reg_last_set = xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = xmalloc (nregs * sizeof (int));
  reg_last_set_label = xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = xmalloc (nregs * sizeof (char));
  reg_last_set_mode = xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits = xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies = xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;
  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;
  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = BB_HEAD (this_basic_block);
	   insn != NEXT_INSN (BB_END (this_basic_block));
	   insn = next ? next : NEXT_INSN (insn))
	{
	  next = 0;

	  if (GET_CODE (insn) == CODE_LABEL)
	    label_tick++;

	  else if (INSN_P (insn))
	    {
	      /* See if we know about function return values before this
		 insn based upon SUBREG flags.  */
	      check_promoted_subreg (insn, PATTERN (insn));

	      /* Try this insn with each insn it links back to.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 NULL_RTX, &new_direct_jump_p)) != 0)
		  goto retry;

	      /* Try each sequence of three linked insns ending with this one.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		{
		  rtx link = XEXP (links, 0);

		  /* If the linked insn has been replaced by a note, then there
		     is no point in pursuing this chain any further.  */
		  if (GET_CODE (link) == NOTE)
		    continue;

		  for (nextlinks = LOG_LINKS (link);
		       nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, link,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

#ifdef HAVE_cc0
	      /* Try to combine a jump insn that uses CC0
		 with a preceding insn that sets CC0, and maybe with its
		 logical predecessor as well.
		 This is how we make decrement-and-branch insns.
		 We need this special code because data flow connections
		 via CC0 do not get entered in LOG_LINKS.  */

	      if (GET_CODE (insn) == JUMP_INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev)))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Do the same for an insn that explicitly references CC0.  */
	      if (GET_CODE (insn) == INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev))
		  && GET_CODE (PATTERN (insn)) == SET
		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Finally, see if any of the insns that this insn links to
		 explicitly references CC0.  If so, try this insn, that insn,
		 and its predecessor if it sets CC0.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if (GET_CODE (XEXP (links, 0)) == INSN
		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		    && GET_CODE (prev) == INSN
		    && sets_cc0_p (PATTERN (prev))
		    && (next = try_combine (insn, XEXP (links, 0),
					    prev, &new_direct_jump_p)) != 0)
		  goto retry;
#endif

	      /* Try combining an insn with two different insns whose results it
		 uses.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		for (nextlinks = XEXP (links, 1); nextlinks;
		     nextlinks = XEXP (nextlinks, 1))
		  if ((next = try_combine (insn, XEXP (links, 0),
					   XEXP (nextlinks, 0),
					   &new_direct_jump_p)) != 0)
		    goto retry;

	      if (GET_CODE (insn) != NOTE)
		record_dead_and_set_regs (insn);

	    retry:
	      ;
	    }
	}
    }
  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, i,
			     BASIC_BLOCK (i)->flags |= BB_DIRTY);
  new_direct_jump_p |= purge_all_dead_edges (0);
  delete_noop_moves (f);

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
				    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  gen_lowpart = gen_lowpart_general;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays (void)
{
  unsigned int nregs = combine_max_regno;

  memset (reg_last_death, 0, nregs * sizeof (rtx));
  memset (reg_last_set, 0, nregs * sizeof (rtx));
  memset (reg_last_set_value, 0, nregs * sizeof (rtx));
  memset (reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset (reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset (reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset (reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (void)
{
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  if (targetm.calls.promote_function_args (TREE_TYPE (cfun->decl)))
    {
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	/* Check whether this register can hold an incoming pointer
	   argument.  FUNCTION_ARG_REGNO_P tests outgoing register
	   numbers, so translate if necessary due to register windows.  */
	if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	    && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
	  {
	    record_value_for_reg
	      (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
					   : SIGN_EXTEND),
					  GET_MODE (reg),
					  gen_rtx_CLOBBER (mode, const0_rtx)));
	  }
    }
}
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */
static void
set_nonzero_bits_and_sign_copies (rtx x, rtx set,
				  void *data ATTRIBUTE_UNUSED)
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Don't call nonzero_bits if it cannot change anything.  */
	  if (reg_nonzero_bits[REGNO (x)] != ~(unsigned HOST_WIDE_INT) 0)
	    reg_nonzero_bits[REGNO (x)]
	      |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */
static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
	       rtx *pdest, rtx *psrc)
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);
  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;
  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;
  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */
static int
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
		  int i1_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
	 above.  We don't want to prevent pseudos from being combined
	 into the address of a MEM, so only prevent the combination if
	 i1 or i2 set the same MEM.  */
      if ((inner_dest != dest &&
	   (GET_CODE (inner_dest) != MEM
	    || rtx_equal_p (i2dest, inner_dest)
	    || (i1dest && rtx_equal_p (i1dest, inner_dest)))
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      switch (GET_RTX_CLASS (GET_CODE (x)))
	{
	case 'c':  case '<':  case '2':
	  return contains_muldiv (XEXP (x, 0))
	    || contains_muldiv (XEXP (x, 1));

	case '1':
	  return contains_muldiv (XEXP (x, 0));

	default:
	  return 0;
	}
    }
}
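
/* For example (illustrative RTL only), (plus (div (reg 60) (reg 61))
   (const_int 1)) contains a division and yields 1, while
   (mult (reg 60) (const_int 4)) yields 0 because the multiplication
   is by a power of two.  */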
/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, we may abort in reload.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
	   && ! fixed_regs[REGNO (src)]
	   && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
	      && ! fixed_regs[REGNO (dest)]
	      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}
/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (rtx insn)
{
  rtx *loc;

  /* For notes, be conservative and simply remove them.  */
  loc = &REG_NOTES (insn);
  while (*loc)
    {
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
	*loc = XEXP (*loc, 1);
      else
	loc = &XEXP (*loc, 1);
    }

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
}
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.  */
static rtx
try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;

  int maxreg;
  rtx temp;
  rtx link;
  int i;

  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      /* We also can't do anything if I3 has a
	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
	 libcall.  */
#if 0
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
#endif
      )
    return 0;
  combine_attempts++;
  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     Note that this case handles both multiple sets in I2 and also
     cases where I2 has a number of CLOBBER or PARALLELs.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
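
  /* Schematically (illustrative RTL, not from any particular target),
     this special case turns

	I2: (parallel [(set (reg 70) (div:SI (reg 60) (reg 61)))
		       (set (reg 71) (mod:SI (reg 60) (reg 61)))])
	I3: (set (mem:SI (reg 65)) (reg 71))

     with reg 71 dying in I3, into a single divmod whose remainder
     output goes straight to memory:

	(parallel [(set (reg 70) (div:SI (reg 60) (reg 61)))
		   (set (mem:SI (reg 65)) (mod:SI (reg 60) (reg 61)))])  */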
  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = added_sets_1 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }
1592 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1593 one of those words to another constant, merge them by making a new
1594 constant. */
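/* A hypothetical sketch, assuming 32-bit words and a 64-bit pseudo:

	I2: (set (reg:DI 100) (const_int 0))
	I3: (set (subreg:SI (reg:DI 100) 0) (const_int 5))

   The word selected by the SUBREG is replaced within I2's constant,
   leaving a single (set (reg:DI 100) (const_int 5)).  */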
1595 if (i1 == 0
1596 && (temp = single_set (i2)) != 0
1597 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1598 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1599 && GET_CODE (SET_DEST (temp)) == REG
1600 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1601 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1602 && GET_CODE (PATTERN (i3)) == SET
1603 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1604 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1605 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1606 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1607 && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1609 HOST_WIDE_INT lo, hi;
1611 if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1612 lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1613 else
1615 lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1616 hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1619 if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1621 /* We don't handle the case of the target word being wider
1622 than a host wide int. */
1623 if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1624 abort ();
1626 lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1627 lo |= (INTVAL (SET_SRC (PATTERN (i3)))
1628 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1630 else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1631 hi = INTVAL (SET_SRC (PATTERN (i3)));
1632 else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1634 int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1635 >> (HOST_BITS_PER_WIDE_INT - 1));
1637 lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1638 (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1639 lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1640 (INTVAL (SET_SRC (PATTERN (i3)))));
1641 if (hi == sign)
1642 hi = lo < 0 ? -1 : 0;
1644 else
1645 /* We don't handle the case of the higher word not fitting
1646 entirely in either hi or lo. */
1647 abort ();
1649 combine_merges++;
1650 subst_insn = i3;
1651 subst_low_cuid = INSN_CUID (i2);
1652 added_sets_2 = added_sets_1 = 0;
1653 i2dest = SET_DEST (temp);
1655 SUBST (SET_SRC (temp),
1656 immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1658 newpat = PATTERN (i2);
1659 goto validate_replacement;
1662 #ifndef HAVE_cc0
1663 /* If we have no I1 and I2 looks like:
1664 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1665 (set Y OP)])
1666 make up a dummy I1 that is
1667 (set Y OP)
1668 and change I2 to be
1669 (set (reg:CC X) (compare:CC Y (const_int 0)))
1671 (We can ignore any trailing CLOBBERs.)
1673 This undoes a previous combination and allows us to match a branch-and-
1674 decrement insn. */
1676 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1677 && XVECLEN (PATTERN (i2), 0) >= 2
1678 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1679 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1680 == MODE_CC)
1681 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1682 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1683 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1684 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1685 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1686 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1688 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1689 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1690 break;
1692 if (i == 1)
1694 /* We make I1 with the same INSN_UID as I2. This gives it
1695 the same INSN_CUID for value tracking. Our fake I1 will
1696 never appear in the insn stream so giving it the same INSN_UID
1697 as I2 will not cause a problem. */
1699 i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1700 BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
1701 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1702 NULL_RTX);
1704 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1705 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1706 SET_DEST (PATTERN (i1)));
1709 #endif
1711 /* Verify that I2 and I1 are valid for combining. */
1712 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1713 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1715 undo_all ();
1716 return 0;
1719 /* Record whether I2DEST is used in I2SRC and similarly for the other
1720 cases. Knowing this will help in register status updating below. */
1721 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1722 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1723 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1725 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1726 in I2SRC. */
1727 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1729 /* Ensure that I3's pattern can be the destination of combines. */
1730 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1731 i1 && i2dest_in_i1src && i1_feeds_i3,
1732 &i3dest_killed))
1734 undo_all ();
1735 return 0;
1738 /* See if any of the insns is a MULT operation. Unless one is, we will
1739 reject a combination that is, since it must be slower. Be conservative
1740 here. */
1741 if (GET_CODE (i2src) == MULT
1742 || (i1 != 0 && GET_CODE (i1src) == MULT)
1743 || (GET_CODE (PATTERN (i3)) == SET
1744 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1745 have_mult = 1;
1747 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1748 We used to do this EXCEPT in one case: I3 has a post-inc in an
1749 output operand. However, that exception can give rise to insns like
1750 mov r3,(r3)+
1751 which is a famous insn on the PDP-11 where the value of r3 used as the
1752 source was model-dependent. Avoid this sort of thing. */
1754 #if 0
1755 if (!(GET_CODE (PATTERN (i3)) == SET
1756 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1757 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1758 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1759 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1760 /* It's not the exception. */
1761 #endif
1762 #ifdef AUTO_INC_DEC
1763 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1764 if (REG_NOTE_KIND (link) == REG_INC
1765 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1766 || (i1 != 0
1767 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1769 undo_all ();
1770 return 0;
1772 #endif
1774 /* See if the SETs in I1 or I2 need to be kept around in the merged
1775 instruction: whenever the value set there is still needed past I3.
1776 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1778 For the SET in I1, we have two cases: If I1 and I2 independently
1779 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1780 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1781 in I1 needs to be kept around unless I1DEST dies or is set in either
1782 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1783 I1DEST. If so, we know I1 feeds into I2. */
1785 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1787 added_sets_1
1788 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1789 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1791 /* If the set in I2 needs to be kept around, we must make a copy of
1792 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1793 PATTERN (I2), we are only substituting for the original I1DEST, not into
1794 an already-substituted copy. This also prevents making self-referential
1795 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1796 I2DEST. */
1798 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1799 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1800 : PATTERN (i2));
1802 if (added_sets_2)
1803 i2pat = copy_rtx (i2pat);
1805 combine_merges++;
1807 /* Substitute in the latest insn for the regs set by the earlier ones. */
1809 maxreg = max_reg_num ();
1811 subst_insn = i3;
1813 /* It is possible that the source of I2 or I1 may be performing an
1814 unneeded operation, such as a ZERO_EXTEND of something that is known
1815 to have the high part zero. Handle that case by letting subst look at
1816 the innermost one of them.
1818 Another way to do this would be to have a function that tries to
1819 simplify a single insn instead of merging two or more insns. We don't
1820 do this because of the potential of infinite loops and because
1821 of the potential extra memory required. However, doing it the way
1822 we are is a bit of a kludge and doesn't catch all cases.
1824 But only do this if -fexpensive-optimizations since it slows things down
1825 and doesn't usually win. */
1827 if (flag_expensive_optimizations)
1829 /* Pass pc_rtx so no substitutions are done, just simplifications.
1830 The cases that we are interested in here do not involve the few
1831 cases where is_replaced is checked. */
1832 if (i1)
1834 subst_low_cuid = INSN_CUID (i1);
1835 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1837 else
1839 subst_low_cuid = INSN_CUID (i2);
1840 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1844 #ifndef HAVE_cc0
1845 /* Many machines that don't use CC0 have insns that can both perform an
1846 arithmetic operation and set the condition code. These operations will
1847 be represented as a PARALLEL with the first element of the vector
1848 being a COMPARE of an arithmetic operation with the constant zero.
1849 The second element of the vector will set some pseudo to the result
1850 of the same arithmetic operation. If we simplify the COMPARE, we won't
1851 match such a pattern and so will generate an extra insn. Here we test
1852 for this case, where both the comparison and the operation result are
1853 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1854 I2SRC. Later we will make the PARALLEL that contains I2. */
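/* A hypothetical sketch (register numbers invented):

	I2: (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int -1)))
	I3: (set (reg:CC 24) (compare:CC (reg:SI 100) (const_int 0)))

   Replacing I2DEST within I3 gives

	(set (reg:CC 24) (compare:CC (plus:SI (reg:SI 101) (const_int -1))
				     (const_int 0)))

   and, because ADDED_SETS_2 is nonzero, the set of (reg:SI 100) is added
   back below, yielding the add-and-set-flags PARALLEL that many machines
   recognize.  */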
1856 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1857 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1858 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1859 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1861 #ifdef SELECT_CC_MODE
1862 rtx *cc_use;
1863 enum machine_mode compare_mode;
1864 #endif
1866 newpat = PATTERN (i3);
1867 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1869 i2_is_used = 1;
1871 #ifdef SELECT_CC_MODE
1872 /* See if a COMPARE with the operand we substituted in should be done
1873 with the mode that is currently being used. If not, do the same
1874 processing we do in `subst' for a SET; namely, if the destination
1875 is used only once, try to replace it with a register of the proper
1876 mode and also replace the COMPARE. */
1877 if (undobuf.other_insn == 0
1878 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1879 &undobuf.other_insn))
1880 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1881 i2src, const0_rtx))
1882 != GET_MODE (SET_DEST (newpat))))
1884 unsigned int regno = REGNO (SET_DEST (newpat));
1885 rtx new_dest = gen_rtx_REG (compare_mode, regno);
1887 if (regno < FIRST_PSEUDO_REGISTER
1888 || (REG_N_SETS (regno) == 1 && ! added_sets_2
1889 && ! REG_USERVAR_P (SET_DEST (newpat))))
1891 if (regno >= FIRST_PSEUDO_REGISTER)
1892 SUBST (regno_reg_rtx[regno], new_dest);
1894 SUBST (SET_DEST (newpat), new_dest);
1895 SUBST (XEXP (*cc_use, 0), new_dest);
1896 SUBST (SET_SRC (newpat),
1897 gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
1899 else
1900 undobuf.other_insn = 0;
1902 #endif
1904 else
1905 #endif
1907 n_occurrences = 0; /* `subst' counts here */
1909 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1910 need to make a unique copy of I2SRC each time we substitute it
1911 to avoid self-referential rtl. */
1913 subst_low_cuid = INSN_CUID (i2);
1914 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1915 ! i1_feeds_i3 && i1dest_in_i1src);
1916 substed_i2 = 1;
1918 /* Record whether i2's body now appears within i3's body. */
1919 i2_is_used = n_occurrences;
1922 /* If we already got a failure, don't try to do more. Otherwise,
1923 try to substitute in I1 if we have it. */
1925 if (i1 && GET_CODE (newpat) != CLOBBER)
1927 /* Before we can do this substitution, we must redo the test done
1928 above (see detailed comments there) that ensures that I1DEST
1929 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1931 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1932 0, (rtx*) 0))
1934 undo_all ();
1935 return 0;
1938 n_occurrences = 0;
1939 subst_low_cuid = INSN_CUID (i1);
1940 newpat = subst (newpat, i1dest, i1src, 0, 0);
1941 substed_i1 = 1;
1944 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1945 to count all the ways that I2SRC and I1SRC can be used. */
1946 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1947 && i2_is_used + added_sets_2 > 1)
1948 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1949 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1950 > 1))
1951 /* Fail if we tried to make a new register (we used to abort, but there's
1952 really no reason to). */
1953 || max_reg_num () != maxreg
1954 /* Fail if we couldn't do something and have a CLOBBER. */
1955 || GET_CODE (newpat) == CLOBBER
1956 /* Fail if this new pattern is a MULT and we didn't have one before
1957 at the outer level. */
1958 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1959 && ! have_mult))
1961 undo_all ();
1962 return 0;
1965 /* If the actions of the earlier insns must be kept
1966 in addition to substituting them into the latest one,
1967 we must make a new PARALLEL for the latest insn
1968 to hold the additional SETs. */
1970 if (added_sets_1 || added_sets_2)
1972 combine_extras++;
1974 if (GET_CODE (newpat) == PARALLEL)
1976 rtvec old = XVEC (newpat, 0);
1977 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1978 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1979 memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
1980 sizeof (old->elem[0]) * old->num_elem);
1982 else
1984 rtx old = newpat;
1985 total_sets = 1 + added_sets_1 + added_sets_2;
1986 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1987 XVECEXP (newpat, 0, 0) = old;
1990 if (added_sets_1)
1991 XVECEXP (newpat, 0, --total_sets)
1992 = (GET_CODE (PATTERN (i1)) == PARALLEL
1993 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
1995 if (added_sets_2)
1997 /* If there is no I1, use I2's body as is. We used to also not do
1998 the subst call below if I2 was substituted into I3,
1999 but that could lose a simplification. */
2000 if (i1 == 0)
2001 XVECEXP (newpat, 0, --total_sets) = i2pat;
2002 else
2003 /* See comment where i2pat is assigned. */
2004 XVECEXP (newpat, 0, --total_sets)
2005 = subst (i2pat, i1dest, i1src, 0, 0);
2009 /* We come here when we are replacing a destination in I2 with the
2010 destination of I3. */
2011 validate_replacement:
2013 /* Note which hard regs this insn has as inputs. */
2014 mark_used_regs_combine (newpat);
2016 /* Is the result of combination a valid instruction? */
2017 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2019 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2020 the second SET's destination is a register that is unused. In that case,
2021 we just need the first SET. This can occur when simplifying a divmod
2022 insn. We *must* test for this case here because the code below that
2023 splits two independent SETs doesn't handle this case correctly when it
2024 updates the register status. Also check the case where the first
2025 SET's destination is unused. That would not cause incorrect code, but
2026 does cause an unneeded insn to remain. */
2028 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2029 && XVECLEN (newpat, 0) == 2
2030 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2031 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2032 && asm_noperands (newpat) < 0)
2034 rtx set0 = XVECEXP (newpat, 0, 0);
2035 rtx set1 = XVECEXP (newpat, 0, 1);
2037 if (((GET_CODE (SET_DEST (set1)) == REG
2038 && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
2039 || (GET_CODE (SET_DEST (set1)) == SUBREG
2040 && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
2041 && ! side_effects_p (SET_SRC (set1)))
2043 newpat = set0;
2044 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2047 else if (((GET_CODE (SET_DEST (set0)) == REG
2048 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
2049 || (GET_CODE (SET_DEST (set0)) == SUBREG
2050 && find_reg_note (i3, REG_UNUSED,
2051 SUBREG_REG (SET_DEST (set0)))))
2052 && ! side_effects_p (SET_SRC (set0)))
2054 newpat = set1;
2055 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2057 if (insn_code_number >= 0)
2059 /* If we will be able to accept this, we have made a
2060 change to the destination of I3. This requires us to
2061 do a few adjustments. */
2063 PATTERN (i3) = newpat;
2064 adjust_for_new_dest (i3);
2069 /* If we were combining three insns and the result is a simple SET
2070 with no ASM_OPERANDS that wasn't recognized, try to split it into two
2071 insns. There are two ways to do this. It can be split using a
2072 machine-specific method (like when you have an addition of a large
2073 constant) or by combine in the function find_split_point. */
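/* For example, a combined insn such as

	(set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 123456)))

   (registers hypothetical) may fail to match on a machine whose add
   immediates are limited to small constants, but a machine-specific
   define_split, or the generic code below, may be able to break the
   constant out into an insn of its own.  */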
2075 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2076 && asm_noperands (newpat) < 0)
2078 rtx m_split, *split;
2079 rtx ni2dest = i2dest;
2081 /* See if the MD file can split NEWPAT. If it can't, see if letting it
2082 use I2DEST as a scratch register will help. In the latter case,
2083 convert I2DEST to the mode of the source of NEWPAT if we can. */
2085 m_split = split_insns (newpat, i3);
2087 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2088 inputs of NEWPAT. */
2090 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2091 possible to try that as a scratch reg. This would require adding
2092 more code to make it work though. */
2094 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2096 /* If I2DEST is a hard register or the only use of a pseudo,
2097 we can change its mode. */
2098 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
2099 && GET_MODE (SET_DEST (newpat)) != VOIDmode
2100 && GET_CODE (i2dest) == REG
2101 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2102 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2103 && ! REG_USERVAR_P (i2dest))))
2104 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2105 REGNO (i2dest));
2107 m_split = split_insns (gen_rtx_PARALLEL
2108 (VOIDmode,
2109 gen_rtvec (2, newpat,
2110 gen_rtx_CLOBBER (VOIDmode,
2111 ni2dest))),
2112 i3);
2113 /* If the split with the mode-changed register didn't work, try
2114 the original register. */
2115 if (! m_split && ni2dest != i2dest)
2117 ni2dest = i2dest;
2118 m_split = split_insns (gen_rtx_PARALLEL
2119 (VOIDmode,
2120 gen_rtvec (2, newpat,
2121 gen_rtx_CLOBBER (VOIDmode,
2122 i2dest))),
2123 i3);
2127 if (m_split && NEXT_INSN (m_split) == NULL_RTX)
2129 m_split = PATTERN (m_split);
2130 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2131 if (insn_code_number >= 0)
2132 newpat = m_split;
2134 else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
2135 && (next_real_insn (i2) == i3
2136 || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
2138 rtx i2set, i3set;
2139 rtx newi3pat = PATTERN (NEXT_INSN (m_split));
2140 newi2pat = PATTERN (m_split);
2142 i3set = single_set (NEXT_INSN (m_split));
2143 i2set = single_set (m_split);
2145 /* In case we changed the mode of I2DEST, replace it in the
2146 pseudo-register table here. We can't do it above in case this
2147 code doesn't get executed and we do a split the other way. */
2149 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2150 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2152 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2154 /* If I2 or I3 has multiple SETs, we won't know how to track
2155 register status, so don't use these insns. If I2's destination
2156 is used between I2 and I3, we also can't use these insns. */
2158 if (i2_code_number >= 0 && i2set && i3set
2159 && (next_real_insn (i2) == i3
2160 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2161 insn_code_number = recog_for_combine (&newi3pat, i3,
2162 &new_i3_notes);
2163 if (insn_code_number >= 0)
2164 newpat = newi3pat;
2166 /* It is possible that both insns now set the destination of I3.
2167 If so, we must show an extra use of it. */
2169 if (insn_code_number >= 0)
2171 rtx new_i3_dest = SET_DEST (i3set);
2172 rtx new_i2_dest = SET_DEST (i2set);
2174 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2175 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2176 || GET_CODE (new_i3_dest) == SUBREG)
2177 new_i3_dest = XEXP (new_i3_dest, 0);
2179 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2180 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2181 || GET_CODE (new_i2_dest) == SUBREG)
2182 new_i2_dest = XEXP (new_i2_dest, 0);
2184 if (GET_CODE (new_i3_dest) == REG
2185 && GET_CODE (new_i2_dest) == REG
2186 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2187 REG_N_SETS (REGNO (new_i2_dest))++;
2191 /* If we can split it and use I2DEST, go ahead and see if that
2192 helps things be recognized. Verify that none of the registers
2193 are set between I2 and I3. */
2194 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2195 #ifdef HAVE_cc0
2196 && GET_CODE (i2dest) == REG
2197 #endif
2198 /* We need I2DEST in the proper mode. If it is a hard register
2199 or the only use of a pseudo, we can change its mode. */
2200 && (GET_MODE (*split) == GET_MODE (i2dest)
2201 || GET_MODE (*split) == VOIDmode
2202 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2203 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2204 && ! REG_USERVAR_P (i2dest)))
2205 && (next_real_insn (i2) == i3
2206 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2207 /* We can't overwrite I2DEST if its value is still used by
2208 NEWPAT. */
2209 && ! reg_referenced_p (i2dest, newpat))
2211 rtx newdest = i2dest;
2212 enum rtx_code split_code = GET_CODE (*split);
2213 enum machine_mode split_mode = GET_MODE (*split);
2215 /* Get NEWDEST as a register in the proper mode. We have already
2216 validated that we can do this. */
2217 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2219 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2221 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2222 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2225 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2226 an ASHIFT. This can occur if it was inside a PLUS and hence
2227 appeared to be a memory address. This is a kludge. */
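/* E.g. (mult:SI (reg:SI 100) (const_int 8)) found inside an address
   becomes (ashift:SI (reg:SI 100) (const_int 3)).  */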
2228 if (split_code == MULT
2229 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2230 && INTVAL (XEXP (*split, 1)) > 0
2231 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2233 SUBST (*split, gen_rtx_ASHIFT (split_mode,
2234 XEXP (*split, 0), GEN_INT (i)));
2235 /* Update split_code because we may not have a multiply
2236 anymore. */
2237 split_code = GET_CODE (*split);
2240 #ifdef INSN_SCHEDULING
2241 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2242 be written as a ZERO_EXTEND. */
2243 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2245 #ifdef LOAD_EXTEND_OP
2246 /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2247 what it really is. */
2248 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
2249 == SIGN_EXTEND)
2250 SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
2251 SUBREG_REG (*split)));
2252 else
2253 #endif
2254 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2255 SUBREG_REG (*split)));
2257 #endif
2259 newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2260 SUBST (*split, newdest);
2261 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2263 /* If the split point was a MULT and we didn't have one before,
2264 don't use one now. */
2265 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2266 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2270 /* Check for a case where we loaded from memory in a narrow mode and
2271 then sign extended it, but we need both registers. In that case,
2272 we have a PARALLEL with both loads from the same memory location.
2273 We can split this into a load from memory followed by a register-register
2274 copy. This saves at least one insn, more if register allocation can
2275 eliminate the copy.
2277 We cannot do this if the destination of the first assignment is a
2278 condition code register or cc0. We eliminate this case by making sure
2279 the SET_DEST and SET_SRC have the same mode.
2281 We cannot do this if the destination of the second assignment is
2282 a register that we have already assumed is zero-extended. Similarly
2283 for a SUBREG of such a register. */
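/* A hypothetical sketch (register numbers invented):

	(parallel [(set (reg:SI 100) (sign_extend:SI (mem:HI (reg:SI 102))))
		   (set (reg:HI 101) (mem:HI (reg:SI 102)))])

   becomes the extending load alone as NEWI2PAT, followed by a copy of
   the low part of (reg:SI 100) into (reg:HI 101), so that memory is
   read only once.  */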
2285 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2286 && GET_CODE (newpat) == PARALLEL
2287 && XVECLEN (newpat, 0) == 2
2288 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2289 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2290 && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
2291 == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
2292 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2293 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2294 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2295 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2296 INSN_CUID (i2))
2297 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2298 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2299 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2300 (GET_CODE (temp) == REG
2301 && reg_nonzero_bits[REGNO (temp)] != 0
2302 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2303 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2304 && (reg_nonzero_bits[REGNO (temp)]
2305 != GET_MODE_MASK (word_mode))))
2306 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2307 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2308 (GET_CODE (temp) == REG
2309 && reg_nonzero_bits[REGNO (temp)] != 0
2310 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2311 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2312 && (reg_nonzero_bits[REGNO (temp)]
2313 != GET_MODE_MASK (word_mode)))))
2314 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2315 SET_SRC (XVECEXP (newpat, 0, 1)))
2316 && ! find_reg_note (i3, REG_UNUSED,
2317 SET_DEST (XVECEXP (newpat, 0, 0))))
2319 rtx ni2dest;
2321 newi2pat = XVECEXP (newpat, 0, 0);
2322 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2323 newpat = XVECEXP (newpat, 0, 1);
2324 SUBST (SET_SRC (newpat),
2325 gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
2326 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2328 if (i2_code_number >= 0)
2329 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2331 if (insn_code_number >= 0)
2333 rtx insn;
2334 rtx link;
2336 /* If we will be able to accept this, we have made a change to the
2337 destination of I3. This requires us to do a few adjustments. */
2338 PATTERN (i3) = newpat;
2339 adjust_for_new_dest (i3);
2341 /* I3 now uses what used to be its destination and which is
2342 now I2's destination. That means we need a LOG_LINK from
2343 I3 to I2. But we used to have one, so we still will.
2345 However, some later insn might be using I2's dest and have
2346 a LOG_LINK pointing at I3. We must remove this link.
2347 The simplest way to remove the link is to point it at I1,
2348 which we know will be a NOTE. */
2350 for (insn = NEXT_INSN (i3);
2351 insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2352 || insn != BB_HEAD (this_basic_block->next_bb));
2353 insn = NEXT_INSN (insn))
2355 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2357 for (link = LOG_LINKS (insn); link;
2358 link = XEXP (link, 1))
2359 if (XEXP (link, 0) == i3)
2360 XEXP (link, 0) = i1;
2362 break;
2368 /* Similarly, check for a case where we have a PARALLEL of two independent
2369 SETs but we started with three insns. In this case, we can do the sets
2370 as two separate insns. This case occurs when some SET allows two
2371 other insns to combine, but the destination of that SET is still live. */
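/* E.g. NEWPAT might end up as

	(parallel [(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
		   (set (reg:SI 103) (neg:SI (reg:SI 102)))])

   (registers hypothetical); since neither set references the other's
   destination, the two can simply be emitted as separate I2 and I3.  */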
2373 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2374 && GET_CODE (newpat) == PARALLEL
2375 && XVECLEN (newpat, 0) == 2
2376 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2377 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2378 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2379 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2380 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2381 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2382 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2383 INSN_CUID (i2))
2384 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2385 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2386 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2387 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2388 XVECEXP (newpat, 0, 0))
2389 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2390 XVECEXP (newpat, 0, 1))
2391 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2392 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2394 /* Normally, it doesn't matter which of the two is done first,
2395 but it does if one references cc0. In that case, it has to
2396 be first. */
2397 #ifdef HAVE_cc0
2398 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2400 newi2pat = XVECEXP (newpat, 0, 0);
2401 newpat = XVECEXP (newpat, 0, 1);
2403 else
2404 #endif
2406 newi2pat = XVECEXP (newpat, 0, 1);
2407 newpat = XVECEXP (newpat, 0, 0);
2410 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2412 if (i2_code_number >= 0)
2413 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2416 /* If it still isn't recognized, fail and change things back the way they
2417 were. */
2418 if ((insn_code_number < 0
2419 /* Is the result a reasonable ASM_OPERANDS? */
2420 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2422 undo_all ();
2423 return 0;
2426 /* If we had to change another insn, make sure it is valid also. */
2427 if (undobuf.other_insn)
2429 rtx other_pat = PATTERN (undobuf.other_insn);
2430 rtx new_other_notes;
2431 rtx note, next;
2433 CLEAR_HARD_REG_SET (newpat_used_regs);
2435 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2436 &new_other_notes);
2438 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2440 undo_all ();
2441 return 0;
2444 PATTERN (undobuf.other_insn) = other_pat;
2446 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2447 are still valid. Then add any non-duplicate notes added by
2448 recog_for_combine. */
2449 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2451 next = XEXP (note, 1);
2453 if (REG_NOTE_KIND (note) == REG_UNUSED
2454 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2456 if (GET_CODE (XEXP (note, 0)) == REG)
2457 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2459 remove_note (undobuf.other_insn, note);
2463 for (note = new_other_notes; note; note = XEXP (note, 1))
2464 if (GET_CODE (XEXP (note, 0)) == REG)
2465 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2467 distribute_notes (new_other_notes, undobuf.other_insn,
2468 undobuf.other_insn, NULL_RTX);
2470 #ifdef HAVE_cc0
2471 /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
2472 they are adjacent to each other or not. */
2474 rtx p = prev_nonnote_insn (i3);
2475 if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2476 && sets_cc0_p (newi2pat))
2478 undo_all ();
2479 return 0;
2482 #endif
2484 /* We now know that we can do this combination. Merge the insns and
2485 update the status of registers and LOG_LINKS. */
2488 rtx i3notes, i2notes, i1notes = 0;
2489 rtx i3links, i2links, i1links = 0;
2490 rtx midnotes = 0;
2491 unsigned int regno;
2493 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2494 clear them. */
2495 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2496 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2497 if (i1)
2498 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2500 /* Ensure that we do not have something that should not be shared but
2501 occurs multiple times in the new insns. Check this by first
2502 resetting all the `used' flags and then copying anything that is shared. */
2504 reset_used_flags (i3notes);
2505 reset_used_flags (i2notes);
2506 reset_used_flags (i1notes);
2507 reset_used_flags (newpat);
2508 reset_used_flags (newi2pat);
2509 if (undobuf.other_insn)
2510 reset_used_flags (PATTERN (undobuf.other_insn));
2512 i3notes = copy_rtx_if_shared (i3notes);
2513 i2notes = copy_rtx_if_shared (i2notes);
2514 i1notes = copy_rtx_if_shared (i1notes);
2515 newpat = copy_rtx_if_shared (newpat);
2516 newi2pat = copy_rtx_if_shared (newi2pat);
2517 if (undobuf.other_insn)
2518 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2520 INSN_CODE (i3) = insn_code_number;
2521 PATTERN (i3) = newpat;
2523 if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
2525 rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2527 reset_used_flags (call_usage);
2528 call_usage = copy_rtx (call_usage);
2530 if (substed_i2)
2531 replace_rtx (call_usage, i2dest, i2src);
2533 if (substed_i1)
2534 replace_rtx (call_usage, i1dest, i1src);
2536 CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2539 if (undobuf.other_insn)
2540 INSN_CODE (undobuf.other_insn) = other_code_number;
2542 /* We had one special case above where I2 had more than one set and
2543 we replaced a destination of one of those sets with the destination
2544 of I3. In that case, we have to update LOG_LINKS of insns later
2545 in this basic block. Note that this (expensive) case is rare.
2547 Also, in this case, we must pretend that all REG_NOTEs for I2
2548 actually came from I3, so that REG_UNUSED notes from I2 will be
2549 properly handled. */
2551 if (i3_subst_into_i2)
2553 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2554 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2555 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2556 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2557 && ! find_reg_note (i2, REG_UNUSED,
2558 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2559 for (temp = NEXT_INSN (i2);
2560 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2561 || BB_HEAD (this_basic_block->next_bb) != temp);
2562 temp = NEXT_INSN (temp))
2563 if (temp != i3 && INSN_P (temp))
2564 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2565 if (XEXP (link, 0) == i2)
2566 XEXP (link, 0) = i3;
2568 if (i3notes)
2570 rtx link = i3notes;
2571 while (XEXP (link, 1))
2572 link = XEXP (link, 1);
2573 XEXP (link, 1) = i2notes;
2575 else
2576 i3notes = i2notes;
2577 i2notes = 0;
2580 LOG_LINKS (i3) = 0;
2581 REG_NOTES (i3) = 0;
2582 LOG_LINKS (i2) = 0;
2583 REG_NOTES (i2) = 0;
2585 if (newi2pat)
2587 INSN_CODE (i2) = i2_code_number;
2588 PATTERN (i2) = newi2pat;
2590 else
2592 PUT_CODE (i2, NOTE);
2593 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2594 NOTE_SOURCE_FILE (i2) = 0;
2597 if (i1)
2599 LOG_LINKS (i1) = 0;
2600 REG_NOTES (i1) = 0;
2601 PUT_CODE (i1, NOTE);
2602 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2603 NOTE_SOURCE_FILE (i1) = 0;
2606 /* Get death notes for everything that is now used in either I3 or
2607 I2 and used to die in a previous insn. If we built two new
2608 patterns, move from I1 to I2 then I2 to I3 so that we get the
2609 proper movement on registers that I2 modifies. */
2611 if (newi2pat)
2613 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2614 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2616 else
2617 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2618 i3, &midnotes);
2620 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2621 if (i3notes)
2622 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX);
2623 if (i2notes)
2624 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX);
2625 if (i1notes)
2626 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX);
2627 if (midnotes)
2628 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2630 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2631 know these are REG_UNUSED and want them to go to the desired insn,
2632 so we always pass it as i3. We have not counted the notes in
2633 reg_n_deaths yet, so we need to do so now. */
2635 if (newi2pat && new_i2_notes)
2637 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2638 if (GET_CODE (XEXP (temp, 0)) == REG)
2639 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2641 distribute_notes (new_i2_notes, i2, i2, NULL_RTX);
2644 if (new_i3_notes)
2646 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2647 if (GET_CODE (XEXP (temp, 0)) == REG)
2648 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2650 distribute_notes (new_i3_notes, i3, i3, NULL_RTX);
2653 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2654 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2655 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2656 in that case, it might delete I2. Similarly for I2 and I1.
2657 Show an additional death due to the REG_DEAD note we make here. If
2658 we discard it in distribute_notes, we will decrement it again. */
2660 if (i3dest_killed)
2662 if (GET_CODE (i3dest_killed) == REG)
2663 REG_N_DEATHS (REGNO (i3dest_killed))++;
2665 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2666 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2667 NULL_RTX),
2668 NULL_RTX, i2, NULL_RTX);
2669 else
2670 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2671 NULL_RTX),
2672 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2675 if (i2dest_in_i2src)
2677 if (GET_CODE (i2dest) == REG)
2678 REG_N_DEATHS (REGNO (i2dest))++;
2680 if (newi2pat && reg_set_p (i2dest, newi2pat))
2681 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2682 NULL_RTX, i2, NULL_RTX);
2683 else
2684 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2685 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2688 if (i1dest_in_i1src)
2690 if (GET_CODE (i1dest) == REG)
2691 REG_N_DEATHS (REGNO (i1dest))++;
2693 if (newi2pat && reg_set_p (i1dest, newi2pat))
2694 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2695 NULL_RTX, i2, NULL_RTX);
2696 else
2697 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2698 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2701 distribute_links (i3links);
2702 distribute_links (i2links);
2703 distribute_links (i1links);
2705 if (GET_CODE (i2dest) == REG)
2707 rtx link;
2708 rtx i2_insn = 0, i2_val = 0, set;
2710 /* The insn that used to set this register doesn't exist, and
2711 this life of the register may not exist either. See if one of
2712 I3's links points to an insn that sets I2DEST. If it does,
2713 that is now the last known value for I2DEST. If we don't update
2714 this and I2 set the register to a value that depended on its old
2715 contents, we will get confused. If this insn is used, things
2716 will be set correctly in combine_instructions. */
2718 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2719 if ((set = single_set (XEXP (link, 0))) != 0
2720 && rtx_equal_p (i2dest, SET_DEST (set)))
2721 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2723 record_value_for_reg (i2dest, i2_insn, i2_val);
2725 /* If the reg formerly set in I2 died only once and that was in I3,
2726 zero its use count so it won't make `reload' do any work. */
2727 if (! added_sets_2
2728 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2729 && ! i2dest_in_i2src)
2731 regno = REGNO (i2dest);
2732 REG_N_SETS (regno)--;
2736 if (i1 && GET_CODE (i1dest) == REG)
2738 rtx link;
2739 rtx i1_insn = 0, i1_val = 0, set;
2741 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2742 if ((set = single_set (XEXP (link, 0))) != 0
2743 && rtx_equal_p (i1dest, SET_DEST (set)))
2744 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2746 record_value_for_reg (i1dest, i1_insn, i1_val);
2748 regno = REGNO (i1dest);
2749 if (! added_sets_1 && ! i1dest_in_i1src)
2750 REG_N_SETS (regno)--;
2753 /* Update reg_nonzero_bits et al for any changes that may have been made
2754 to this insn. The order of the set_nonzero_bits_and_sign_copies ()
2755 calls is important, because newi2pat can affect the nonzero_bits of newpat. */
2756 if (newi2pat)
2757 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2758 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2760 /* Set new_direct_jump_p if a new return or simple jump instruction
2761 has been created.
2763 If I3 is now an unconditional jump, ensure that it has a
2764 BARRIER following it since it may have initially been a
2765 conditional jump. It may also be the last nonnote insn. */
2767 if (returnjump_p (i3) || any_uncondjump_p (i3))
2769 *new_direct_jump_p = 1;
2770 mark_jump_label (PATTERN (i3), i3, 0);
2772 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2773 || GET_CODE (temp) != BARRIER)
2774 emit_barrier_after (i3);
2777 if (undobuf.other_insn != NULL_RTX
2778 && (returnjump_p (undobuf.other_insn)
2779 || any_uncondjump_p (undobuf.other_insn)))
2781 *new_direct_jump_p = 1;
2783 if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
2784 || GET_CODE (temp) != BARRIER)
2785 emit_barrier_after (undobuf.other_insn);
2788 /* A NOOP jump does not need a barrier, but the CFG does need to be
2789 cleaned up afterwards. */
2790 if (GET_CODE (newpat) == SET
2791 && SET_SRC (newpat) == pc_rtx
2792 && SET_DEST (newpat) == pc_rtx)
2793 *new_direct_jump_p = 1;
2796 combine_successes++;
2797 undo_commit ();
2799 if (added_links_insn
2800 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2801 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2802 return added_links_insn;
2803 else
2804 return newi2pat ? i2 : i3;
2807 /* Undo all the modifications recorded in undobuf. */
2809 static void
2810 undo_all (void)
2812 struct undo *undo, *next;
2814 for (undo = undobuf.undos; undo; undo = next)
2816 next = undo->next;
2817 if (undo->is_int)
2818 *undo->where.i = undo->old_contents.i;
2819 else
2820 *undo->where.r = undo->old_contents.r;
2822 undo->next = undobuf.frees;
2823 undobuf.frees = undo;
2826 undobuf.undos = 0;
2829 /* We've committed to accepting the changes we made. Move all
2830 of the undos to the free list. */
2832 static void
2833 undo_commit (void)
2835 struct undo *undo, *next;
2837 for (undo = undobuf.undos; undo; undo = next)
2839 next = undo->next;
2840 undo->next = undobuf.frees;
2841 undobuf.frees = undo;
2843 undobuf.undos = 0;
2847 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2848 where we have an arithmetic expression and return that point. LOC will
2849 be inside INSN.
2851 try_combine will call this function to see if an insn can be split into
2852 two insns. */
2854 static rtx *
2855 find_split_point (rtx *loc, rtx insn)
2857 rtx x = *loc;
2858 enum rtx_code code = GET_CODE (x);
2859 rtx *split;
2860 unsigned HOST_WIDE_INT len = 0;
2861 HOST_WIDE_INT pos = 0;
2862 int unsignedp = 0;
2863 rtx inner = NULL_RTX;
2865 /* First special-case some codes. */
2866 switch (code)
2868 case SUBREG:
2869 #ifdef INSN_SCHEDULING
2870 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2871 point. */
2872 if (GET_CODE (SUBREG_REG (x)) == MEM)
2873 return loc;
2874 #endif
2875 return find_split_point (&SUBREG_REG (x), insn);
2877 case MEM:
2878 #ifdef HAVE_lo_sum
2879 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2880 using LO_SUM and HIGH. */
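/* E.g. (mem:SI (symbol_ref ("x"))) becomes

	(mem:SI (lo_sum (high (symbol_ref ("x"))) (symbol_ref ("x"))))

   and the HIGH part is returned as the split point.  */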
2881 if (GET_CODE (XEXP (x, 0)) == CONST
2882 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2884 SUBST (XEXP (x, 0),
2885 gen_rtx_LO_SUM (Pmode,
2886 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
2887 XEXP (x, 0)));
2888 return &XEXP (XEXP (x, 0), 0);
2890 #endif
2892 /* If we have a PLUS whose second operand is a constant and the
2893 address is not valid, perhaps we can split it up using
2894 the machine-specific way to split large constants. We use
2895 the first pseudo-reg (one of the virtual regs) as a placeholder;
2896 it will not remain in the result. */
2897 if (GET_CODE (XEXP (x, 0)) == PLUS
2898 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2899 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2901 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2902 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2903 subst_insn);
2905 /* This should have produced two insns, each of which sets our
2906 placeholder. If the source of the second is a valid address,
2907 we can put both sources together and make a split point
2908 in the middle. */
2910 if (seq
2911 && NEXT_INSN (seq) != NULL_RTX
2912 && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
2913 && GET_CODE (seq) == INSN
2914 && GET_CODE (PATTERN (seq)) == SET
2915 && SET_DEST (PATTERN (seq)) == reg
2916 && ! reg_mentioned_p (reg,
2917 SET_SRC (PATTERN (seq)))
2918 && GET_CODE (NEXT_INSN (seq)) == INSN
2919 && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
2920 && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
2921 && memory_address_p (GET_MODE (x),
2922 SET_SRC (PATTERN (NEXT_INSN (seq)))))
2924 rtx src1 = SET_SRC (PATTERN (seq));
2925 rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
2927 /* Replace the placeholder in SRC2 with SRC1. If we can
2928 find where in SRC2 it was placed, that can become our
2929 split point and we can replace this address with SRC2.
2930 Just try two obvious places. */
2932 src2 = replace_rtx (src2, reg, src1);
2933 split = 0;
2934 if (XEXP (src2, 0) == src1)
2935 split = &XEXP (src2, 0);
2936 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2937 && XEXP (XEXP (src2, 0), 0) == src1)
2938 split = &XEXP (XEXP (src2, 0), 0);
2940 if (split)
2942 SUBST (XEXP (x, 0), src2);
2943 return split;
2947 /* If that didn't work, perhaps the first operand is complex and
2948 needs to be computed separately, so make a split point there.
2949 This will occur on machines that just support REG + CONST
2950 and have a constant moved through some previous computation. */
2952 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2953 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2954 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2955 == 'o')))
2956 return &XEXP (XEXP (x, 0), 0);
2958 break;
2960 case SET:
2961 #ifdef HAVE_cc0
2962 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2963 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2964 we need to put the operand into a register. So split at that
2965 point. */
2967 if (SET_DEST (x) == cc0_rtx
2968 && GET_CODE (SET_SRC (x)) != COMPARE
2969 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2970 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2971 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2972 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2973 return &SET_SRC (x);
2974 #endif
2976 /* See if we can split SET_SRC as it stands. */
2977 split = find_split_point (&SET_SRC (x), insn);
2978 if (split && split != &SET_SRC (x))
2979 return split;
2981 /* See if we can split SET_DEST as it stands. */
2982 split = find_split_point (&SET_DEST (x), insn);
2983 if (split && split != &SET_DEST (x))
2984 return split;
2986 /* See if this is a bitfield assignment with everything constant. If
2987 so, this is an IOR of an AND, so split it into that. */
2988 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2989 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2990 <= HOST_BITS_PER_WIDE_INT)
2991 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2992 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2993 && GET_CODE (SET_SRC (x)) == CONST_INT
2994 && ((INTVAL (XEXP (SET_DEST (x), 1))
2995 + INTVAL (XEXP (SET_DEST (x), 2)))
2996 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2997 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2999 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3000 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3001 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3002 rtx dest = XEXP (SET_DEST (x), 0);
3003 enum machine_mode mode = GET_MODE (dest);
3004 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3006 if (BITS_BIG_ENDIAN)
3007 pos = GET_MODE_BITSIZE (mode) - len - pos;
3009 if (src == mask)
3010 SUBST (SET_SRC (x),
3011 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
3012 else
3013 SUBST (SET_SRC (x),
3014 gen_binary (IOR, mode,
3015 gen_binary (AND, mode, dest,
3016 gen_int_mode (~(mask << pos),
3017 mode)),
3018 GEN_INT (src << pos)));
3020 SUBST (SET_DEST (x), dest);
3022 split = find_split_point (&SET_SRC (x), insn);
3023 if (split && split != &SET_SRC (x))
3024 return split;
3027 /* Otherwise, see if this is an operation that we can split into two.
3028 If so, try to split that. */
3029 code = GET_CODE (SET_SRC (x));
3031 switch (code)
3033 case AND:
3034 /* If we are AND'ing with a large constant that is only a single
3035 bit and the result is only being used in a context where we
3036 need to know if it is zero or nonzero, replace it with a bit
3037 extraction. This will avoid the large constant, which might
3038 have taken more than one insn to make. If the constant were
3039 not a valid argument to the AND but took only one insn to make,
3040 this is no worse, but if it took more than one insn, it will
3041 be better. */
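/* E.g. if (reg:SI 100) is set to (and:SI (reg:SI 101) (const_int 4096))
   and is used only in (ne (reg:SI 100) (const_int 0)), the AND can
   become (zero_extract:SI (reg:SI 101) (const_int 1) (const_int 12)),
   testing the single bit without materializing 4096.  (Registers
   hypothetical.)  */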
3043 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3044 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3045 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3046 && GET_CODE (SET_DEST (x)) == REG
3047 && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3048 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3049 && XEXP (*split, 0) == SET_DEST (x)
3050 && XEXP (*split, 1) == const0_rtx)
3052 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3053 XEXP (SET_SRC (x), 0),
3054 pos, NULL_RTX, 1, 1, 0, 0);
3055 if (extraction != 0)
3057 SUBST (SET_SRC (x), extraction);
3058 return find_split_point (loc, insn);
3061 break;
3063 case NE:
3064 /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3065 is known to be on, this can be converted into a NEG of a shift. */
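/* E.g. if only bit 7 of (reg:SI 100) can be nonzero,
   (ne:SI (reg:SI 100) (const_int 0)) becomes
   (neg:SI (lshiftrt:SI (reg:SI 100) (const_int 7))),
   giving 0 or STORE_FLAG_VALUE (-1) directly.  */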
3066 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3067 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3068 && 1 <= (pos = exact_log2
3069 (nonzero_bits (XEXP (SET_SRC (x), 0),
3070 GET_MODE (XEXP (SET_SRC (x), 0))))))
3072 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3074 SUBST (SET_SRC (x),
3075 gen_rtx_NEG (mode,
3076 gen_rtx_LSHIFTRT (mode,
3077 XEXP (SET_SRC (x), 0),
3078 GEN_INT (pos))));
3080 split = find_split_point (&SET_SRC (x), insn);
3081 if (split && split != &SET_SRC (x))
3082 return split;
3084 break;
3086 case SIGN_EXTEND:
3087 inner = XEXP (SET_SRC (x), 0);
3089 /* We can't optimize if either mode is a partial integer
3090 mode as we don't know how many bits are significant
3091 in those modes. */
3092 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3093 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3094 break;
3096 pos = 0;
3097 len = GET_MODE_BITSIZE (GET_MODE (inner));
3098 unsignedp = 0;
3099 break;
3101 case SIGN_EXTRACT:
3102 case ZERO_EXTRACT:
3103 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3104 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3106 inner = XEXP (SET_SRC (x), 0);
3107 len = INTVAL (XEXP (SET_SRC (x), 1));
3108 pos = INTVAL (XEXP (SET_SRC (x), 2));
3110 if (BITS_BIG_ENDIAN)
3111 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3112 unsignedp = (code == ZERO_EXTRACT);
3114 break;
3116 default:
3117 break;
3120 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3122 enum machine_mode mode = GET_MODE (SET_SRC (x));
3124 /* For unsigned, we have a choice of a shift followed by an
3125 AND or two shifts. Use two shifts for field sizes where the
3126 constant might be too large. We assume here that we can
3127 always at least get 8-bit constants in an AND insn, which is
3128 true for every current RISC. */
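/* E.g. a signed 16-bit field at bit 8 of SImode X becomes

	(ashiftrt:SI (ashift:SI X (const_int 8)) (const_int 16))

   while an unsigned 4-bit field at bit 8 uses the shift-and-AND form
   generated first below.  */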
3130 if (unsignedp && len <= 8)
3132 SUBST (SET_SRC (x),
3133 gen_rtx_AND (mode,
3134 gen_rtx_LSHIFTRT
3135 (mode, gen_lowpart (mode, inner),
3136 GEN_INT (pos)),
3137 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3139 split = find_split_point (&SET_SRC (x), insn);
3140 if (split && split != &SET_SRC (x))
3141 return split;
3143 else
3145 SUBST (SET_SRC (x),
3146 gen_rtx_fmt_ee
3147 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3148 gen_rtx_ASHIFT (mode,
3149 gen_lowpart (mode, inner),
3150 GEN_INT (GET_MODE_BITSIZE (mode)
3151 - len - pos)),
3152 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3154 split = find_split_point (&SET_SRC (x), insn);
3155 if (split && split != &SET_SRC (x))
3156 return split;
3160 /* See if this is a simple operation with a constant as the second
3161 operand. It might be that this constant is out of range and hence
3162 could be used as a split point. */
3163 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3164 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3165 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3166 && CONSTANT_P (XEXP (SET_SRC (x), 1))
3167 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3168 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3169 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3170 == 'o'))))
3171 return &XEXP (SET_SRC (x), 1);
3173 /* Finally, see if this is a simple operation with its first operand
3174 not in a register. The operation might require this operand in a
3175 register, so return it as a split point. We can always do this
3176 because if the first operand were another operation, we would have
3177 already found it as a split point. */
3178 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3179 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3180 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3181 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3182 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3183 return &XEXP (SET_SRC (x), 0);
3185 return 0;
3187 case AND:
3188 case IOR:
3189 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3190 it is better to write this as (not (ior A B)) so we can split it.
3191 Similarly for IOR. */
3192 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3194 SUBST (*loc,
3195 gen_rtx_NOT (GET_MODE (x),
3196 gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3197 GET_MODE (x),
3198 XEXP (XEXP (x, 0), 0),
3199 XEXP (XEXP (x, 1), 0))));
3200 return find_split_point (loc, insn);
3203 /* Many RISC machines have a large set of logical insns. If the
3204 second operand is a NOT, put it first so we will try to split the
3205 other operand first. */
3206 if (GET_CODE (XEXP (x, 1)) == NOT)
3208 rtx tem = XEXP (x, 0);
3209 SUBST (XEXP (x, 0), XEXP (x, 1));
3210 SUBST (XEXP (x, 1), tem);
3212 break;
3214 default:
3215 break;
3218 /* Otherwise, select our actions depending on our rtx class. */
3219 switch (GET_RTX_CLASS (code))
3221 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3222 case '3':
3223 split = find_split_point (&XEXP (x, 2), insn);
3224 if (split)
3225 return split;
3226 /* ... fall through ... */
3227 case '2':
3228 case 'c':
3229 case '<':
3230 split = find_split_point (&XEXP (x, 1), insn);
3231 if (split)
3232 return split;
3233 /* ... fall through ... */
3234 case '1':
3235 /* Some machines have (and (shift ...) ...) insns. If X is not
3236 an AND, but XEXP (X, 0) is, use it as our split point. */
3237 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3238 return &XEXP (x, 0);
3240 split = find_split_point (&XEXP (x, 0), insn);
3241 if (split)
3242 return split;
3243 return loc;
3246 /* Otherwise, we don't have a split point. */
3247 return 0;
3250 /* Throughout X, replace FROM with TO, and return the result.
3251 The result is TO if X is FROM;
3252 otherwise the result is X, but its contents may have been modified.
3253 If they were modified, a record was made in undobuf so that
3254 undo_all will (among other things) return X to its original state.
3256 If the number of changes necessary is too much to record to undo,
3257 the excess changes are not made, so the result is invalid.
3258 The changes already made can still be undone.
3259 undobuf.num_undo is incremented for such changes, so by testing it
3260 the caller can tell whether the result is valid.
3262 `n_occurrences' is incremented each time FROM is replaced.
3264 IN_DEST is nonzero if we are processing the SET_DEST of a SET.
3266 UNIQUE_COPY is nonzero if each substitution must be unique. We do this
3267 by copying if `n_occurrences' is nonzero. */
3269 static rtx
3270 subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
3272 enum rtx_code code = GET_CODE (x);
3273 enum machine_mode op0_mode = VOIDmode;
3274 const char *fmt;
3275 int len, i;
3276 rtx new;
3278 /* Two expressions are equal if they are identical copies of a shared
3279 RTX or if they are both registers with the same register number
3280 and mode. */
3282 #define COMBINE_RTX_EQUAL_P(X,Y) \
3283 ((X) == (Y) \
3284 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3285 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3287 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3289 n_occurrences++;
3290 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3293 /* If X and FROM are the same register but different modes, they will
3294 not have been seen as equal above. However, flow.c will make a
3295 LOG_LINKS entry for that case. If we do nothing, we will try to
3296 rerecognize our original insn and, when it succeeds, we will
3297 delete the feeding insn, which is incorrect.
3299 So force this insn not to match in this (rare) case. */
3300 if (! in_dest && code == REG && GET_CODE (from) == REG
3301 && REGNO (x) == REGNO (from))
3302 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
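   /* A (clobber (const_int 0)) is the canonical "give up" marker in this
      file; callers of subst test for it and reject the combination rather
      than build a larger expression around it.  */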
3304 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3305 of which may contain things that can be combined. */
3306 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3307 return x;
3309 /* It is possible to have a subexpression appear twice in the insn.
3310 Suppose that FROM is a register that appears within TO.
3311 Then, after that subexpression has been scanned once by `subst',
3312 the second time it is scanned, TO may be found. If we were
3313 to scan TO here, we would find FROM within it and create a
3314 self-referent rtl structure which is completely wrong. */
3315 if (COMBINE_RTX_EQUAL_P (x, to))
3316 return to;
3318 /* Parallel asm_operands need special attention because all of the
3319 inputs are shared across the arms. Furthermore, unsharing the
3320 rtl results in recognition failures. Failure to handle this case
3321 specially can result in circular rtl.
3323 Solve this by doing a normal pass across the first entry of the
3324 parallel, and only processing the SET_DESTs of the subsequent
3325 entries. Ug. */
3327 if (code == PARALLEL
3328 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3329 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3331 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3333 /* If this substitution failed, this whole thing fails. */
3334 if (GET_CODE (new) == CLOBBER
3335 && XEXP (new, 0) == const0_rtx)
3336 return new;
3338 SUBST (XVECEXP (x, 0, 0), new);
3340 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3342 rtx dest = SET_DEST (XVECEXP (x, 0, i));
3344 if (GET_CODE (dest) != REG
3345 && GET_CODE (dest) != CC0
3346 && GET_CODE (dest) != PC)
3348 new = subst (dest, from, to, 0, unique_copy);
3350 /* If this substitution failed, this whole thing fails. */
3351 if (GET_CODE (new) == CLOBBER
3352 && XEXP (new, 0) == const0_rtx)
3353 return new;
3355 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3359 else
3361 len = GET_RTX_LENGTH (code);
3362 fmt = GET_RTX_FORMAT (code);
3364 /* We don't need to process a SET_DEST that is a register, CC0,
3365 or PC, so set up to skip this common case. All other cases
3366 where we want to suppress replacing something inside a
3367 SET_SRC are handled via the IN_DEST operand. */
3368 if (code == SET
3369 && (GET_CODE (SET_DEST (x)) == REG
3370 || GET_CODE (SET_DEST (x)) == CC0
3371 || GET_CODE (SET_DEST (x)) == PC))
3372 fmt = "ie";
3374 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3375 constant. */
3376 if (fmt[0] == 'e')
3377 op0_mode = GET_MODE (XEXP (x, 0));
3379 for (i = 0; i < len; i++)
3381 if (fmt[i] == 'E')
3383 int j;
3384 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3386 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3388 new = (unique_copy && n_occurrences
3389 ? copy_rtx (to) : to);
3390 n_occurrences++;
3392 else
3394 new = subst (XVECEXP (x, i, j), from, to, 0,
3395 unique_copy);
3397 /* If this substitution failed, this whole thing
3398 fails. */
3399 if (GET_CODE (new) == CLOBBER
3400 && XEXP (new, 0) == const0_rtx)
3401 return new;
3404 SUBST (XVECEXP (x, i, j), new);
3407 else if (fmt[i] == 'e')
3409 /* If this is a register being set, ignore it. */
3410 new = XEXP (x, i);
3411 if (in_dest
3412 && (code == SUBREG || code == STRICT_LOW_PART
3413 || code == ZERO_EXTRACT)
3414 && i == 0
3415 && GET_CODE (new) == REG)
3418 else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3420 /* In general, don't install a subreg involving two
3421 modes that are not tieable. It can worsen register
3422 allocation, and can even make invalid reload
3423 insns, since the reg inside may need to be copied
3424 from in the outside mode, and that may be invalid
3425 if it is an fp reg copied in integer mode.
3427 We allow two exceptions to this: it is valid if
3428 it is inside another SUBREG and the mode of that
3429 SUBREG and the mode of the inside of TO are
3430 tieable, and it is valid if X is a SET that copies
3431 FROM to CC0. */
3433 if (GET_CODE (to) == SUBREG
3434 && ! MODES_TIEABLE_P (GET_MODE (to),
3435 GET_MODE (SUBREG_REG (to)))
3436 && ! (code == SUBREG
3437 && MODES_TIEABLE_P (GET_MODE (x),
3438 GET_MODE (SUBREG_REG (to))))
3439 #ifdef HAVE_cc0
3440 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3441 #endif
3443 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3445 #ifdef CANNOT_CHANGE_MODE_CLASS
3446 if (code == SUBREG
3447 && GET_CODE (to) == REG
3448 && REGNO (to) < FIRST_PSEUDO_REGISTER
3449 && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
3450 GET_MODE (to),
3451 GET_MODE (x)))
3452 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3453 #endif
3455 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3456 n_occurrences++;
3458 else
3459 /* If we are in a SET_DEST, suppress most cases unless we
3460 have gone inside a MEM, in which case we want to
3461 simplify the address. We assume here that things that
3462 are actually part of the destination have their inner
3463 parts in the first expression. This is true for SUBREG,
3464 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3465 things aside from REG and MEM that should appear in a
3466 SET_DEST. */
3467 new = subst (XEXP (x, i), from, to,
3468 (((in_dest
3469 && (code == SUBREG || code == STRICT_LOW_PART
3470 || code == ZERO_EXTRACT))
3471 || code == SET)
3472 && i == 0), unique_copy);
3474 /* If we found that we will have to reject this combination,
3475 indicate that by returning the CLOBBER ourselves, rather than
3476 an expression containing it. This will speed things up as
3477 well as prevent accidents where two CLOBBERs are considered
3478 to be equal, thus producing an incorrect simplification. */
3480 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3481 return new;
3483 if (GET_CODE (x) == SUBREG
3484 && (GET_CODE (new) == CONST_INT
3485 || GET_CODE (new) == CONST_DOUBLE))
3487 enum machine_mode mode = GET_MODE (x);
3489 x = simplify_subreg (GET_MODE (x), new,
3490 GET_MODE (SUBREG_REG (x)),
3491 SUBREG_BYTE (x));
3492 if (! x)
3493 x = gen_rtx_CLOBBER (mode, const0_rtx);
3495 else if (GET_CODE (new) == CONST_INT
3496 && GET_CODE (x) == ZERO_EXTEND)
3498 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3499 new, GET_MODE (XEXP (x, 0)));
3500 if (! x)
3501 abort ();
3503 else
3504 SUBST (XEXP (x, i), new);
3509 /* Try to simplify X. If the simplification changed the code, it is likely
3510 that further simplification will help, so loop, but limit the number
3511 of repetitions that will be performed. */
3513 for (i = 0; i < 4; i++)
3515 /* If X is sufficiently simple, don't bother trying to do anything
3516 with it. */
3517 if (code != CONST_INT && code != REG && code != CLOBBER)
3518 x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
3520 if (GET_CODE (x) == code)
3521 break;
3523 code = GET_CODE (x);
3525 /* We no longer know the original mode of operand 0 since we
3526 have changed the form of X.  */
3527 op0_mode = VOIDmode;
3530 return x;
3533 /* Simplify X, a piece of RTL. We just operate on the expression at the
3534 outer level; call `subst' to simplify recursively. Return the new
3535 expression.
3537 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3538 will be the last iteration even if an expression with a code different from
3539 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
3541 static rtx
3542 combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int last,
3543 int in_dest)
3545 enum rtx_code code = GET_CODE (x);
3546 enum machine_mode mode = GET_MODE (x);
3547 rtx temp;
3548 rtx reversed;
3549 int i;
3551 /* If this is a commutative operation, put a constant last and a complex
3552 expression first. We don't need to do this for comparisons here. */
3553 if (GET_RTX_CLASS (code) == 'c'
3554 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
3556 temp = XEXP (x, 0);
3557 SUBST (XEXP (x, 0), XEXP (x, 1));
3558 SUBST (XEXP (x, 1), temp);
3561 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3562 sign extension of a PLUS with a constant, reverse the order of the sign
3563 extension and the addition. Note that this is not the same as the original
3564 code, but overflow is undefined for signed values. Also note that the
3565 PLUS will have been partially moved "inside" the sign-extension, so that
3566 the first operand of X will really look like:
3567 (ashiftrt (plus (ashift A C4) C5) C4).
3568 We convert this to
3569 (plus (ashiftrt (ashift A C4) C4) C6), where C6 is (ashiftrt C5 C4),
3570 and replace the first operand of X with that expression. Later parts
3571 of this function may simplify the expression further.
3573 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3574 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3575 distributive law to produce (plus (mult (sign_extend A) C2) C3), where C3 is C1 * C2.
3577 We do this to simplify address expressions. */
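   /* As a concrete illustration, with C4 == 16 on a 32-bit target, the
      first operand (ashiftrt (plus (ashift A 16) C5) 16) becomes
      (plus (ashiftrt (ashift A 16) 16) (ashiftrt C5 16)).  */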
3579 if ((code == PLUS || code == MINUS || code == MULT)
3580 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3581 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3582 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3583 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3584 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3585 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3586 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3587 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3588 XEXP (XEXP (XEXP (x, 0), 0), 1),
3589 XEXP (XEXP (x, 0), 1))) != 0)
3591 rtx new
3592 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3593 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3594 INTVAL (XEXP (XEXP (x, 0), 1)));
3596 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3597 INTVAL (XEXP (XEXP (x, 0), 1)));
3599 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3602 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3603 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3604 things. Check for cases where both arms are testing the same
3605 condition.
3607 Don't do anything if all operands are very simple. */
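   /* E.g., (plus (if_then_else (eq A 0) B C) D) has the value
      (eq A 0) ? (plus B D) : (plus C D); if either arm simplifies, the
      whole thing may become an IF_THEN_ELSE or a store-flag form.  */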
3609 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3610 || GET_RTX_CLASS (code) == '<')
3611 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3612 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3613 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3614 == 'o')))
3615 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3616 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3617 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3618 == 'o')))))
3619 || (GET_RTX_CLASS (code) == '1'
3620 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3621 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3622 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3623 == 'o'))))))
3625 rtx cond, true_rtx, false_rtx;
3627 cond = if_then_else_cond (x, &true_rtx, &false_rtx);
3628 if (cond != 0
3629 /* If everything is a comparison, what we have is highly unlikely
3630 to be simpler, so don't use it. */
3631 && ! (GET_RTX_CLASS (code) == '<'
3632 && (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
3633 || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
3635 rtx cop1 = const0_rtx;
3636 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3638 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3639 return x;
3641 /* Simplify the alternative arms; this may collapse the true and
3642 false arms to store-flag values. Be careful to use copy_rtx
3643 here since true_rtx or false_rtx might share RTL with x as a
3644 result of the if_then_else_cond call above. */
3645 true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
3646 false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);
3648 /* If true_rtx and false_rtx are not general_operands, an if_then_else
3649 is unlikely to be simpler. */
3650 if (general_operand (true_rtx, VOIDmode)
3651 && general_operand (false_rtx, VOIDmode))
3653 enum rtx_code reversed;
3655 /* Restarting if we generate a store-flag expression will cause
3656 us to loop. Just drop through in this case. */
3658 /* If the result values are STORE_FLAG_VALUE and zero, we can
3659 just make the comparison operation. */
3660 if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
3661 x = gen_binary (cond_code, mode, cond, cop1);
3662 else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
3663 && ((reversed = reversed_comparison_code_parts
3664 (cond_code, cond, cop1, NULL))
3665 != UNKNOWN))
3666 x = gen_binary (reversed, mode, cond, cop1);
3668 /* Likewise, we can make the negate of a comparison operation
3669 if the result values are - STORE_FLAG_VALUE and zero. */
3670 else if (GET_CODE (true_rtx) == CONST_INT
3671 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3672 && false_rtx == const0_rtx)
3673 x = simplify_gen_unary (NEG, mode,
3674 gen_binary (cond_code, mode, cond,
3675 cop1),
3676 mode);
3677 else if (GET_CODE (false_rtx) == CONST_INT
3678 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3679 && true_rtx == const0_rtx
3680 && ((reversed = reversed_comparison_code_parts
3681 (cond_code, cond, cop1, NULL))
3682 != UNKNOWN))
3683 x = simplify_gen_unary (NEG, mode,
3684 gen_binary (reversed, mode,
3685 cond, cop1),
3686 mode);
3687 else
3688 return gen_rtx_IF_THEN_ELSE (mode,
3689 gen_binary (cond_code, VOIDmode,
3690 cond, cop1),
3691 true_rtx, false_rtx);
3693 code = GET_CODE (x);
3694 op0_mode = VOIDmode;
3699 /* Try to fold this expression in case we have constants that weren't
3700 present before. */
3701 temp = 0;
3702 switch (GET_RTX_CLASS (code))
3704 case '1':
3705 if (op0_mode == VOIDmode)
3706 op0_mode = GET_MODE (XEXP (x, 0));
3707 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3708 break;
3709 case '<':
3711 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3712 if (cmp_mode == VOIDmode)
3714 cmp_mode = GET_MODE (XEXP (x, 1));
3715 if (cmp_mode == VOIDmode)
3716 cmp_mode = op0_mode;
3718 temp = simplify_relational_operation (code, cmp_mode,
3719 XEXP (x, 0), XEXP (x, 1));
3721 #ifdef FLOAT_STORE_FLAG_VALUE
3722 if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3724 if (temp == const0_rtx)
3725 temp = CONST0_RTX (mode);
3726 else
3727 temp = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE (mode),
3728 mode);
3730 #endif
3731 break;
3732 case 'c':
3733 case '2':
3734 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3735 break;
3736 case 'b':
3737 case '3':
3738 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3739 XEXP (x, 1), XEXP (x, 2));
3740 break;
3743 if (temp)
3745 x = temp;
3746 code = GET_CODE (temp);
3747 op0_mode = VOIDmode;
3748 mode = GET_MODE (temp);
3751 /* First see if we can apply the inverse distributive law. */
3752 if (code == PLUS || code == MINUS
3753 || code == AND || code == IOR || code == XOR)
3755 x = apply_distributive_law (x);
3756 code = GET_CODE (x);
3757 op0_mode = VOIDmode;
3760 /* If CODE is an associative operation not otherwise handled, see if we
3761 can associate some operands. This can win if they are constants or
3762 if they are logically related (i.e. (a & b) & a). */
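   /* For example, (plus (plus A 3) 9) reassociates so the constants
      fold, giving (plus A 12).  */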
3763 if ((code == PLUS || code == MINUS || code == MULT || code == DIV
3764 || code == AND || code == IOR || code == XOR
3765 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3766 && ((INTEGRAL_MODE_P (mode) && code != DIV)
3767 || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
3769 if (GET_CODE (XEXP (x, 0)) == code)
3771 rtx other = XEXP (XEXP (x, 0), 0);
3772 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3773 rtx inner_op1 = XEXP (x, 1);
3774 rtx inner;
3776 /* Make sure we pass the constant operand, if any, as the second
3777 one if this is a commutative operation. */
3778 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3780 rtx tem = inner_op0;
3781 inner_op0 = inner_op1;
3782 inner_op1 = tem;
3784 inner = simplify_binary_operation (code == MINUS ? PLUS
3785 : code == DIV ? MULT
3786 : code,
3787 mode, inner_op0, inner_op1);
3789 /* For commutative operations, try the other pair if that one
3790 didn't simplify. */
3791 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3793 other = XEXP (XEXP (x, 0), 1);
3794 inner = simplify_binary_operation (code, mode,
3795 XEXP (XEXP (x, 0), 0),
3796 XEXP (x, 1));
3799 if (inner)
3800 return gen_binary (code, mode, other, inner);
3804 /* A little bit of algebraic simplification here. */
3805 switch (code)
3807 case MEM:
3808 /* Ensure that our address has any ASHIFTs converted to MULT in case
3809 address-recognizing predicates are called later. */
3810 temp = make_compound_operation (XEXP (x, 0), MEM);
3811 SUBST (XEXP (x, 0), temp);
3812 break;
3814 case SUBREG:
3815 if (op0_mode == VOIDmode)
3816 op0_mode = GET_MODE (SUBREG_REG (x));
3818 /* See if this can be moved to simplify_subreg. */
3819 if (CONSTANT_P (SUBREG_REG (x))
3820 && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
3821 /* Don't call gen_lowpart if the inner mode
3822 is VOIDmode and we cannot simplify it, as SUBREG without
3823 inner mode is invalid. */
3824 && (GET_MODE (SUBREG_REG (x)) != VOIDmode
3825 || gen_lowpart_common (mode, SUBREG_REG (x))))
3826 return gen_lowpart (mode, SUBREG_REG (x));
3828 if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
3829 break;
3831 rtx temp;
3832 temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
3833 SUBREG_BYTE (x));
3834 if (temp)
3835 return temp;
3838 /* Don't change the mode of the MEM if that would change the meaning
3839 of the address. */
3840 if (GET_CODE (SUBREG_REG (x)) == MEM
3841 && (MEM_VOLATILE_P (SUBREG_REG (x))
3842 || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
3843 return gen_rtx_CLOBBER (mode, const0_rtx);
3845 /* Note that we cannot do any narrowing for non-constants since
3846 we might have been counting on using the fact that some bits were
3847 zero. We now do this in the SET. */
3849 break;
3851 case NOT:
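      /* (not (subreg (ashift 1 X) 0)) computes a mask with every bit set
         except bit X; it can be done as (subreg (rotate (not 1) X) 0),
         rotating the all-ones-but-bit-zero constant into place.  */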
3852 if (GET_CODE (XEXP (x, 0)) == SUBREG
3853 && subreg_lowpart_p (XEXP (x, 0))
3854 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3855 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3856 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3857 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3859 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3861 x = gen_rtx_ROTATE (inner_mode,
3862 simplify_gen_unary (NOT, inner_mode, const1_rtx,
3863 inner_mode),
3864 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3865 return gen_lowpart (mode, x);
3868 /* Apply De Morgan's laws to reduce number of patterns for machines
3869 with negating logical insns (and-not, nand, etc.). If result has
3870 only one NOT, put it first, since that is how the patterns are
3871 coded. */
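      /* E.g., (not (ior A B)) becomes (and (not A) (not B)), which maps
         directly onto and-not ("bic"-style) instructions.  */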
3873 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3875 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3876 enum machine_mode op_mode;
3878 op_mode = GET_MODE (in1);
3879 in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
3881 op_mode = GET_MODE (in2);
3882 if (op_mode == VOIDmode)
3883 op_mode = mode;
3884 in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
3886 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
3888 rtx tem = in2;
3889 in2 = in1; in1 = tem;
3892 return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3893 mode, in1, in2);
3895 break;
3897 case NEG:
3898 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3899 if (GET_CODE (XEXP (x, 0)) == XOR
3900 && XEXP (XEXP (x, 0), 1) == const1_rtx
3901 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3902 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3904 temp = expand_compound_operation (XEXP (x, 0));
3906 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3907 replaced by (lshiftrt X C). This will convert
3908 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3910 if (GET_CODE (temp) == ASHIFTRT
3911 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3912 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3913 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3914 INTVAL (XEXP (temp, 1)));
3916 /* If X has only a single bit that might be nonzero, say, bit I, convert
3917 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3918 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3919 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3920 or a SUBREG of one since we'd be making the expression more
3921 complex if it was just a register. */
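      /* For instance, in SImode, if only the low bit of TEMP can be
         nonzero, (neg TEMP) becomes (ashiftrt (ashift TEMP 31) 31).  */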
3923 if (GET_CODE (temp) != REG
3924 && ! (GET_CODE (temp) == SUBREG
3925 && GET_CODE (SUBREG_REG (temp)) == REG)
3926 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3928 rtx temp1 = simplify_shift_const
3929 (NULL_RTX, ASHIFTRT, mode,
3930 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3931 GET_MODE_BITSIZE (mode) - 1 - i),
3932 GET_MODE_BITSIZE (mode) - 1 - i);
3934 /* If all we did was surround TEMP with the two shifts, we
3935 haven't improved anything, so don't use it. Otherwise,
3936 we are better off with TEMP1. */
3937 if (GET_CODE (temp1) != ASHIFTRT
3938 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3939 || XEXP (XEXP (temp1, 0), 0) != temp)
3940 return temp1;
3942 break;
3944 case TRUNCATE:
3945 /* We can't handle truncation to a partial integer mode here
3946 because we don't know the real bitsize of the partial
3947 integer mode. */
3948 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3949 break;
3951 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3952 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3953 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3954 SUBST (XEXP (x, 0),
3955 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3956 GET_MODE_MASK (mode), NULL_RTX, 0));
3958 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3959 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3960 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3961 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3962 return XEXP (XEXP (x, 0), 0);
3964 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3965 (OP:SI foo:SI) if OP is NEG or ABS. */
3966 if ((GET_CODE (XEXP (x, 0)) == ABS
3967 || GET_CODE (XEXP (x, 0)) == NEG)
3968 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3969 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3970 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3971 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
3972 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
3974 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3975 (truncate:SI x). */
3976 if (GET_CODE (XEXP (x, 0)) == SUBREG
3977 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3978 && subreg_lowpart_p (XEXP (x, 0)))
3979 return SUBREG_REG (XEXP (x, 0));
3981 /* If we know that the value is already truncated, we can
3982 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
3983 is nonzero for the corresponding modes. But don't do this
3984 for an (LSHIFTRT (MULT ...)) since this will cause problems
3985 with the umulXi3_highpart patterns. */
3986 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3987 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
3988 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3989 >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
3990 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
3991 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
3992 return gen_lowpart (mode, XEXP (x, 0));
3994 /* A truncate of a comparison can be replaced with a subreg if
3995 STORE_FLAG_VALUE permits. This is like the previous test,
3996 but it works even if the comparison is done in a mode larger
3997 than HOST_BITS_PER_WIDE_INT. */
3998 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3999 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4000 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4001 return gen_lowpart (mode, XEXP (x, 0));
4003 /* Similarly, a truncate of a register whose value is a
4004 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4005 permits. */
4006 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4007 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4008 && (temp = get_last_value (XEXP (x, 0)))
4009 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4010 return gen_lowpart (mode, XEXP (x, 0));
4012 break;
4014 case FLOAT_TRUNCATE:
4015 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4016 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4017 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4018 return XEXP (XEXP (x, 0), 0);
4020 /* (float_truncate:SF (float_truncate:DF foo:XF))
4021 = (float_truncate:SF foo:XF).
4022 This may eliminate double rounding, so it is unsafe.
4024 (float_truncate:SF (float_extend:XF foo:DF))
4025 = (float_truncate:SF foo:DF).
4027 (float_truncate:DF (float_extend:XF foo:SF))
4028 = (float_extend:DF foo:SF). */
4029 if ((GET_CODE (XEXP (x, 0)) == FLOAT_TRUNCATE
4030 && flag_unsafe_math_optimizations)
4031 || GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND)
4032 return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0),
4033 0)))
4034 > GET_MODE_SIZE (mode)
4035 ? FLOAT_TRUNCATE : FLOAT_EXTEND,
4036 mode,
4037 XEXP (XEXP (x, 0), 0), mode);
4039 /* (float_truncate (float x)) is (float x) */
4040 if (GET_CODE (XEXP (x, 0)) == FLOAT
4041 && (flag_unsafe_math_optimizations
4042 || ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
4043 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
4044 - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
4045 GET_MODE (XEXP (XEXP (x, 0), 0)))))))
4046 return simplify_gen_unary (FLOAT, mode,
4047 XEXP (XEXP (x, 0), 0),
4048 GET_MODE (XEXP (XEXP (x, 0), 0)));
4050 /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
4051 (OP:SF foo:SF) if OP is NEG or ABS. */
4052 if ((GET_CODE (XEXP (x, 0)) == ABS
4053 || GET_CODE (XEXP (x, 0)) == NEG)
4054 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4055 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4056 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4057 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4059 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4060 is (float_truncate:SF x). */
4061 if (GET_CODE (XEXP (x, 0)) == SUBREG
4062 && subreg_lowpart_p (XEXP (x, 0))
4063 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4064 return SUBREG_REG (XEXP (x, 0));
4065 break;
4066 case FLOAT_EXTEND:
4067 /* (float_extend (float_extend x)) is (float_extend x)
4069 (float_extend (float x)) is (float x) assuming that double
4070 rounding can't happen.  */
4072 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4073 || (GET_CODE (XEXP (x, 0)) == FLOAT
4074 && ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
4075 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
4076 - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
4077 GET_MODE (XEXP (XEXP (x, 0), 0)))))))
4078 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4079 XEXP (XEXP (x, 0), 0),
4080 GET_MODE (XEXP (XEXP (x, 0), 0)));
4082 break;
4083 #ifdef HAVE_cc0
4084 case COMPARE:
4085 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4086 using cc0, in which case we want to leave it as a COMPARE
4087 so we can distinguish it from a register-register-copy. */
4088 if (XEXP (x, 1) == const0_rtx)
4089 return XEXP (x, 0);
4091 /* x - 0 is the same as x unless x's mode has signed zeros and
4092 allows rounding towards -infinity. Under those conditions,
4093 0 - 0 is -0. */
4094 if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
4095 && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
4096 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4097 return XEXP (x, 0);
4098 break;
4099 #endif
4101 case CONST:
4102 /* (const (const X)) can become (const X). Do it this way rather than
4103 returning the inner CONST since CONST can be shared with a
4104 REG_EQUAL note. */
4105 if (GET_CODE (XEXP (x, 0)) == CONST)
4106 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4107 break;
4109 #ifdef HAVE_lo_sum
4110 case LO_SUM:
4111 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4112 can add in an offset. find_split_point will split this address up
4113 again if it doesn't match. */
4114 if (GET_CODE (XEXP (x, 0)) == HIGH
4115 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4116 return XEXP (x, 1);
4117 break;
4118 #endif
4120 case PLUS:
4121 /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
4123 if (GET_CODE (XEXP (x, 0)) == MULT
4124 && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG)
4126 rtx in1, in2;
4128 in1 = XEXP (XEXP (XEXP (x, 0), 0), 0);
4129 in2 = XEXP (XEXP (x, 0), 1);
4130 return gen_binary (MINUS, mode, XEXP (x, 1),
4131 gen_binary (MULT, mode, in1, in2));
4134 /* If we have (plus (plus A const) B), associate it so that CONST is
4135 outermost. That's because that's the way indexed addresses are
4136 supposed to appear. This code used to check many more cases, but
4137 they are now checked elsewhere. */
4138 if (GET_CODE (XEXP (x, 0)) == PLUS
4139 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4140 return gen_binary (PLUS, mode,
4141 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4142 XEXP (x, 1)),
4143 XEXP (XEXP (x, 0), 1));
4145 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4146 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4147 bit-field and can be replaced by either a sign_extend or a
4148 sign_extract. The `and' may be a zero_extend and the two
4149 <c>, -<c> constants may be reversed. */
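      /* E.g., in SImode, (plus (xor (and X 255) 128) -128) sign-extends
         the low byte of X: here i == 7, and the result is
         (ashiftrt (ashift X 24) 24).  */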
4150 if (GET_CODE (XEXP (x, 0)) == XOR
4151 && GET_CODE (XEXP (x, 1)) == CONST_INT
4152 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4153 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4154 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4155 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4156 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4157 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4158 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4159 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4160 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4161 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4162 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4163 == (unsigned int) i + 1))))
4164 return simplify_shift_const
4165 (NULL_RTX, ASHIFTRT, mode,
4166 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4167 XEXP (XEXP (XEXP (x, 0), 0), 0),
4168 GET_MODE_BITSIZE (mode) - (i + 1)),
4169 GET_MODE_BITSIZE (mode) - (i + 1));
4171 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4172 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4173 is 1. This produces better code than the alternative immediately
4174 below. */
4175 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4176 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4177 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4178 && (reversed = reversed_comparison (XEXP (x, 0), mode,
4179 XEXP (XEXP (x, 0), 0),
4180 XEXP (XEXP (x, 0), 1))))
4181 return
4182 simplify_gen_unary (NEG, mode, reversed, mode);
4184 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4185 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4186 the bitsize of the mode - 1. This allows simplification of
4187 "a = (b & 8) == 0;" */
4188 if (XEXP (x, 1) == constm1_rtx
4189 && GET_CODE (XEXP (x, 0)) != REG
4190 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4191 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
4192 && nonzero_bits (XEXP (x, 0), mode) == 1)
4193 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4194 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4195 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4196 GET_MODE_BITSIZE (mode) - 1),
4197 GET_MODE_BITSIZE (mode) - 1);
4199 /* If we are adding two things that have no bits in common, convert
4200 the addition into an IOR. This will often be further simplified,
4201 for example in cases like ((a & 1) + (a & 2)), which can
4202 become a & 3. */
4204 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4205 && (nonzero_bits (XEXP (x, 0), mode)
4206 & nonzero_bits (XEXP (x, 1), mode)) == 0)
4208 /* Try to simplify the expression further. */
4209 rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4210 temp = combine_simplify_rtx (tor, mode, last, in_dest);
4212 /* If we could, great. If not, do not go ahead with the IOR
4213 replacement, since PLUS appears in many special purpose
4214 address arithmetic instructions. */
4215 if (GET_CODE (temp) != CLOBBER && temp != tor)
4216 return temp;
4218 break;
4220 case MINUS:
4221 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4222 by reversing the comparison code if valid. */
4223 if (STORE_FLAG_VALUE == 1
4224 && XEXP (x, 0) == const1_rtx
4225 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4226 && (reversed = reversed_comparison (XEXP (x, 1), mode,
4227 XEXP (XEXP (x, 1), 0),
4228 XEXP (XEXP (x, 1), 1))))
4229 return reversed;
4231 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4232 (and <foo> (const_int pow2-1)) */
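      /* E.g., (minus X (and X -8)) becomes (and X 7), keeping just the
         low three bits of X.  */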
4233 if (GET_CODE (XEXP (x, 1)) == AND
4234 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4235 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4236 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4237 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4238 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4240 /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
4242 if (GET_CODE (XEXP (x, 1)) == MULT
4243 && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG)
4245 rtx in1, in2;
4247 in1 = XEXP (XEXP (XEXP (x, 1), 0), 0);
4248 in2 = XEXP (XEXP (x, 1), 1);
4249 return gen_binary (PLUS, mode, gen_binary (MULT, mode, in1, in2),
4250 XEXP (x, 0));
4253 /* Canonicalize (minus (neg A) (mult B C)) to
4254 (minus (mult (neg B) C) A). */
4255 if (GET_CODE (XEXP (x, 1)) == MULT
4256 && GET_CODE (XEXP (x, 0)) == NEG)
4258 rtx in1, in2;
4260 in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode);
4261 in2 = XEXP (XEXP (x, 1), 1);
4262 return gen_binary (MINUS, mode, gen_binary (MULT, mode, in1, in2),
4263 XEXP (XEXP (x, 0), 0));
4266 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4267 integers. */
4268 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4269 return gen_binary (MINUS, mode,
4270 gen_binary (MINUS, mode, XEXP (x, 0),
4271 XEXP (XEXP (x, 1), 0)),
4272 XEXP (XEXP (x, 1), 1));
4273 break;
4275 case MULT:
4276 /* If we have (mult (plus A B) C), apply the distributive law and then
4277 the inverse distributive law to see if things simplify. This
4278 occurs mostly in addresses, often when unrolling loops. */
4280 if (GET_CODE (XEXP (x, 0)) == PLUS)
4282 x = apply_distributive_law
4283 (gen_binary (PLUS, mode,
4284 gen_binary (MULT, mode,
4285 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4286 gen_binary (MULT, mode,
4287 XEXP (XEXP (x, 0), 1),
4288 copy_rtx (XEXP (x, 1)))));
4290 if (GET_CODE (x) != MULT)
4291 return x;
4293 /* Try simplifying a*(b/c) as (a*b)/c. */
4294 if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4295 && GET_CODE (XEXP (x, 0)) == DIV)
4297 rtx tem = simplify_binary_operation (MULT, mode,
4298 XEXP (XEXP (x, 0), 0),
4299 XEXP (x, 1));
4300 if (tem)
4301 return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4303 break;
4305 case UDIV:
4306 /* If this is a divide by a power of two, treat it as a shift if
4307 its first operand is a shift. */
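      /* E.g., (udiv (lshiftrt X 2) 4) becomes (lshiftrt X 4), since the
         two logical shifts combine.  */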
4308 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4309 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4310 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4311 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4312 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4313 || GET_CODE (XEXP (x, 0)) == ROTATE
4314 || GET_CODE (XEXP (x, 0)) == ROTATERT))
4315 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4316 break;
4318 case EQ: case NE:
4319 case GT: case GTU: case GE: case GEU:
4320 case LT: case LTU: case LE: case LEU:
4321 case UNEQ: case LTGT:
4322 case UNGT: case UNGE:
4323 case UNLT: case UNLE:
4324 case UNORDERED: case ORDERED:
4325 /* If the first operand is a condition code, we can't do anything
4326 with it. */
4327 if (GET_CODE (XEXP (x, 0)) == COMPARE
4328 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4329 && ! CC0_P (XEXP (x, 0))))
4331 rtx op0 = XEXP (x, 0);
4332 rtx op1 = XEXP (x, 1);
4333 enum rtx_code new_code;
4335 if (GET_CODE (op0) == COMPARE)
4336 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4338 /* Simplify our comparison, if possible. */
4339 new_code = simplify_comparison (code, &op0, &op1);
4341 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4342 if only the low-order bit is possibly nonzero in X (such as when
4343 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4344 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4345 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4346 (plus X 1).
4348 Remove any ZERO_EXTRACT we made when thinking this was a
4349 comparison. It may now be simpler to use, e.g., an AND. If a
4350 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4351 the call to make_compound_operation in the SET case. */
4353 if (STORE_FLAG_VALUE == 1
4354 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4355 && op1 == const0_rtx
4356 && mode == GET_MODE (op0)
4357 && nonzero_bits (op0, mode) == 1)
4358 return gen_lowpart (mode,
4359 expand_compound_operation (op0));
4361 else if (STORE_FLAG_VALUE == 1
4362 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4363 && op1 == const0_rtx
4364 && mode == GET_MODE (op0)
4365 && (num_sign_bit_copies (op0, mode)
4366 == GET_MODE_BITSIZE (mode)))
4368 op0 = expand_compound_operation (op0);
4369 return simplify_gen_unary (NEG, mode,
4370 gen_lowpart (mode, op0),
4371 mode);
4374 else if (STORE_FLAG_VALUE == 1
4375 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4376 && op1 == const0_rtx
4377 && mode == GET_MODE (op0)
4378 && nonzero_bits (op0, mode) == 1)
4380 op0 = expand_compound_operation (op0);
4381 return gen_binary (XOR, mode,
4382 gen_lowpart (mode, op0),
4383 const1_rtx);
4386 else if (STORE_FLAG_VALUE == 1
4387 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4388 && op1 == const0_rtx
4389 && mode == GET_MODE (op0)
4390 && (num_sign_bit_copies (op0, mode)
4391 == GET_MODE_BITSIZE (mode)))
4393 op0 = expand_compound_operation (op0);
4394 return plus_constant (gen_lowpart (mode, op0), 1);
4397 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4398 those above. */
4399 if (STORE_FLAG_VALUE == -1
4400 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4401 && op1 == const0_rtx
4402 && (num_sign_bit_copies (op0, mode)
4403 == GET_MODE_BITSIZE (mode)))
4404 return gen_lowpart (mode,
4405 expand_compound_operation (op0));
4407 else if (STORE_FLAG_VALUE == -1
4408 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4409 && op1 == const0_rtx
4410 && mode == GET_MODE (op0)
4411 && nonzero_bits (op0, mode) == 1)
4413 op0 = expand_compound_operation (op0);
4414 return simplify_gen_unary (NEG, mode,
4415 gen_lowpart (mode, op0),
4416 mode);
4419 else if (STORE_FLAG_VALUE == -1
4420 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4421 && op1 == const0_rtx
4422 && mode == GET_MODE (op0)
4423 && (num_sign_bit_copies (op0, mode)
4424 == GET_MODE_BITSIZE (mode)))
4426 op0 = expand_compound_operation (op0);
4427 return simplify_gen_unary (NOT, mode,
4428 gen_lowpart (mode, op0),
4429 mode);
4432 /* If X is 0/1, (eq X 0) is X-1. */
4433 else if (STORE_FLAG_VALUE == -1
4434 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4435 && op1 == const0_rtx
4436 && mode == GET_MODE (op0)
4437 && nonzero_bits (op0, mode) == 1)
4439 op0 = expand_compound_operation (op0);
4440 return plus_constant (gen_lowpart (mode, op0), -1);
4443 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4444 one bit that might be nonzero, we can convert (ne x 0) to
4445 (ashift x c) where C puts the bit in the sign bit. Remove any
4446 AND with STORE_FLAG_VALUE when we are done, since we are only
4447 going to test the sign bit. */
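      /* E.g., in SImode with STORE_FLAG_VALUE == 0x80000000, if only bit 3
         of X can be nonzero, (ne X 0) becomes (ashift X 28), which moves
         bit 3 into the sign bit.  */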
4448 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4449 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4450 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4451 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4452 && op1 == const0_rtx
4453 && mode == GET_MODE (op0)
4454 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4456 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4457 expand_compound_operation (op0),
4458 GET_MODE_BITSIZE (mode) - 1 - i);
4459 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4460 return XEXP (x, 0);
4461 else
4462 return x;
4465 /* If the code changed, return a whole new comparison. */
4466 if (new_code != code)
4467 return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4469 /* Otherwise, keep this operation, but maybe change its operands.
4470 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4471 SUBST (XEXP (x, 0), op0);
4472 SUBST (XEXP (x, 1), op1);
4474 break;
4476 case IF_THEN_ELSE:
4477 return simplify_if_then_else (x);
4479 case ZERO_EXTRACT:
4480 case SIGN_EXTRACT:
4481 case ZERO_EXTEND:
4482 case SIGN_EXTEND:
4483 /* If we are processing SET_DEST, we are done. */
4484 if (in_dest)
4485 return x;
4487 return expand_compound_operation (x);
4489 case SET:
4490 return simplify_set (x);
4492 case AND:
4493 case IOR:
4494 case XOR:
4495 return simplify_logical (x, last);
4497 case ABS:
4498 /* (abs (neg <foo>)) -> (abs <foo>) */
4499 if (GET_CODE (XEXP (x, 0)) == NEG)
4500 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4502 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4503 do nothing. */
4504 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4505 break;
4507 /* If operand is something known to be positive, ignore the ABS. */
4508 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4509 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4510 <= HOST_BITS_PER_WIDE_INT)
4511 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4512 & ((HOST_WIDE_INT) 1
4513 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4514 == 0)))
4515 return XEXP (x, 0);
4517 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4518 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4519 return gen_rtx_NEG (mode, XEXP (x, 0));
4521 break;
4523 case FFS:
4524 /* (ffs (*_extend <X>)) = (ffs <X>) */
4525 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4526 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4527 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4528 break;
4530 case POPCOUNT:
4531 case PARITY:
4532 /* (pop* (zero_extend <X>)) = (pop* <X>) */
4533 if (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4534 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4535 break;
4537 case FLOAT:
4538 /* (float (sign_extend <X>)) = (float <X>). */
4539 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4540 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4541 break;
4543 case ASHIFT:
4544 case LSHIFTRT:
4545 case ASHIFTRT:
4546 case ROTATE:
4547 case ROTATERT:
4548 /* If this is a shift by a constant amount, simplify it. */
4549 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4550 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4551 INTVAL (XEXP (x, 1)));
4553 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4554 SUBST (XEXP (x, 1),
4555 force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4556 ((HOST_WIDE_INT) 1
4557 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4558 - 1,
4559 NULL_RTX, 0));
4560 break;
4562 case VEC_SELECT:
4564 rtx op0 = XEXP (x, 0);
4565 rtx op1 = XEXP (x, 1);
4566 int len;
4568 if (GET_CODE (op1) != PARALLEL)
4569 abort ();
4570 len = XVECLEN (op1, 0);
4571 if (len == 1
4572 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4573 && GET_CODE (op0) == VEC_CONCAT)
4575 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4577 /* Try to find the element in the VEC_CONCAT. */
4578 for (;;)
4580 if (GET_MODE (op0) == GET_MODE (x))
4581 return op0;
4582 if (GET_CODE (op0) == VEC_CONCAT)
4584 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4585 if (offset < op0_size)
4586 op0 = XEXP (op0, 0);
4587 else
4589 offset -= op0_size;
4590 op0 = XEXP (op0, 1);
4593 else
4594 break;
4599 break;
4601 default:
4602 break;
4605 return x;
4608 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
4610 static rtx
4611 simplify_if_then_else (rtx x)
4613 enum machine_mode mode = GET_MODE (x);
4614 rtx cond = XEXP (x, 0);
4615 rtx true_rtx = XEXP (x, 1);
4616 rtx false_rtx = XEXP (x, 2);
4617 enum rtx_code true_code = GET_CODE (cond);
4618 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4619 rtx temp;
4620 int i;
4621 enum rtx_code false_code;
4622 rtx reversed;
4624 /* Simplify storing of the truth value. */
4625 if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4626 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4628 /* Also when the truth value has to be reversed. */
4629 if (comparison_p
4630 && true_rtx == const0_rtx && false_rtx == const_true_rtx
4631 && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4632 XEXP (cond, 1))))
4633 return reversed;
4635 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4636 in it is being compared against certain values. Get the true and false
4637 comparisons and see if that says anything about the value of each arm. */
4639 if (comparison_p
4640 && ((false_code = combine_reversed_comparison_code (cond))
4641 != UNKNOWN)
4642 && GET_CODE (XEXP (cond, 0)) == REG)
4644 HOST_WIDE_INT nzb;
4645 rtx from = XEXP (cond, 0);
4646 rtx true_val = XEXP (cond, 1);
4647 rtx false_val = true_val;
4648 int swapped = 0;
4650 /* If FALSE_CODE is EQ, swap the codes and arms. */
4652 if (false_code == EQ)
4654 swapped = 1, true_code = EQ, false_code = NE;
4655 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4658 /* If we are comparing against zero and the expression being tested has
4659 only a single bit that might be nonzero, that is its value when it is
4660 not equal to zero. Similarly if it is known to be -1 or 0. */
4662 if (true_code == EQ && true_val == const0_rtx
4663 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4664 false_code = EQ, false_val = GEN_INT (nzb);
4665 else if (true_code == EQ && true_val == const0_rtx
4666 && (num_sign_bit_copies (from, GET_MODE (from))
4667 == GET_MODE_BITSIZE (GET_MODE (from))))
4668 false_code = EQ, false_val = constm1_rtx;
4670 /* Now simplify an arm if we know the value of the register in the
4671 branch and it is used in the arm. Be careful due to the potential
4672 of locally-shared RTL. */
4674 if (reg_mentioned_p (from, true_rtx))
4675 true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4676 from, true_val),
4677 pc_rtx, pc_rtx, 0, 0);
4678 if (reg_mentioned_p (from, false_rtx))
4679 false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4680 from, false_val),
4681 pc_rtx, pc_rtx, 0, 0);
4683 SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4684 SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4686 true_rtx = XEXP (x, 1);
4687 false_rtx = XEXP (x, 2);
4688 true_code = GET_CODE (cond);
4691 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4692 reversed, do so to avoid needing two sets of patterns for
4693 subtract-and-branch insns. Similarly if we have a constant in the true
4694 arm, the false arm is the same as the first operand of the comparison, or
4695 the false arm is more complicated than the true arm. */
4697 if (comparison_p
4698 && combine_reversed_comparison_code (cond) != UNKNOWN
4699 && (true_rtx == pc_rtx
4700 || (CONSTANT_P (true_rtx)
4701 && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4702 || true_rtx == const0_rtx
4703 || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4704 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4705 || (GET_CODE (true_rtx) == SUBREG
4706 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4707 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4708 || reg_mentioned_p (true_rtx, false_rtx)
4709 || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4711 true_code = reversed_comparison_code (cond, NULL);
4712 SUBST (XEXP (x, 0),
4713 reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4714 XEXP (cond, 1)));
4716 SUBST (XEXP (x, 1), false_rtx);
4717 SUBST (XEXP (x, 2), true_rtx);
4719 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4720 cond = XEXP (x, 0);
4722 /* It is possible that the conditional has been simplified out. */
4723 true_code = GET_CODE (cond);
4724 comparison_p = GET_RTX_CLASS (true_code) == '<';
4727 /* If the two arms are identical, we don't need the comparison. */
4729 if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4730 return true_rtx;
4732 /* Convert a == b ? b : a to "a". */
4733 if (true_code == EQ && ! side_effects_p (cond)
4734 && !HONOR_NANS (mode)
4735 && rtx_equal_p (XEXP (cond, 0), false_rtx)
4736 && rtx_equal_p (XEXP (cond, 1), true_rtx))
4737 return false_rtx;
4738 else if (true_code == NE && ! side_effects_p (cond)
4739 && !HONOR_NANS (mode)
4740 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4741 && rtx_equal_p (XEXP (cond, 1), false_rtx))
4742 return true_rtx;
4744 /* Look for cases where we have (abs x) or (neg (abs X)). */
4746 if (GET_MODE_CLASS (mode) == MODE_INT
4747 && GET_CODE (false_rtx) == NEG
4748 && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4749 && comparison_p
4750 && rtx_equal_p (true_rtx, XEXP (cond, 0))
4751 && ! side_effects_p (true_rtx))
4752 switch (true_code)
4754 case GT:
4755 case GE:
4756 return simplify_gen_unary (ABS, mode, true_rtx, mode);
4757 case LT:
4758 case LE:
4759 return
4760 simplify_gen_unary (NEG, mode,
4761 simplify_gen_unary (ABS, mode, true_rtx, mode),
4762 mode);
4763 default:
4764 break;
4767 /* Look for MIN or MAX. */
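   /* E.g., (if_then_else (ge A B) A B) is (smax A B), and the unsigned
      comparisons GEU/GTU and LEU/LTU give umax and umin instead.  */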
4769 if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4770 && comparison_p
4771 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4772 && rtx_equal_p (XEXP (cond, 1), false_rtx)
4773 && ! side_effects_p (cond))
4774 switch (true_code)
4776 case GE:
4777 case GT:
4778 return gen_binary (SMAX, mode, true_rtx, false_rtx);
4779 case LE:
4780 case LT:
4781 return gen_binary (SMIN, mode, true_rtx, false_rtx);
4782 case GEU:
4783 case GTU:
4784 return gen_binary (UMAX, mode, true_rtx, false_rtx);
4785 case LEU:
4786 case LTU:
4787 return gen_binary (UMIN, mode, true_rtx, false_rtx);
4788 default:
4789 break;
4792 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4793 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4794 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4795 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4796 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4797 neither 1 nor -1, but it isn't worth checking for. */
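   /* E.g., with STORE_FLAG_VALUE == 1,
      (if_then_else (lt A B) (plus Z (const_int 4)) Z)
      becomes (plus Z (mult (lt A B) (const_int 4))).  */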
4799 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4800 && comparison_p
4801 && GET_MODE_CLASS (mode) == MODE_INT
4802 && ! side_effects_p (x))
4804 rtx t = make_compound_operation (true_rtx, SET);
4805 rtx f = make_compound_operation (false_rtx, SET);
4806 rtx cond_op0 = XEXP (cond, 0);
4807 rtx cond_op1 = XEXP (cond, 1);
4808 enum rtx_code op = NIL, extend_op = NIL;
4809 enum machine_mode m = mode;
4810 rtx z = 0, c1 = NULL_RTX;
4812 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4813 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4814 || GET_CODE (t) == ASHIFT
4815 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4816 && rtx_equal_p (XEXP (t, 0), f))
4817 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4819 /* If an identity-zero op is commutative, check whether there
4820 would be a match if we swapped the operands. */
4821 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4822 || GET_CODE (t) == XOR)
4823 && rtx_equal_p (XEXP (t, 1), f))
4824 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4825 else if (GET_CODE (t) == SIGN_EXTEND
4826 && (GET_CODE (XEXP (t, 0)) == PLUS
4827 || GET_CODE (XEXP (t, 0)) == MINUS
4828 || GET_CODE (XEXP (t, 0)) == IOR
4829 || GET_CODE (XEXP (t, 0)) == XOR
4830 || GET_CODE (XEXP (t, 0)) == ASHIFT
4831 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4832 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4833 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4834 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4835 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4836 && (num_sign_bit_copies (f, GET_MODE (f))
4837 > (unsigned int)
4838 (GET_MODE_BITSIZE (mode)
4839 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4841 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4842 extend_op = SIGN_EXTEND;
4843 m = GET_MODE (XEXP (t, 0));
4845 else if (GET_CODE (t) == SIGN_EXTEND
4846 && (GET_CODE (XEXP (t, 0)) == PLUS
4847 || GET_CODE (XEXP (t, 0)) == IOR
4848 || GET_CODE (XEXP (t, 0)) == XOR)
4849 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4850 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4851 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4852 && (num_sign_bit_copies (f, GET_MODE (f))
4853 > (unsigned int)
4854 (GET_MODE_BITSIZE (mode)
4855 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4857 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4858 extend_op = SIGN_EXTEND;
4859 m = GET_MODE (XEXP (t, 0));
4861 else if (GET_CODE (t) == ZERO_EXTEND
4862 && (GET_CODE (XEXP (t, 0)) == PLUS
4863 || GET_CODE (XEXP (t, 0)) == MINUS
4864 || GET_CODE (XEXP (t, 0)) == IOR
4865 || GET_CODE (XEXP (t, 0)) == XOR
4866 || GET_CODE (XEXP (t, 0)) == ASHIFT
4867 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4868 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4869 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4870 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4871 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4872 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4873 && ((nonzero_bits (f, GET_MODE (f))
4874 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4875 == 0))
4877 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4878 extend_op = ZERO_EXTEND;
4879 m = GET_MODE (XEXP (t, 0));
4881 else if (GET_CODE (t) == ZERO_EXTEND
4882 && (GET_CODE (XEXP (t, 0)) == PLUS
4883 || GET_CODE (XEXP (t, 0)) == IOR
4884 || GET_CODE (XEXP (t, 0)) == XOR)
4885 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4886 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4887 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4888 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4889 && ((nonzero_bits (f, GET_MODE (f))
4890 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4891 == 0))
4893 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4894 extend_op = ZERO_EXTEND;
4895 m = GET_MODE (XEXP (t, 0));
4898 if (z)
4900 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4901 pc_rtx, pc_rtx, 0, 0);
4902 temp = gen_binary (MULT, m, temp,
4903 gen_binary (MULT, m, c1, const_true_rtx));
4904 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4905 temp = gen_binary (op, m, gen_lowpart (m, z), temp);
4907 if (extend_op != NIL)
4908 temp = simplify_gen_unary (extend_op, mode, temp, m);
4910 return temp;
4914 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4915 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4916 negation of a single bit, we can convert this operation to a shift. We
4917 can actually do this more generally, but it doesn't seem worth it. */
4919 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4920 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
4921 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4922 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
4923 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4924 == GET_MODE_BITSIZE (mode))
4925 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
4926 return
4927 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4928 gen_lowpart (mode, XEXP (cond, 0)), i);
4930 /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG when nonzero_bits (REG) == 8. */
4931 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4932 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
4933 && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
4934 == nonzero_bits (XEXP (cond, 0), mode)
4935 && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
4936 return XEXP (cond, 0);
4938 return x;
4941 /* Simplify X, a SET expression. Return the new expression. */
4943 static rtx
4944 simplify_set (rtx x)
4946 rtx src = SET_SRC (x);
4947 rtx dest = SET_DEST (x);
4948 enum machine_mode mode
4949 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4950 rtx other_insn;
4951 rtx *cc_use;
4953 /* (set (pc) (return)) gets written as (return). */
4954 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4955 return src;
4957 /* Now that we know for sure which bits of SRC we are using, see if we can
4958 simplify the expression for the object knowing that we only need the
4959 low-order bits. */
4961 if (GET_MODE_CLASS (mode) == MODE_INT
4962 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4964 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
4965 SUBST (SET_SRC (x), src);
4968 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4969 the comparison result and try to simplify it unless we already have used
4970 undobuf.other_insn. */
4971 if ((GET_MODE_CLASS (mode) == MODE_CC
4972 || GET_CODE (src) == COMPARE
4973 || CC0_P (dest))
4974 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4975 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4976 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4977 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4979 enum rtx_code old_code = GET_CODE (*cc_use);
4980 enum rtx_code new_code;
4981 rtx op0, op1, tmp;
4982 int other_changed = 0;
4983 enum machine_mode compare_mode = GET_MODE (dest);
4984 enum machine_mode tmp_mode;
4986 if (GET_CODE (src) == COMPARE)
4987 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4988 else
4989 op0 = src, op1 = const0_rtx;
4991 /* Check whether the comparison is known at compile time. */
4992 if (GET_MODE (op0) != VOIDmode)
4993 tmp_mode = GET_MODE (op0);
4994 else if (GET_MODE (op1) != VOIDmode)
4995 tmp_mode = GET_MODE (op1);
4996 else
4997 tmp_mode = compare_mode;
4998 tmp = simplify_relational_operation (old_code, tmp_mode, op0, op1);
4999 if (tmp != NULL_RTX)
5001 rtx pat = PATTERN (other_insn);
5002 undobuf.other_insn = other_insn;
5003 SUBST (*cc_use, tmp);
5005 /* Attempt to simplify CC user. */
5006 if (GET_CODE (pat) == SET)
5008 rtx new = simplify_rtx (SET_SRC (pat));
5009 if (new != NULL_RTX)
5010 SUBST (SET_SRC (pat), new);
5013 /* Convert X into a no-op move. */
5014 SUBST (SET_DEST (x), pc_rtx);
5015 SUBST (SET_SRC (x), pc_rtx);
5016 return x;
5019 /* Simplify our comparison, if possible. */
5020 new_code = simplify_comparison (old_code, &op0, &op1);
5022 #ifdef SELECT_CC_MODE
5023 /* If this machine has CC modes other than CCmode, check to see if we
5024 need to use a different CC mode here. */
5025 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5027 #ifndef HAVE_cc0
5028 /* If the mode changed, we have to change SET_DEST, the mode in the
5029 compare, and the mode in the place SET_DEST is used. If SET_DEST is
5030 a hard register, just build new versions with the proper mode. If it
5031 is a pseudo, we lose unless it is only time we set the pseudo, in
5032 which case we can safely change its mode. */
5033 if (compare_mode != GET_MODE (dest))
5035 unsigned int regno = REGNO (dest);
5036 rtx new_dest = gen_rtx_REG (compare_mode, regno);
5038 if (regno < FIRST_PSEUDO_REGISTER
5039 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
5041 if (regno >= FIRST_PSEUDO_REGISTER)
5042 SUBST (regno_reg_rtx[regno], new_dest);
5044 SUBST (SET_DEST (x), new_dest);
5045 SUBST (XEXP (*cc_use, 0), new_dest);
5046 other_changed = 1;
5048 dest = new_dest;
5051 #endif /* cc0 */
5052 #endif /* SELECT_CC_MODE */
5054 /* If the code changed, we have to build a new comparison in
5055 undobuf.other_insn. */
5056 if (new_code != old_code)
5058 int other_changed_previously = other_changed;
5059 unsigned HOST_WIDE_INT mask;
5061 SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5062 dest, const0_rtx));
5063 other_changed = 1;
5065 /* If the only change we made was to change an EQ into an NE or
5066 vice versa, OP0 has only one bit that might be nonzero, and OP1
5067 is zero, check if changing the user of the condition code will
5068 produce a valid insn. If it won't, we can keep the original code
5069 in that insn by surrounding our operation with an XOR. */
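/* For example, if OP0 is known to be either 0 or 4 (so MASK == 4),
   (ne OP0 (const_int 0)) is equivalent to
   (eq (xor OP0 (const_int 4)) (const_int 0)), because the XOR flips
   the only bit of OP0 that can be nonzero.  */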
5071 if (((old_code == NE && new_code == EQ)
5072 || (old_code == EQ && new_code == NE))
5073 && ! other_changed_previously && op1 == const0_rtx
5074 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5075 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5077 rtx pat = PATTERN (other_insn), note = 0;
5079 if ((recog_for_combine (&pat, other_insn, &note) < 0
5080 && ! check_asm_operands (pat)))
5082 PUT_CODE (*cc_use, old_code);
5083 other_changed = 0;
5085 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
5090 if (other_changed)
5091 undobuf.other_insn = other_insn;
5093 #ifdef HAVE_cc0
5094 /* If we are now comparing against zero, change our source if
5095 needed. If we do not use cc0, we always have a COMPARE. */
5096 if (op1 == const0_rtx && dest == cc0_rtx)
5098 SUBST (SET_SRC (x), op0);
5099 src = op0;
5101 else
5102 #endif
5104 /* Otherwise, if we didn't previously have a COMPARE in the
5105 correct mode, we need one. */
5106 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5108 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5109 src = SET_SRC (x);
5111 else
5113 /* Otherwise, update the COMPARE if needed. */
5114 SUBST (XEXP (src, 0), op0);
5115 SUBST (XEXP (src, 1), op1);
5118 else
5120 /* Get SET_SRC in a form where we have placed back any
5121 compound expressions. Then do the checks below. */
5122 src = make_compound_operation (src, SET);
5123 SUBST (SET_SRC (x), src);
5126 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5127 and X being a REG or (subreg (reg)), we may be able to convert this to
5128 (set (subreg:m2 x) (op)).
5130 We can always do this if M1 is narrower than M2 because that means that
5131 we only care about the low bits of the result.
5133 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5134 perform a narrower operation than requested since the high-order bits will
5135 be undefined. On machines where it is defined, this transformation is safe
5136 as long as M1 and M2 have the same number of words. */
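/* For example, (set (reg:QI X) (subreg:QI (plus:SI A B) 0)) can become
   (set (subreg:SI (reg:QI X) 0) (plus:SI A B)), since only the low
   byte of the PLUS result is needed.  */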
5138 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5139 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5140 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5141 / UNITS_PER_WORD)
5142 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5143 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5144 #ifndef WORD_REGISTER_OPERATIONS
5145 && (GET_MODE_SIZE (GET_MODE (src))
5146 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5147 #endif
5148 #ifdef CANNOT_CHANGE_MODE_CLASS
5149 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5150 && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
5151 GET_MODE (SUBREG_REG (src)),
5152 GET_MODE (src)))
5153 #endif
5154 && (GET_CODE (dest) == REG
5155 || (GET_CODE (dest) == SUBREG
5156 && GET_CODE (SUBREG_REG (dest)) == REG)))
5158 SUBST (SET_DEST (x),
5159 gen_lowpart (GET_MODE (SUBREG_REG (src)),
5160 dest));
5161 SUBST (SET_SRC (x), SUBREG_REG (src));
5163 src = SET_SRC (x), dest = SET_DEST (x);
5166 #ifdef HAVE_cc0
5167 /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5168 in SRC. */
5169 if (dest == cc0_rtx
5170 && GET_CODE (src) == SUBREG
5171 && subreg_lowpart_p (src)
5172 && (GET_MODE_BITSIZE (GET_MODE (src))
5173 < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
5175 rtx inner = SUBREG_REG (src);
5176 enum machine_mode inner_mode = GET_MODE (inner);
5178 /* Here we make sure that the sign bit is not set. */
5179 if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
5180 && (nonzero_bits (inner, inner_mode)
5181 < ((unsigned HOST_WIDE_INT) 1
5182 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
5184 SUBST (SET_SRC (x), inner);
5185 src = SET_SRC (x);
5188 #endif
5190 #ifdef LOAD_EXTEND_OP
5191 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5192 would require a paradoxical subreg. Replace the subreg with a
5193 zero_extend to avoid the reload that would otherwise be required. */
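/* For instance, on a machine where LOAD_EXTEND_OP (QImode) is
   ZERO_EXTEND, (set FOO (subreg:SI (mem:QI BAR) 0)) is rewritten as
   (set FOO (zero_extend:SI (mem:QI BAR))).  */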
5195 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5196 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5197 && SUBREG_BYTE (src) == 0
5198 && (GET_MODE_SIZE (GET_MODE (src))
5199 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5200 && GET_CODE (SUBREG_REG (src)) == MEM)
5202 SUBST (SET_SRC (x),
5203 gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5204 GET_MODE (src), SUBREG_REG (src)));
5206 src = SET_SRC (x);
5208 #endif
5210 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5211 are comparing an item known to be 0 or -1 against 0, use a logical
5212 operation instead. Check for one of the arms being an IOR of the other
5213 arm with some value. We compute three terms to be IOR'ed together. In
5214 practice, at most two will be nonzero. Then we do the IOR's. */
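/* For example, with M known to be 0 or -1,
   (if_then_else (ne M 0) (ior X Y) X) gives TERM1 = X,
   TERM2 = (and M Y) and TERM3 = (and (not M) 0), so the result
   simplifies to (ior X (and M Y)).  */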
5216 if (GET_CODE (dest) != PC
5217 && GET_CODE (src) == IF_THEN_ELSE
5218 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5219 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5220 && XEXP (XEXP (src, 0), 1) == const0_rtx
5221 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5222 #ifdef HAVE_conditional_move
5223 && ! can_conditionally_move_p (GET_MODE (src))
5224 #endif
5225 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5226 GET_MODE (XEXP (XEXP (src, 0), 0)))
5227 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5228 && ! side_effects_p (src))
5230 rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5231 ? XEXP (src, 1) : XEXP (src, 2));
5232 rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5233 ? XEXP (src, 2) : XEXP (src, 1));
5234 rtx term1 = const0_rtx, term2, term3;
5236 if (GET_CODE (true_rtx) == IOR
5237 && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5238 term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
5239 else if (GET_CODE (true_rtx) == IOR
5240 && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5241 term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
5242 else if (GET_CODE (false_rtx) == IOR
5243 && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5244 term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
5245 else if (GET_CODE (false_rtx) == IOR
5246 && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5247 term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
5249 term2 = gen_binary (AND, GET_MODE (src),
5250 XEXP (XEXP (src, 0), 0), true_rtx);
5251 term3 = gen_binary (AND, GET_MODE (src),
5252 simplify_gen_unary (NOT, GET_MODE (src),
5253 XEXP (XEXP (src, 0), 0),
5254 GET_MODE (src)),
5255 false_rtx);
5257 SUBST (SET_SRC (x),
5258 gen_binary (IOR, GET_MODE (src),
5259 gen_binary (IOR, GET_MODE (src), term1, term2),
5260 term3));
5262 src = SET_SRC (x);
5265 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5266 whole thing fail. */
5267 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5268 return src;
5269 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5270 return dest;
5271 else
5272 /* Convert this into a field assignment operation, if possible. */
5273 return make_field_assignment (x);
5276 /* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5277 result. LAST is nonzero if this is the last retry. */
5279 static rtx
5280 simplify_logical (rtx x, int last)
5282 enum machine_mode mode = GET_MODE (x);
5283 rtx op0 = XEXP (x, 0);
5284 rtx op1 = XEXP (x, 1);
5285 rtx reversed;
5287 switch (GET_CODE (x))
5289 case AND:
5290 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5291 insn (and may simplify more). */
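/* The identity holds bit by bit: where a bit of A is 0 both sides
   are 0, and where it is 1 the corresponding bit of (A ^ B) is the
   complement of B's.  */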
5292 if (GET_CODE (op0) == XOR
5293 && rtx_equal_p (XEXP (op0, 0), op1)
5294 && ! side_effects_p (op1))
5295 x = gen_binary (AND, mode,
5296 simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5297 op1);
5299 if (GET_CODE (op0) == XOR
5300 && rtx_equal_p (XEXP (op0, 1), op1)
5301 && ! side_effects_p (op1))
5302 x = gen_binary (AND, mode,
5303 simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5304 op1);
5306 /* Similarly for (~(A ^ B)) & A. */
5307 if (GET_CODE (op0) == NOT
5308 && GET_CODE (XEXP (op0, 0)) == XOR
5309 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5310 && ! side_effects_p (op1))
5311 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5313 if (GET_CODE (op0) == NOT
5314 && GET_CODE (XEXP (op0, 0)) == XOR
5315 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5316 && ! side_effects_p (op1))
5317 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5319 /* We can call simplify_and_const_int only if we don't lose
5320 any (sign) bits when converting INTVAL (op1) to
5321 "unsigned HOST_WIDE_INT". */
5322 if (GET_CODE (op1) == CONST_INT
5323 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5324 || INTVAL (op1) > 0))
5326 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5328 /* If we have (ior (and X C1) C2) and the next restart would be
5329 the last, simplify this by making C1 as small as possible
5330 and then exit. */
5331 if (last
5332 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5333 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5334 && GET_CODE (op1) == CONST_INT)
5335 return gen_binary (IOR, mode,
5336 gen_binary (AND, mode, XEXP (op0, 0),
5337 GEN_INT (INTVAL (XEXP (op0, 1))
5338 & ~INTVAL (op1))), op1);
5340 if (GET_CODE (x) != AND)
5341 return x;
5343 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5344 || GET_RTX_CLASS (GET_CODE (x)) == '2')
5345 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5348 /* Convert (A | B) & A to A. */
5349 if (GET_CODE (op0) == IOR
5350 && (rtx_equal_p (XEXP (op0, 0), op1)
5351 || rtx_equal_p (XEXP (op0, 1), op1))
5352 && ! side_effects_p (XEXP (op0, 0))
5353 && ! side_effects_p (XEXP (op0, 1)))
5354 return op1;
5356 /* In the following group of tests (and those in case IOR below),
5357 we start with some combination of logical operations and apply
5358 the distributive law followed by the inverse distributive law.
5359 Most of the time, this results in no change. However, if some of
5360 the operands are the same or inverses of each other, simplifications
5361 will result.
5363 For example, (and (ior A B) (not B)) can occur as the result of
5364 expanding a bit field assignment. When we apply the distributive
5365 law to this, we get (ior (and A (not B)) (and B (not B))),
5366 which then simplifies to (and A (not B)).
5368 If we have (and (ior A B) C), apply the distributive law and then
5369 the inverse distributive law to see if things simplify. */
5371 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5373 x = apply_distributive_law
5374 (gen_binary (GET_CODE (op0), mode,
5375 gen_binary (AND, mode, XEXP (op0, 0), op1),
5376 gen_binary (AND, mode, XEXP (op0, 1),
5377 copy_rtx (op1))));
5378 if (GET_CODE (x) != AND)
5379 return x;
5382 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5383 return apply_distributive_law
5384 (gen_binary (GET_CODE (op1), mode,
5385 gen_binary (AND, mode, XEXP (op1, 0), op0),
5386 gen_binary (AND, mode, XEXP (op1, 1),
5387 copy_rtx (op0))));
5389 /* Similarly, taking advantage of the fact that
5390 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
5392 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5393 return apply_distributive_law
5394 (gen_binary (XOR, mode,
5395 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5396 gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5397 XEXP (op1, 1))));
5399 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5400 return apply_distributive_law
5401 (gen_binary (XOR, mode,
5402 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5403 gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5404 break;
5406 case IOR:
5407 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
5408 if (GET_CODE (op1) == CONST_INT
5409 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5410 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5411 return op1;
5413 /* Convert (A & B) | A to A. */
5414 if (GET_CODE (op0) == AND
5415 && (rtx_equal_p (XEXP (op0, 0), op1)
5416 || rtx_equal_p (XEXP (op0, 1), op1))
5417 && ! side_effects_p (XEXP (op0, 0))
5418 && ! side_effects_p (XEXP (op0, 1)))
5419 return op1;
5421 /* If we have (ior (and A B) C), apply the distributive law and then
5422 the inverse distributive law to see if things simplify. */
5424 if (GET_CODE (op0) == AND)
5426 x = apply_distributive_law
5427 (gen_binary (AND, mode,
5428 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5429 gen_binary (IOR, mode, XEXP (op0, 1),
5430 copy_rtx (op1))));
5432 if (GET_CODE (x) != IOR)
5433 return x;
5436 if (GET_CODE (op1) == AND)
5438 x = apply_distributive_law
5439 (gen_binary (AND, mode,
5440 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5441 gen_binary (IOR, mode, XEXP (op1, 1),
5442 copy_rtx (op0))));
5444 if (GET_CODE (x) != IOR)
5445 return x;
5448 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5449 mode size to (rotate A CX). */
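/* E.g., in SImode, (ior (ashift A (const_int 8))
   (lshiftrt A (const_int 24))) becomes (rotate A (const_int 8)),
   the RTL form of the source idiom (a << 8) | (a >> 24).  */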
5451 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5452 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5453 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5454 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5455 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5456 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5457 == GET_MODE_BITSIZE (mode)))
5458 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5459 (GET_CODE (op0) == ASHIFT
5460 ? XEXP (op0, 1) : XEXP (op1, 1)));
5462 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5463 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5464 does not affect any of the bits in OP1, it can really be done
5465 as a PLUS and we can associate. We do this by seeing if OP1
5466 can be safely shifted left C bits. */
5467 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5468 && GET_CODE (XEXP (op0, 0)) == PLUS
5469 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5470 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5471 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5473 int count = INTVAL (XEXP (op0, 1));
5474 HOST_WIDE_INT mask = INTVAL (op1) << count;
5476 if (mask >> count == INTVAL (op1)
5477 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5479 SUBST (XEXP (XEXP (op0, 0), 1),
5480 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5481 return op0;
5484 break;
5486 case XOR:
5487 /* If we are XORing two things that have no bits in common,
5488 convert them into an IOR. This helps to detect a rotation encoded
5489 with XOR rather than IOR, and enables other simplifications. */
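/* For example, in SImode (xor (ashift A (const_int 8))
   (and B (const_int 255))) can be rewritten as the corresponding IOR,
   since the ASHIFT has no nonzero bits below bit 8 and the AND has
   none above bit 7.  */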
5491 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5492 && (nonzero_bits (op0, mode)
5493 & nonzero_bits (op1, mode)) == 0)
5494 return (gen_binary (IOR, mode, op0, op1));
5496 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5497 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5498 (NOT y). */
5500 int num_negated = 0;
5502 if (GET_CODE (op0) == NOT)
5503 num_negated++, op0 = XEXP (op0, 0);
5504 if (GET_CODE (op1) == NOT)
5505 num_negated++, op1 = XEXP (op1, 0);
5507 if (num_negated == 2)
5509 SUBST (XEXP (x, 0), op0);
5510 SUBST (XEXP (x, 1), op1);
5512 else if (num_negated == 1)
5513 return
5514 simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
5515 mode);
5518 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5519 correspond to a machine insn or result in further simplifications
5520 if B is a constant. */
5522 if (GET_CODE (op0) == AND
5523 && rtx_equal_p (XEXP (op0, 1), op1)
5524 && ! side_effects_p (op1))
5525 return gen_binary (AND, mode,
5526 simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5527 op1);
5529 else if (GET_CODE (op0) == AND
5530 && rtx_equal_p (XEXP (op0, 0), op1)
5531 && ! side_effects_p (op1))
5532 return gen_binary (AND, mode,
5533 simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5534 op1);
5536 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5537 comparison if STORE_FLAG_VALUE is 1. */
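/* E.g., (xor (eq A B) (const_int 1)) becomes (ne A B).  */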
5538 if (STORE_FLAG_VALUE == 1
5539 && op1 == const1_rtx
5540 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5541 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5542 XEXP (op0, 1))))
5543 return reversed;
5545 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5546 is (lt foo (const_int 0)), so we can perform the above
5547 simplification if STORE_FLAG_VALUE is 1. */
5549 if (STORE_FLAG_VALUE == 1
5550 && op1 == const1_rtx
5551 && GET_CODE (op0) == LSHIFTRT
5552 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5553 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5554 return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5556 /* (xor (comparison foo bar) (const_int sign-bit))
5557 when STORE_FLAG_VALUE is the sign bit. */
5558 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5559 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5560 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5561 && op1 == const_true_rtx
5562 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5563 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5564 XEXP (op0, 1))))
5565 return reversed;
5567 break;
5569 default:
5570 abort ();
5573 return x;
5576 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5577 operations" because they can be replaced with two more basic operations.
5578 ZERO_EXTEND is also considered "compound" because it can be replaced with
5579 an AND operation, which is simpler, though only one operation.
5581 The function expand_compound_operation is called with an rtx expression
5582 and will convert it to the appropriate shifts and AND operations,
5583 simplifying at each stage.
5585 The function make_compound_operation is called to convert an expression
5586 consisting of shifts and ANDs into the equivalent compound expression.
5587 It is the inverse of this function, loosely speaking. */
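/* For example, with BITS_BIG_ENDIAN zero, expand_compound_operation
   rewrites (zero_extract:SI X (const_int 8) (const_int 4)) as a left
   shift of 20 followed by a logical right shift of 24, which then
   simplifies to (and:SI (lshiftrt:SI X (const_int 4)) (const_int 255));
   make_compound_operation performs the reverse rewrite.  */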
5589 static rtx
5590 expand_compound_operation (rtx x)
5592 unsigned HOST_WIDE_INT pos = 0, len;
5593 int unsignedp = 0;
5594 unsigned int modewidth;
5595 rtx tem;
5597 switch (GET_CODE (x))
5599 case ZERO_EXTEND:
5600 unsignedp = 1;
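/* ... fall through ... */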
5601 case SIGN_EXTEND:
5602 /* We can't necessarily use a const_int for a multiword mode;
5603 it depends on implicitly extending the value.
5604 Since we don't know the right way to extend it,
5605 we can't tell whether the implicit way is right.
5607 Even for a mode that is no wider than a const_int,
5608 we can't win, because we need to sign extend one of its bits through
5609 the rest of it, and we don't know which bit. */
5610 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5611 return x;
5613 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5614 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is safe for any MEM
5615 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5616 reloaded. If not for that, MEMs would very rarely be safe.
5618 Reject MODEs bigger than a word, because we might not be able
5619 to reference a two-register group starting with an arbitrary register
5620 (and currently gen_lowpart might crash for a SUBREG). */
5622 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5623 return x;
5625 /* Reject MODEs that aren't scalar integers because turning vector
5626 or complex modes into shifts causes problems. */
5628 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5629 return x;
5631 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5632 /* If the inner object has VOIDmode (the only way this can happen
5633 is if it is an ASM_OPERANDS), we can't do anything since we don't
5634 know how much masking to do. */
5635 if (len == 0)
5636 return x;
5638 break;
5640 case ZERO_EXTRACT:
5641 unsignedp = 1;
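/* ... fall through ... */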
5642 case SIGN_EXTRACT:
5643 /* If the operand is a CLOBBER, just return it. */
5644 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5645 return XEXP (x, 0);
5647 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5648 || GET_CODE (XEXP (x, 2)) != CONST_INT
5649 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5650 return x;
5652 /* Reject MODEs that aren't scalar integers because turning vector
5653 or complex modes into shifts causes problems. */
5655 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5656 return x;
5658 len = INTVAL (XEXP (x, 1));
5659 pos = INTVAL (XEXP (x, 2));
5661 /* If this goes outside the object being extracted, replace the object
5662 with a (use (mem ...)) construct that only combine understands
5663 and is used only for this purpose. */
5664 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5665 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5667 if (BITS_BIG_ENDIAN)
5668 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5670 break;
5672 default:
5673 return x;
5675 /* Convert sign extension to zero extension, if we know that the high
5676 bit is not set, as this is easier to optimize. It will be converted
5677 back to the cheaper alternative in make_extraction. */
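/* For instance, (sign_extend:SI X) where X:QI is known to have its
   sign bit clear (nonzero_bits at most 0x7f) yields the same value
   as (zero_extend:SI X); the zero-extended form is kept only when it
   is cheaper.  */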
5678 if (GET_CODE (x) == SIGN_EXTEND
5679 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5680 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5681 & ~(((unsigned HOST_WIDE_INT)
5682 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5683 >> 1))
5684 == 0)))
5686 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5687 rtx temp2 = expand_compound_operation (temp);
5689 /* Make sure this is a profitable operation. */
5690 if (rtx_cost (x, SET) > rtx_cost (temp2, SET))
5691 return temp2;
5692 else if (rtx_cost (x, SET) > rtx_cost (temp, SET))
5693 return temp;
5694 else
5695 return x;
5698 /* We can optimize some special cases of ZERO_EXTEND. */
5699 if (GET_CODE (x) == ZERO_EXTEND)
5701 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5702 know that the last value didn't have any inappropriate bits
5703 set. */
5704 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5705 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5706 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5707 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5708 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5709 return XEXP (XEXP (x, 0), 0);
5711 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5712 if (GET_CODE (XEXP (x, 0)) == SUBREG
5713 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5714 && subreg_lowpart_p (XEXP (x, 0))
5715 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5716 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5717 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5718 return SUBREG_REG (XEXP (x, 0));
5720 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5721 is a comparison and STORE_FLAG_VALUE permits. This is like
5722 the first case, but it works even when GET_MODE (x) is larger
5723 than HOST_WIDE_INT. */
5724 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5725 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5726 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5727 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5728 <= HOST_BITS_PER_WIDE_INT)
5729 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5730 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5731 return XEXP (XEXP (x, 0), 0);
5733 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5734 if (GET_CODE (XEXP (x, 0)) == SUBREG
5735 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5736 && subreg_lowpart_p (XEXP (x, 0))
5737 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5738 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5739 <= HOST_BITS_PER_WIDE_INT)
5740 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5741 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5742 return SUBREG_REG (XEXP (x, 0));
5746 /* If we reach here, we want to return a pair of shifts. The inner
5747 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5748 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5749 logical depending on the value of UNSIGNEDP.
5751 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5752 converted into an AND of a shift.
5754 We must check for the case where the left shift would have a negative
5755 count. This can happen in a case like (x >> 31) & 255 on machines
5756 that can't shift by a constant. On those machines, we would first
5757 combine the shift with the AND to produce a variable-position
5758 extraction. Then the constant of 31 would be substituted in to produce
5759 such a position. */
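/* For example, with BITS_BIG_ENDIAN zero,
   (sign_extract:SI X (const_int 8) (const_int 4)), i.e. BITSIZE == 32,
   LEN == 8 and POS == 4, becomes
   (ashiftrt:SI (ashift:SI X (const_int 20)) (const_int 24)).  */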
5761 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5762 if (modewidth + len >= pos)
5763 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5764 GET_MODE (x),
5765 simplify_shift_const (NULL_RTX, ASHIFT,
5766 GET_MODE (x),
5767 XEXP (x, 0),
5768 modewidth - pos - len),
5769 modewidth - len);
5771 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5772 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5773 simplify_shift_const (NULL_RTX, LSHIFTRT,
5774 GET_MODE (x),
5775 XEXP (x, 0), pos),
5776 ((HOST_WIDE_INT) 1 << len) - 1);
5777 else
5778 /* Any other cases we can't handle. */
5779 return x;
5781 /* If we couldn't do this for some reason, return the original
5782 expression. */
5783 if (GET_CODE (tem) == CLOBBER)
5784 return x;
5786 return tem;
5789 /* X is a SET which contains an assignment of one object into
5790 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5791 or certain SUBREGS). If possible, convert it into a series of
5792 logical operations.
5794 We half-heartedly support variable positions, but do not at all
5795 support variable lengths. */
5797 static rtx
5798 expand_field_assignment (rtx x)
5800 rtx inner;
5801 rtx pos; /* Always counts from low bit. */
5802 int len;
5803 rtx mask;
5804 enum machine_mode compute_mode;
5806 /* Loop until we find something we can't simplify. */
5807 while (1)
5809 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5810 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5812 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5813 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5814 pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
5816 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5817 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5819 inner = XEXP (SET_DEST (x), 0);
5820 len = INTVAL (XEXP (SET_DEST (x), 1));
5821 pos = XEXP (SET_DEST (x), 2);
5823 /* If the field, at the given constant position, would extend beyond
5824 the width of INNER, surround INNER with a USE to indicate this. */
5825 if (GET_CODE (pos) == CONST_INT
5826 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5827 inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5829 if (BITS_BIG_ENDIAN)
5831 if (GET_CODE (pos) == CONST_INT)
5832 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5833 - INTVAL (pos));
5834 else if (GET_CODE (pos) == MINUS
5835 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5836 && (INTVAL (XEXP (pos, 1))
5837 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5838 /* If position is ADJUST - X, new position is X. */
5839 pos = XEXP (pos, 0);
5840 else
5841 pos = gen_binary (MINUS, GET_MODE (pos),
5842 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5843 - len),
5844 pos);
5848 /* A SUBREG between two modes that occupy the same numbers of words
5849 can be done by moving the SUBREG to the source. */
5850 else if (GET_CODE (SET_DEST (x)) == SUBREG
5851 /* We need SUBREGs to compute nonzero_bits properly. */
5852 && nonzero_sign_valid
5853 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5854 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5855 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5856 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5858 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5859 gen_lowpart
5860 (GET_MODE (SUBREG_REG (SET_DEST (x))),
5861 SET_SRC (x)));
5862 continue;
5864 else
5865 break;
5867 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5868 inner = SUBREG_REG (inner);
5870 compute_mode = GET_MODE (inner);
5872 /* Don't attempt bitwise arithmetic on non scalar integer modes. */
5873 if (! SCALAR_INT_MODE_P (compute_mode))
5875 enum machine_mode imode;
5877 /* Don't do anything for vector or complex integral types. */
5878 if (! FLOAT_MODE_P (compute_mode))
5879 break;
5881 /* Try to find an integral mode to pun with. */
5882 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5883 if (imode == BLKmode)
5884 break;
5886 compute_mode = imode;
5887 inner = gen_lowpart (imode, inner);
5890 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5891 if (len < HOST_BITS_PER_WIDE_INT)
5892 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5893 else
5894 break;
5896 /* Now compute the equivalent expression. Make a copy of INNER
5897 for the SET_DEST in case it is a MEM into which we will substitute;
5898 we don't want shared RTL in that case. */
5899 x = gen_rtx_SET
5900 (VOIDmode, copy_rtx (inner),
5901 gen_binary (IOR, compute_mode,
5902 gen_binary (AND, compute_mode,
5903 simplify_gen_unary (NOT, compute_mode,
5904 gen_binary (ASHIFT,
5905 compute_mode,
5906 mask, pos),
5907 compute_mode),
5908 inner),
5909 gen_binary (ASHIFT, compute_mode,
5910 gen_binary (AND, compute_mode,
5911 gen_lowpart
5912 (compute_mode, SET_SRC (x)),
5913 mask),
5914 pos)));
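/* Written in C, the expression built above stores, for instance, an
   8-bit field at bit position 8 of R as

   R = (R & ~(0xff << 8)) | ((S & 0xff) << 8);  */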
5917 return x;
5920 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5921 it is an RTX that represents a variable starting position; otherwise,
5922 POS is the (constant) starting bit position (counted from the LSB).
5924 INNER may be a USE. This will occur when we started with a bitfield
5925 that went outside the boundary of the object in memory, which is
5926 allowed on most machines. To isolate this case, we produce a USE
5927 whose mode is wide enough and surround the MEM with it. The only
5928 code that understands the USE is this routine. If it is not removed,
5929 it will cause the resulting insn not to match.
5931 UNSIGNEDP is nonzero for an unsigned reference and zero for a
5932 signed reference.
5934 IN_DEST is nonzero if this is a reference in the destination of a
5935 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero,
5936 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5937 be used.
5939 IN_COMPARE is nonzero if we are in a COMPARE. This means that a
5940 ZERO_EXTRACT should be built even for bits starting at bit 0.
5942 MODE is the desired mode of the result (if IN_DEST == 0).
5944 The result is an RTX for the extraction or NULL_RTX if the target
5945 can't handle it. */
5947 static rtx
5948 make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
5949 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
5950 int in_dest, int in_compare)
5952 /* This mode describes the size of the storage area
5953 to fetch the overall value from. Within that, we
5954 ignore the POS lowest bits, etc. */
5955 enum machine_mode is_mode = GET_MODE (inner);
5956 enum machine_mode inner_mode;
5957 enum machine_mode wanted_inner_mode = byte_mode;
5958 enum machine_mode wanted_inner_reg_mode = word_mode;
5959 enum machine_mode pos_mode = word_mode;
5960 enum machine_mode extraction_mode = word_mode;
5961 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5962 int spans_byte = 0;
5963 rtx new = 0;
5964 rtx orig_pos_rtx = pos_rtx;
5965 HOST_WIDE_INT orig_pos;
5967 /* Get some information about INNER and get the innermost object. */
5968 if (GET_CODE (inner) == USE)
5969 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
5970 /* We don't need to adjust the position because we set up the USE
5971 to pretend that it was a full-word object. */
5972 spans_byte = 1, inner = XEXP (inner, 0);
5973 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5975 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5976 consider just the QI as the memory to extract from.
5977 The subreg adds or removes high bits; its mode is
5978 irrelevant to the meaning of this extraction,
5979 since POS and LEN count from the lsb. */
5980 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5981 is_mode = GET_MODE (SUBREG_REG (inner));
5982 inner = SUBREG_REG (inner);
5984 else if (GET_CODE (inner) == ASHIFT
5985 && GET_CODE (XEXP (inner, 1)) == CONST_INT
5986 && pos_rtx == 0 && pos == 0
5987 && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
5989 /* We're extracting the least significant bits of an rtx
5990 (ashift X (const_int C)), where LEN > C. Extract the
5991 least significant (LEN - C) bits of X, giving an rtx
5992 whose mode is MODE, then shift it left C times. */
5993 new = make_extraction (mode, XEXP (inner, 0),
5994 0, 0, len - INTVAL (XEXP (inner, 1)),
5995 unsignedp, in_dest, in_compare);
5996 if (new != 0)
5997 return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
6000 inner_mode = GET_MODE (inner);
6002 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
6003 pos = INTVAL (pos_rtx), pos_rtx = 0;
6005 /* See if this can be done without an extraction. We never can if the
6006 width of the field is not the same as that of some integer mode. For
6007 registers, we can only avoid the extraction if the position is at the
6008 low-order bit and this is either not in the destination or we have the
6009 appropriate STRICT_LOW_PART operation available.
6011 For MEM, we can avoid an extract if the field starts on an appropriate
6012 boundary and we can change the mode of the memory reference. However,
6013 we cannot directly access the MEM if we have a USE and the underlying
6014 MEM is not TMODE. This combination means that MEM was being used in a
6015 context where bits outside its mode were being referenced; that is only
6016 valid in bit-field insns. */
6018 if (tmode != BLKmode
6019 && ! (spans_byte && inner_mode != tmode)
6020 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6021 && GET_CODE (inner) != MEM
6022 && (! in_dest
6023 || (GET_CODE (inner) == REG
6024 && have_insn_for (STRICT_LOW_PART, tmode))))
6025 || (GET_CODE (inner) == MEM && pos_rtx == 0
6026 && (pos
6027 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6028 : BITS_PER_UNIT)) == 0
6029 /* We can't do this if we are widening INNER_MODE (it
6030 may not be aligned, for one thing). */
6031 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6032 && (inner_mode == tmode
6033 || (! mode_dependent_address_p (XEXP (inner, 0))
6034 && ! MEM_VOLATILE_P (inner))))))
6036 /* If INNER is a MEM, make a new MEM that encompasses just the desired
6037 field. If the original and current mode are the same, we need not
6038 adjust the offset. Otherwise, we do if bytes big endian.
6040 If INNER is not a MEM, get a piece consisting of just the field
6041 of interest (in this case POS % BITS_PER_WORD must be 0). */
6043 if (GET_CODE (inner) == MEM)
6045 HOST_WIDE_INT offset;
6047 /* POS counts from lsb, but make OFFSET count in memory order. */
6048 if (BYTES_BIG_ENDIAN)
6049 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6050 else
6051 offset = pos / BITS_PER_UNIT;
6053 new = adjust_address_nv (inner, tmode, offset);
6055 else if (GET_CODE (inner) == REG)
6057 if (tmode != inner_mode)
6059 /* We can't call gen_lowpart in a DEST since we
6060 always want a SUBREG (see below) and it would sometimes
6061 return a new hard register. */
6062 if (pos || in_dest)
6064 HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6066 if (WORDS_BIG_ENDIAN
6067 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6068 final_word = ((GET_MODE_SIZE (inner_mode)
6069 - GET_MODE_SIZE (tmode))
6070 / UNITS_PER_WORD) - final_word;
6072 final_word *= UNITS_PER_WORD;
6073 if (BYTES_BIG_ENDIAN
6074 && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6075 final_word += (GET_MODE_SIZE (inner_mode)
6076 - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6078 /* Avoid creating invalid subregs, for example when
6079 simplifying (x>>32)&255. */
6080 if (final_word >= GET_MODE_SIZE (inner_mode))
6081 return NULL_RTX;
6083 new = gen_rtx_SUBREG (tmode, inner, final_word);
6085 else
6086 new = gen_lowpart (tmode, inner);
6088 else
6089 new = inner;
6091 else
6092 new = force_to_mode (inner, tmode,
6093 len >= HOST_BITS_PER_WIDE_INT
6094 ? ~(unsigned HOST_WIDE_INT) 0
6095 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6096 NULL_RTX, 0);
6098 /* If this extraction is going into the destination of a SET,
6099 make a STRICT_LOW_PART unless we made a MEM. */
6101 if (in_dest)
6102 return (GET_CODE (new) == MEM ? new
6103 : (GET_CODE (new) != SUBREG
6104 ? gen_rtx_CLOBBER (tmode, const0_rtx)
6105 : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
6107 if (mode == tmode)
6108 return new;
6110 if (GET_CODE (new) == CONST_INT)
6111 return gen_int_mode (INTVAL (new), mode);
6113 /* If we know that no extraneous bits are set, and that the high
6114 bit is not set, convert the extraction to the cheaper of
6115 sign and zero extension, which are equivalent in these cases. */
6116 if (flag_expensive_optimizations
6117 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6118 && ((nonzero_bits (new, tmode)
6119 & ~(((unsigned HOST_WIDE_INT)
6120 GET_MODE_MASK (tmode))
6121 >> 1))
6122 == 0)))
6124 rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6125 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6127 /* Prefer ZERO_EXTENSION, since it gives more information to
6128 backends. */
6129 if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6130 return temp;
6131 return temp1;
6134 /* Otherwise, sign- or zero-extend unless we already are in the
6135 proper mode. */
6137 return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6138 mode, new));
6141 /* Unless this is a COMPARE or we have a funny memory reference,
6142 don't do anything with zero-extending field extracts starting at
6143 the low-order bit since they are simple AND operations. */
6144 if (pos_rtx == 0 && pos == 0 && ! in_dest
6145 && ! in_compare && ! spans_byte && unsignedp)
6146 return 0;
6148 /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
6149 we would be spanning bytes or if the position is not a constant and the
6150 length is not 1. In all other cases, we would only be going outside
6151 our object in cases when an original shift would have been
6152 undefined. */
6153 if (! spans_byte && GET_CODE (inner) == MEM
6154 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6155 || (pos_rtx != 0 && len != 1)))
6156 return 0;
6158 /* Get the mode to use should INNER not be a MEM, the mode for the position,
6159 and the mode for the result. */
6160 if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6162 wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6163 pos_mode = mode_for_extraction (EP_insv, 2);
6164 extraction_mode = mode_for_extraction (EP_insv, 3);
6167 if (! in_dest && unsignedp
6168 && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6170 wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6171 pos_mode = mode_for_extraction (EP_extzv, 3);
6172 extraction_mode = mode_for_extraction (EP_extzv, 0);
6175 if (! in_dest && ! unsignedp
6176 && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6178 wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6179 pos_mode = mode_for_extraction (EP_extv, 3);
6180 extraction_mode = mode_for_extraction (EP_extv, 0);
6183 /* Never narrow an object, since that might not be safe. */
6185 if (mode != VOIDmode
6186 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6187 extraction_mode = mode;
6189 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6190 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6191 pos_mode = GET_MODE (pos_rtx);
6193 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6194 if we have to change the mode of memory and cannot, the desired mode is
6195 EXTRACTION_MODE. */
6196 if (GET_CODE (inner) != MEM)
6197 wanted_inner_mode = wanted_inner_reg_mode;
6198 else if (inner_mode != wanted_inner_mode
6199 && (mode_dependent_address_p (XEXP (inner, 0))
6200 || MEM_VOLATILE_P (inner)))
6201 wanted_inner_mode = extraction_mode;
6203 orig_pos = pos;
6205 if (BITS_BIG_ENDIAN)
6207 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6208 BITS_BIG_ENDIAN style. If position is constant, compute new
6209 position. Otherwise, build subtraction.
6210 Note that POS is relative to the mode of the original argument.
6211 If it's a MEM we need to recompute POS relative to that.
6212 However, if we're extracting from (or inserting into) a register,
6213 we want to recompute POS relative to wanted_inner_mode. */
6214 int width = (GET_CODE (inner) == MEM
6215 ? GET_MODE_BITSIZE (is_mode)
6216 : GET_MODE_BITSIZE (wanted_inner_mode));
6218 if (pos_rtx == 0)
6219 pos = width - len - pos;
6220 else
6221 pos_rtx
6222 = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6223 /* POS may be less than 0 now, but we check for that below.
6224 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
6227 /* If INNER has a wider mode, make it smaller. If this is a constant
6228 extract, try to adjust the byte to point to the byte containing
6229 the value. */
6230 if (wanted_inner_mode != VOIDmode
6231 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6232 && ((GET_CODE (inner) == MEM
6233 && (inner_mode == wanted_inner_mode
6234 || (! mode_dependent_address_p (XEXP (inner, 0))
6235 && ! MEM_VOLATILE_P (inner))))))
6237 int offset = 0;
6239 /* The computations below will be correct if the machine is big
6240 endian in both bits and bytes or little endian in bits and bytes.
6241 If it is mixed, we must adjust. */
6243 /* If bytes are big endian and we had a paradoxical SUBREG, we must
6244 adjust OFFSET to compensate. */
6245 if (BYTES_BIG_ENDIAN
6246 && ! spans_byte
6247 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6248 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6250 /* If this is a constant position, we can move to the desired byte. */
6251 if (pos_rtx == 0)
6253 offset += pos / BITS_PER_UNIT;
6254 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6257 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6258 && ! spans_byte
6259 && is_mode != wanted_inner_mode)
6260 offset = (GET_MODE_SIZE (is_mode)
6261 - GET_MODE_SIZE (wanted_inner_mode) - offset);
6263 if (offset != 0 || inner_mode != wanted_inner_mode)
6264 inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6267 /* If INNER is not memory, we can always get it into the proper mode. If we
6268 are changing its mode, POS must be a constant and smaller than the size
6269 of the new mode. */
6270 else if (GET_CODE (inner) != MEM)
6272 if (GET_MODE (inner) != wanted_inner_mode
6273 && (pos_rtx != 0
6274 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6275 return 0;
6277 inner = force_to_mode (inner, wanted_inner_mode,
6278 pos_rtx
6279 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6280 ? ~(unsigned HOST_WIDE_INT) 0
6281 : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6282 << orig_pos),
6283 NULL_RTX, 0);
6286 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
6287 have to zero extend. Otherwise, we can just use a SUBREG. */
6288 if (pos_rtx != 0
6289 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6291 rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6293 /* If we know that no extraneous bits are set, and that the high
6294 bit is not set, convert the extraction to the cheaper one, either
6295 SIGN_EXTENSION or ZERO_EXTENSION, which are equivalent in these
6296 cases. */
6297 if (flag_expensive_optimizations
6298 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6299 && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6300 & ~(((unsigned HOST_WIDE_INT)
6301 GET_MODE_MASK (GET_MODE (pos_rtx)))
6302 >> 1))
6303 == 0)))
6305 rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6307 /* Prefer ZERO_EXTENSION, since it gives more information to
6308 backends. */
6309 if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6310 temp = temp1;
6312 pos_rtx = temp;
6314 else if (pos_rtx != 0
6315 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6316 pos_rtx = gen_lowpart (pos_mode, pos_rtx);
6318 /* Make POS_RTX unless we already have it and it is correct. If we don't
6319 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6320 be a CONST_INT. */
6321 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6322 pos_rtx = orig_pos_rtx;
6324 else if (pos_rtx == 0)
6325 pos_rtx = GEN_INT (pos);
6327 /* Make the required operation. See if we can use existing rtx. */
6328 new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6329 extraction_mode, inner, GEN_INT (len), pos_rtx);
6330 if (! in_dest)
6331 new = gen_lowpart (mode, new);
6333 return new;
6336 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6337 with any other operations in X. Return X without that shift if so. */
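/* For example, with COUNT == 3,
   (plus (ashift X (const_int 3)) (const_int 8)) yields
   (plus X (const_int 1)); both the inner shift and the constant can
   absorb a left shift of 3.  */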
6339 static rtx
6340 extract_left_shift (rtx x, int count)
6342 enum rtx_code code = GET_CODE (x);
6343 enum machine_mode mode = GET_MODE (x);
6344 rtx tem;
6346 switch (code)
6348 case ASHIFT:
6349 /* This is the shift itself. If it is wide enough, we will return
6350 either the value being shifted if the shift count is equal to
6351 COUNT or a shift for the difference. */
6352 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6353 && INTVAL (XEXP (x, 1)) >= count)
6354 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6355 INTVAL (XEXP (x, 1)) - count);
6356 break;
6358 case NEG: case NOT:
6359 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6360 return simplify_gen_unary (code, mode, tem, mode);
6362 break;
6364 case PLUS: case IOR: case XOR: case AND:
6365 /* If we can safely shift this constant and we find the inner shift,
6366 make a new operation. */
6367 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6368 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6369 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6370 return gen_binary (code, mode, tem,
6371 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6373 break;
6375 default:
6376 break;
6379 return 0;
6382 /* Look at the expression rooted at X. Look for expressions
6383 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6384 Form these expressions.
6386 Return the new rtx, usually just X.
6388 Also, for machines like the VAX that don't have logical shift insns,
6389 try to convert logical to arithmetic shift operations in cases where
6390 they are equivalent. This undoes the canonicalizations to logical
6391 shifts done elsewhere.
6393 We try, as much as possible, to re-use rtl expressions to save memory.
6395 IN_CODE says what kind of expression we are processing. Normally, it is
6396 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
6397 being kludges), it is MEM. When processing the arguments of a comparison
6398 or a COMPARE against zero, it is COMPARE. */
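/* For example, (and:SI (lshiftrt:SI X (const_int 4)) (const_int 255)),
   where 255 is one less than a power of two, typically becomes
   (zero_extract:SI X (const_int 8) (const_int 4)).  */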
6400 static rtx
6401 make_compound_operation (rtx x, enum rtx_code in_code)
6403 enum rtx_code code = GET_CODE (x);
6404 enum machine_mode mode = GET_MODE (x);
6405 int mode_width = GET_MODE_BITSIZE (mode);
6406 rtx rhs, lhs;
6407 enum rtx_code next_code;
6408 int i;
6409 rtx new = 0;
6410 rtx tem;
6411 const char *fmt;
6413 /* Select the code to be used in recursive calls. Once we are inside an
6414 address, we stay there. If we have a comparison, set to COMPARE,
6415 but once inside, go back to our default of SET. */
6417 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6418 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6419 && XEXP (x, 1) == const0_rtx) ? COMPARE
6420 : in_code == COMPARE ? SET : in_code);
6422 /* Process depending on the code of this operation. If NEW is set
6423 nonzero, it will be returned. */
6425 switch (code)
6427 case ASHIFT:
6428 /* Convert shifts by constants into multiplications if inside
6429 an address. */
6430 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6431 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6432 && INTVAL (XEXP (x, 1)) >= 0)
6434 new = make_compound_operation (XEXP (x, 0), next_code);
6435 new = gen_rtx_MULT (mode, new,
6436 GEN_INT ((HOST_WIDE_INT) 1
6437 << INTVAL (XEXP (x, 1))));
6439 break;
6441 case AND:
6442 /* If the second operand is not a constant, we can't do anything
6443 with it. */
6444 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6445 break;
6447 /* If the constant is a power of two minus one and the first operand
6448 is a logical right shift, make an extraction. */
6449 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6450 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6452 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6453 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6454 0, in_code == COMPARE);
6457 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
6458 else if (GET_CODE (XEXP (x, 0)) == SUBREG
6459 && subreg_lowpart_p (XEXP (x, 0))
6460 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6461 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6463 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6464 next_code);
6465 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6466 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6467 0, in_code == COMPARE);
6469 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
6470 else if ((GET_CODE (XEXP (x, 0)) == XOR
6471 || GET_CODE (XEXP (x, 0)) == IOR)
6472 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6473 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6474 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6476 /* Apply the distributive law, and then try to make extractions. */
6477 new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6478 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6479 XEXP (x, 1)),
6480 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6481 XEXP (x, 1)));
6482 new = make_compound_operation (new, in_code);
6485 /* If we have (and (rotate X C) M) and C is at least the number
6486 of bits in M, this is an extraction. */
6488 else if (GET_CODE (XEXP (x, 0)) == ROTATE
6489 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6490 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6491 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6493 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6494 new = make_extraction (mode, new,
6495 (GET_MODE_BITSIZE (mode)
6496 - INTVAL (XEXP (XEXP (x, 0), 1))),
6497 NULL_RTX, i, 1, 0, in_code == COMPARE);
6500 /* On machines without logical shifts, if the operand of the AND is
6501 a logical shift and our mask turns off all the propagated sign
6502 bits, we can replace the logical shift with an arithmetic shift. */
6503 else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6504 && !have_insn_for (LSHIFTRT, mode)
6505 && have_insn_for (ASHIFTRT, mode)
6506 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6507 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6508 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6509 && mode_width <= HOST_BITS_PER_WIDE_INT)
6511 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6513 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6514 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6515 SUBST (XEXP (x, 0),
6516 gen_rtx_ASHIFTRT (mode,
6517 make_compound_operation
6518 (XEXP (XEXP (x, 0), 0), next_code),
6519 XEXP (XEXP (x, 0), 1)));
6522 /* If the constant is one less than a power of two, this might be
6523 representable by an extraction even if no shift is present.
6524 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6525 we are in a COMPARE. */
6526 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6527 new = make_extraction (mode,
6528 make_compound_operation (XEXP (x, 0),
6529 next_code),
6530 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6532 /* If we are in a comparison and this is an AND with a power of two,
6533 convert this into the appropriate bit extract. */
6534 else if (in_code == COMPARE
6535 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6536 new = make_extraction (mode,
6537 make_compound_operation (XEXP (x, 0),
6538 next_code),
6539 i, NULL_RTX, 1, 1, 0, 1);
6541 break;
6543 case LSHIFTRT:
6544 /* If the sign bit is known to be zero, replace this with an
6545 arithmetic shift. */
6546 if (have_insn_for (ASHIFTRT, mode)
6547 && ! have_insn_for (LSHIFTRT, mode)
6548 && mode_width <= HOST_BITS_PER_WIDE_INT
6549 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6551 new = gen_rtx_ASHIFTRT (mode,
6552 make_compound_operation (XEXP (x, 0),
6553 next_code),
6554 XEXP (x, 1));
6555 break;
6558 /* ... fall through ... */
6560 case ASHIFTRT:
6561 lhs = XEXP (x, 0);
6562 rhs = XEXP (x, 1);
6564 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6565 this is a SIGN_EXTRACT. */
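      /* E.g. in SImode, (ashiftrt (ashift FOO 24) 24) extracts the low
	 byte of FOO with sign extension, i.e. an 8-bit sign extraction
	 at bit 0.  (Widths illustrative.)  */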
6566 if (GET_CODE (rhs) == CONST_INT
6567 && GET_CODE (lhs) == ASHIFT
6568 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6569 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6571 new = make_compound_operation (XEXP (lhs, 0), next_code);
6572 new = make_extraction (mode, new,
6573 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6574 NULL_RTX, mode_width - INTVAL (rhs),
6575 code == LSHIFTRT, 0, in_code == COMPARE);
6576 break;
6579 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6580 If so, try to merge the shifts into a SIGN_EXTEND. We could
6581 also do this for some cases of SIGN_EXTRACT, but it doesn't
6582 seem worth the effort; the case checked for occurs on Alpha. */
6584 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6585 && ! (GET_CODE (lhs) == SUBREG
6586 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6587 && GET_CODE (rhs) == CONST_INT
6588 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6589 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6590 new = make_extraction (mode, make_compound_operation (new, next_code),
6591 0, NULL_RTX, mode_width - INTVAL (rhs),
6592 code == LSHIFTRT, 0, in_code == COMPARE);
6594 break;
6596 case SUBREG:
6597 /* Call ourselves recursively on the inner expression. If we are
6598 narrowing the object and it has a different RTL code from
6599         the one it originally had, handle this SUBREG via force_to_mode.  */
6601 tem = make_compound_operation (SUBREG_REG (x), in_code);
6602 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6603 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6604 && subreg_lowpart_p (x))
6606 rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6607 NULL_RTX, 0);
6609 /* If we have something other than a SUBREG, we might have
6610 done an expansion, so rerun ourselves. */
6611 if (GET_CODE (newer) != SUBREG)
6612 newer = make_compound_operation (newer, in_code);
6614 return newer;
6617 /* If this is a paradoxical subreg, and the new code is a sign or
6618 zero extension, omit the subreg and widen the extension. If it
6619 is a regular subreg, we can still get rid of the subreg by not
6620 widening so much, or in fact removing the extension entirely. */
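      /* E.g. the lowpart (subreg:DI (zero_extend:SI FOO:QI) 0) can widen to
	 (zero_extend:DI FOO), while (subreg:QI (zero_extend:SI FOO:QI) 0)
	 drops the extension and is just FOO.  (Modes illustrative.)  */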
6621 if ((GET_CODE (tem) == SIGN_EXTEND
6622 || GET_CODE (tem) == ZERO_EXTEND)
6623 && subreg_lowpart_p (x))
6625 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6626 || (GET_MODE_SIZE (mode) >
6627 GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6629 if (! SCALAR_INT_MODE_P (mode))
6630 break;
6631 tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
6633 else
6634 tem = gen_lowpart (mode, XEXP (tem, 0));
6635 return tem;
6637 break;
6639 default:
6640 break;
6643 if (new)
6645 x = gen_lowpart (mode, new);
6646 code = GET_CODE (x);
6649 /* Now recursively process each operand of this operation. */
6650 fmt = GET_RTX_FORMAT (code);
6651 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6652 if (fmt[i] == 'e')
6654 new = make_compound_operation (XEXP (x, i), next_code);
6655 SUBST (XEXP (x, i), new);
6658 return x;
6661 /* Given M, see if it is a value that would select a field of bits
6662 within an item, but not the entire word. Return -1 if not.
6663 Otherwise, return the starting position of the field, where 0 is the
6664 low-order bit.
6666 *PLEN is set to the length of the field. */
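   Illustrative example: M == 0x78 (binary 0111 1000).  M & -M isolates
   the lowest set bit (0x78 & -0x78 == 0x8), so POS is 3, and
   (M >> 3) + 1 == 16 is a power of two, so we return 3 with *PLEN == 4.
   A mask with a hole, such as 0x5, yields -1.  */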
6668 static int
6669 get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
6671 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6672 int pos = exact_log2 (m & -m);
6673 int len;
6675 if (pos < 0)
6676 return -1;
6678 /* Now shift off the low-order zero bits and see if we have a power of
6679 two minus 1. */
6680 len = exact_log2 ((m >> pos) + 1);
6682 if (len <= 0)
6683 return -1;
6685 *plen = len;
6686 return pos;
6689 /* See if X can be simplified knowing that we will only refer to it in
6690 MODE and will only refer to those bits that are nonzero in MASK.
6691 If other bits are being computed or if masking operations are done
6692 that select a superset of the bits in MASK, they can sometimes be
6693 ignored.
6695 Return a possibly simplified expression, but always convert X to
6696 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
6698 Also, if REG is nonzero and X is a register equal in value to REG,
6699 replace X with REG.
6701 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6702 are all off in X. This is used when X will be complemented, by either
6703 NOT, NEG, or XOR. */
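   As a sketch of the effect: forcing (and X 0xff) to QImode under
   MASK == 0xff can strip the now-redundant AND and return just the
   low part of X.  (Example values are illustrative.)  */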
6705 static rtx
6706 force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
6707 rtx reg, int just_select)
6709 enum rtx_code code = GET_CODE (x);
6710 int next_select = just_select || code == XOR || code == NOT || code == NEG;
6711 enum machine_mode op_mode;
6712 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6713 rtx op0, op1, temp;
6715 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6716 code below will do the wrong thing since the mode of such an
6717 expression is VOIDmode.
6719 Also do nothing if X is a CLOBBER; this can happen if X was
6720 the return value from a call to gen_lowpart. */
6721 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6722 return x;
6724   /* We want to perform the operation in its present mode unless we know
6725 that the operation is valid in MODE, in which case we do the operation
6726 in MODE. */
6727 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6728 && have_insn_for (code, mode))
6729 ? mode : GET_MODE (x));
6731 /* It is not valid to do a right-shift in a narrower mode
6732 than the one it came in with. */
6733 if ((code == LSHIFTRT || code == ASHIFTRT)
6734 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6735 op_mode = GET_MODE (x);
6737 /* Truncate MASK to fit OP_MODE. */
6738 if (op_mode)
6739 mask &= GET_MODE_MASK (op_mode);
6741 /* When we have an arithmetic operation, or a shift whose count we
6742 do not know, we need to assume that all bits up to the highest-order
6743 bit in MASK will be needed. This is how we form such a mask. */
6744 if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
6745 fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
6746 else
6747 fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6748 - 1);
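  /* For example (illustrative), MASK == 0x14 gives floor_log2 == 4, so
     FULLER_MASK becomes 0x1f: any of bits 0-4 can generate a carry into
     the bits MASK selects.  */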
6750   /* Determine which bits of X may be nonzero; all other bits are known
       to be zero.  */
6751 nonzero = nonzero_bits (x, mode);
6753 /* If none of the bits in X are needed, return a zero. */
6754 if (! just_select && (nonzero & mask) == 0)
6755 x = const0_rtx;
6757 /* If X is a CONST_INT, return a new one. Do this here since the
6758 test below will fail. */
6759 if (GET_CODE (x) == CONST_INT)
6761 if (SCALAR_INT_MODE_P (mode))
6762 return gen_int_mode (INTVAL (x) & mask, mode);
6763 else
6765 x = GEN_INT (INTVAL (x) & mask);
6766 return gen_lowpart_common (mode, x);
6770 /* If X is narrower than MODE and we want all the bits in X's mode, just
6771 get X in the proper mode. */
6772 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6773 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6774 return gen_lowpart (mode, x);
6776 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6777 MASK are already known to be zero in X, we need not do anything. */
6778 if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6779 return x;
6781 switch (code)
6783 case CLOBBER:
6784 /* If X is a (clobber (const_int)), return it since we know we are
6785 generating something that won't match. */
6786 return x;
6788 case USE:
6789 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6790 spanned the boundary of the MEM. If we are now masking so it is
6791 within that boundary, we don't need the USE any more. */
6792 if (! BITS_BIG_ENDIAN
6793 && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6794 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6795 break;
6797 case SIGN_EXTEND:
6798 case ZERO_EXTEND:
6799 case ZERO_EXTRACT:
6800 case SIGN_EXTRACT:
6801 x = expand_compound_operation (x);
6802 if (GET_CODE (x) != code)
6803 return force_to_mode (x, mode, mask, reg, next_select);
6804 break;
6806 case REG:
6807 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6808 || rtx_equal_p (reg, get_last_value (x))))
6809 x = reg;
6810 break;
6812 case SUBREG:
6813 if (subreg_lowpart_p (x)
6814 /* We can ignore the effect of this SUBREG if it narrows the mode or
6815 if the constant masks to zero all the bits the mode doesn't
6816 have. */
6817 && ((GET_MODE_SIZE (GET_MODE (x))
6818 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6819 || (0 == (mask
6820 & GET_MODE_MASK (GET_MODE (x))
6821 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6822 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6823 break;
6825 case AND:
6826 /* If this is an AND with a constant, convert it into an AND
6827 whose constant is the AND of that constant with MASK. If it
6828 remains an AND of MASK, delete it since it is redundant. */
6830 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6832 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6833 mask & INTVAL (XEXP (x, 1)));
6835 /* If X is still an AND, see if it is an AND with a mask that
6836 is just some low-order bits. If so, and it is MASK, we don't
6837 need it. */
6839 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6840 && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
6841 == mask))
6842 x = XEXP (x, 0);
6844 /* If it remains an AND, try making another AND with the bits
6845 in the mode mask that aren't in MASK turned on. If the
6846 constant in the AND is wide enough, this might make a
6847 cheaper constant. */
6849 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6850 && GET_MODE_MASK (GET_MODE (x)) != mask
6851 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6853 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6854 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
6855 int width = GET_MODE_BITSIZE (GET_MODE (x));
6856 rtx y;
6858       /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6859 number, sign extend it. */
6860 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6861 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6862 cval |= (HOST_WIDE_INT) -1 << width;
6864 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6865 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6866 x = y;
6869 break;
6872 goto binop;
6874 case PLUS:
6875 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6876 low-order bits (as in an alignment operation) and FOO is already
6877 aligned to that boundary, mask C1 to that boundary as well.
6878 This may eliminate that PLUS and, later, the AND. */
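      /* Illustrative case: with FOO known to be 8-byte aligned,
	 (and (plus FOO 9) -8) can never see FOO's low three bits, so
	 C1 == 9 can be reduced to 8, leaving (plus FOO 8).  */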
6881 unsigned int width = GET_MODE_BITSIZE (mode);
6882 unsigned HOST_WIDE_INT smask = mask;
6884 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6885 number, sign extend it. */
6887 if (width < HOST_BITS_PER_WIDE_INT
6888 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6889 smask |= (HOST_WIDE_INT) -1 << width;
6891 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6892 && exact_log2 (- smask) >= 0
6893 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
6894 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
6895 return force_to_mode (plus_constant (XEXP (x, 0),
6896 (INTVAL (XEXP (x, 1)) & smask)),
6897 mode, smask, reg, next_select);
6900 /* ... fall through ... */
6902 case MULT:
6903 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6904 most significant bit in MASK since carries from those bits will
6905 affect the bits we are interested in. */
6906 mask = fuller_mask;
6907 goto binop;
6909 case MINUS:
6910 /* If X is (minus C Y) where C's least set bit is larger than any bit
6911 in the mask, then we may replace with (neg Y). */
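      /* For instance, under MASK == 7 the value (minus 16 Y) agrees with
	 (neg Y) in the low three bits, since 16 - Y == -Y (mod 8).
	 (Illustrative constants.)  */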
6912 if (GET_CODE (XEXP (x, 0)) == CONST_INT
6913 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
6914 & -INTVAL (XEXP (x, 0))))
6915 > mask))
6917 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
6918 GET_MODE (x));
6919 return force_to_mode (x, mode, mask, reg, next_select);
6922 /* Similarly, if C contains every bit in the fuller_mask, then we may
6923 replace with (not Y). */
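      /* E.g. with FULLER_MASK == 7, (minus 15 Y) matches (not Y) in the
	 low three bits: 15 - Y == 7 - Y (mod 8), which is the 3-bit
	 complement of Y.  (Illustrative constants.)  */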
6924 if (GET_CODE (XEXP (x, 0)) == CONST_INT
6925 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
6926 == INTVAL (XEXP (x, 0))))
6928 x = simplify_gen_unary (NOT, GET_MODE (x),
6929 XEXP (x, 1), GET_MODE (x));
6930 return force_to_mode (x, mode, mask, reg, next_select);
6933 mask = fuller_mask;
6934 goto binop;
6936 case IOR:
6937 case XOR:
6938 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6939 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6940 operation which may be a bitfield extraction. Ensure that the
6941 constant we form is not wider than the mode of X. */
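      /* Sketch of the rewrite, given a suitable MASK:
	 (ior (lshiftrt FOO 2) 3) becomes (lshiftrt (ior FOO 12) 2);
	 shifting the constant left by C1 compensates for the outer right
	 shift.  (Constants illustrative.)  */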
6943 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6944 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6945 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6946 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6947 && GET_CODE (XEXP (x, 1)) == CONST_INT
6948 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6949 + floor_log2 (INTVAL (XEXP (x, 1))))
6950 < GET_MODE_BITSIZE (GET_MODE (x)))
6951 && (INTVAL (XEXP (x, 1))
6952 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6954 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6955 << INTVAL (XEXP (XEXP (x, 0), 1)));
6956 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6957 XEXP (XEXP (x, 0), 0), temp);
6958 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6959 XEXP (XEXP (x, 0), 1));
6960 return force_to_mode (x, mode, mask, reg, next_select);
6963 binop:
6964 /* For most binary operations, just propagate into the operation and
6965 change the mode if we have an operation of that mode. */
6967 op0 = gen_lowpart (op_mode,
6968 force_to_mode (XEXP (x, 0), mode, mask,
6969 reg, next_select));
6970 op1 = gen_lowpart (op_mode,
6971 force_to_mode (XEXP (x, 1), mode, mask,
6972 reg, next_select));
6974 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6975 x = gen_binary (code, op_mode, op0, op1);
6976 break;
6978 case ASHIFT:
6979 /* For left shifts, do the same, but just for the first operand.
6980 However, we cannot do anything with shifts where we cannot
6981 guarantee that the counts are smaller than the size of the mode
6982 because such a count will have a different meaning in a
6983 wider mode. */
6985 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6986 && INTVAL (XEXP (x, 1)) >= 0
6987 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6988 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6989 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
6990 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
6991 break;
6993 /* If the shift count is a constant and we can do arithmetic in
6994 the mode of the shift, refine which bits we need. Otherwise, use the
6995 conservative form of the mask. */
6996 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6997 && INTVAL (XEXP (x, 1)) >= 0
6998 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6999 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7000 mask >>= INTVAL (XEXP (x, 1));
7001 else
7002 mask = fuller_mask;
7004 op0 = gen_lowpart (op_mode,
7005 force_to_mode (XEXP (x, 0), op_mode,
7006 mask, reg, next_select));
7008 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7009 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
7010 break;
7012 case LSHIFTRT:
7013       /* Here we can only do something if the shift count is a constant,
7014          the constant is valid for the host, and we can do arithmetic
7015          in OP_MODE.  */
7017 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7018 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7019 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7021 rtx inner = XEXP (x, 0);
7022 unsigned HOST_WIDE_INT inner_mask;
7024 /* Select the mask of the bits we need for the shift operand. */
7025 inner_mask = mask << INTVAL (XEXP (x, 1));
7027 /* We can only change the mode of the shift if we can do arithmetic
7028 in the mode of the shift and INNER_MASK is no wider than the
7029 width of OP_MODE. */
7030 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
7031 || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
7032 op_mode = GET_MODE (x);
7034 inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
7036 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7037 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7040 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7041 shift and AND produces only copies of the sign bit (C2 is one less
7042 than a power of two), we can do this with just a shift. */
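      /* Sketch: if FOO has 24 sign-bit copies in SImode, then
	 (and (lshiftrt FOO 10) 3) sees only sign-bit copies and is
	 equivalent to (lshiftrt FOO 30).  (Numbers illustrative.)  */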
7044 if (GET_CODE (x) == LSHIFTRT
7045 && GET_CODE (XEXP (x, 1)) == CONST_INT
7046 /* The shift puts one of the sign bit copies in the least significant
7047 bit. */
7048 && ((INTVAL (XEXP (x, 1))
7049 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7050 >= GET_MODE_BITSIZE (GET_MODE (x)))
7051 && exact_log2 (mask + 1) >= 0
7052 /* Number of bits left after the shift must be more than the mask
7053 needs. */
7054 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7055 <= GET_MODE_BITSIZE (GET_MODE (x)))
7056 /* Must be more sign bit copies than the mask needs. */
7057 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7058 >= exact_log2 (mask + 1)))
7059 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7060 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7061 - exact_log2 (mask + 1)));
7063 goto shiftrt;
7065 case ASHIFTRT:
7066 /* If we are just looking for the sign bit, we don't need this shift at
7067 all, even if it has a variable count. */
7068 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7069 && (mask == ((unsigned HOST_WIDE_INT) 1
7070 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7071 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7073 /* If this is a shift by a constant, get a mask that contains those bits
7074 that are not copies of the sign bit. We then have two cases: If
7075 MASK only includes those bits, this can be a logical shift, which may
7076 allow simplifications. If MASK is a single-bit field not within
7077 those bits, we are requesting a copy of the sign bit and hence can
7078 shift the sign bit to the appropriate location. */
7080 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7081 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7083 int i = -1;
7085 /* If the considered data is wider than HOST_WIDE_INT, we can't
7086 represent a mask for all its bits in a single scalar.
7087 But we only care about the lower bits, so calculate these. */
7089 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7091 nonzero = ~(HOST_WIDE_INT) 0;
7093 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7094 is the number of bits a full-width mask would have set.
7095 We need only shift if these are fewer than nonzero can
7096 hold. If not, we must keep all bits set in nonzero. */
7098 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7099 < HOST_BITS_PER_WIDE_INT)
7100 nonzero >>= INTVAL (XEXP (x, 1))
7101 + HOST_BITS_PER_WIDE_INT
7102                       - GET_MODE_BITSIZE (GET_MODE (x));
7104 else
7106 nonzero = GET_MODE_MASK (GET_MODE (x));
7107 nonzero >>= INTVAL (XEXP (x, 1));
7110 if ((mask & ~nonzero) == 0
7111 || (i = exact_log2 (mask)) >= 0)
7113 x = simplify_shift_const
7114 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7115 i < 0 ? INTVAL (XEXP (x, 1))
7116 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7118 if (GET_CODE (x) != ASHIFTRT)
7119 return force_to_mode (x, mode, mask, reg, next_select);
7123 /* If MASK is 1, convert this to an LSHIFTRT. This can be done
7124 even if the shift count isn't a constant. */
7125 if (mask == 1)
7126 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7128 shiftrt:
7130 /* If this is a zero- or sign-extension operation that just affects bits
7131 we don't care about, remove it. Be sure the call above returned
7132 something that is still a shift. */
7134 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7135 && GET_CODE (XEXP (x, 1)) == CONST_INT
7136 && INTVAL (XEXP (x, 1)) >= 0
7137 && (INTVAL (XEXP (x, 1))
7138 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7139 && GET_CODE (XEXP (x, 0)) == ASHIFT
7140 && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
7141 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7142 reg, next_select);
7144 break;
7146 case ROTATE:
7147 case ROTATERT:
7148 /* If the shift count is constant and we can do computations
7149 in the mode of X, compute where the bits we care about are.
7150 Otherwise, we can't do anything. Don't change the mode of
7151 the shift or propagate MODE into the shift, though. */
7152 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7153 && INTVAL (XEXP (x, 1)) >= 0)
7155 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7156 GET_MODE (x), GEN_INT (mask),
7157 XEXP (x, 1));
7158 if (temp && GET_CODE (temp) == CONST_INT)
7159 SUBST (XEXP (x, 0),
7160 force_to_mode (XEXP (x, 0), GET_MODE (x),
7161 INTVAL (temp), reg, next_select));
7163 break;
7165 case NEG:
7166 /* If we just want the low-order bit, the NEG isn't needed since it
7167 won't change the low-order bit. */
7168 if (mask == 1)
7169 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7171 /* We need any bits less significant than the most significant bit in
7172 MASK since carries from those bits will affect the bits we are
7173 interested in. */
7174 mask = fuller_mask;
7175 goto unop;
7177 case NOT:
7178 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7179 same as the XOR case above. Ensure that the constant we form is not
7180 wider than the mode of X. */
7182 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7183 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7184 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7185 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7186 < GET_MODE_BITSIZE (GET_MODE (x)))
7187 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7189 temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
7190 GET_MODE (x));
7191 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7192 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7194 return force_to_mode (x, mode, mask, reg, next_select);
7197 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7198 use the full mask inside the NOT. */
7199 mask = fuller_mask;
7201 unop:
7202 op0 = gen_lowpart (op_mode,
7203 force_to_mode (XEXP (x, 0), mode, mask,
7204 reg, next_select));
7205 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7206 x = simplify_gen_unary (code, op_mode, op0, op_mode);
7207 break;
7209 case NE:
7210       /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is contained
7211          in STORE_FLAG_VALUE and the single bit of FOO that might be nonzero
7212          equals STORE_FLAG_VALUE.  */
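      /* Concretely, assuming STORE_FLAG_VALUE == 1: if FOO can only be 0
	 or 1, then (ne FOO 0) computes exactly FOO, so under MASK == 1 we
	 can use FOO directly.  */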
7213 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7214 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7215 && (nonzero_bits (XEXP (x, 0), mode)
7216 == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
7217 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7219 break;
7221 case IF_THEN_ELSE:
7222 /* We have no way of knowing if the IF_THEN_ELSE can itself be
7223 written in a narrower mode. We play it safe and do not do so. */
7225 SUBST (XEXP (x, 1),
7226 gen_lowpart (GET_MODE (x),
7227 force_to_mode (XEXP (x, 1), mode,
7228 mask, reg, next_select)));
7229 SUBST (XEXP (x, 2),
7230 gen_lowpart (GET_MODE (x),
7231 force_to_mode (XEXP (x, 2), mode,
7232 mask, reg, next_select)));
7233 break;
7235 default:
7236 break;
7239 /* Ensure we return a value of the proper mode. */
7240 return gen_lowpart (mode, x);
7243 /* Return nonzero if X is an expression that has one of two values depending on
7244 whether some other value is zero or nonzero. In that case, we return the
7245 value that is being tested, *PTRUE is set to the value if the rtx being
7246 returned has a nonzero value, and *PFALSE is set to the other alternative.
7248 If we return zero, we set *PTRUE and *PFALSE to X. */
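   For example, for X == (ne A 0) we return A, with *PTRUE set to
   const_true_rtx and *PFALSE to const0_rtx.  */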
7250 static rtx
7251 if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
7253 enum machine_mode mode = GET_MODE (x);
7254 enum rtx_code code = GET_CODE (x);
7255 rtx cond0, cond1, true0, true1, false0, false1;
7256 unsigned HOST_WIDE_INT nz;
7258 /* If we are comparing a value against zero, we are done. */
7259 if ((code == NE || code == EQ)
7260 && XEXP (x, 1) == const0_rtx)
7262 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7263 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7264 return XEXP (x, 0);
7267 /* If this is a unary operation whose operand has one of two values, apply
7268 our opcode to compute those values. */
7269 else if (GET_RTX_CLASS (code) == '1'
7270 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7272 *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7273 *pfalse = simplify_gen_unary (code, mode, false0,
7274 GET_MODE (XEXP (x, 0)));
7275 return cond0;
7278 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7279 make can't possibly match and would suppress other optimizations. */
7280 else if (code == COMPARE)
7283 /* If this is a binary operation, see if either side has only one of two
7284 values. If either one does or if both do and they are conditional on
7285 the same value, compute the new true and false values. */
7286 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7287 || GET_RTX_CLASS (code) == '<')
7289 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7290 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7292 if ((cond0 != 0 || cond1 != 0)
7293 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7295 /* If if_then_else_cond returned zero, then true/false are the
7296 same rtl. We must copy one of them to prevent invalid rtl
7297 sharing. */
7298 if (cond0 == 0)
7299 true0 = copy_rtx (true0);
7300 else if (cond1 == 0)
7301 true1 = copy_rtx (true1);
7303 *ptrue = gen_binary (code, mode, true0, true1);
7304 *pfalse = gen_binary (code, mode, false0, false1);
7305 return cond0 ? cond0 : cond1;
7308 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7309 operands is zero when the other is nonzero, and vice-versa,
7310 and STORE_FLAG_VALUE is 1 or -1. */
7312 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7313 && (code == PLUS || code == IOR || code == XOR || code == MINUS
7314 || code == UMAX)
7315 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7317 rtx op0 = XEXP (XEXP (x, 0), 1);
7318 rtx op1 = XEXP (XEXP (x, 1), 1);
7320 cond0 = XEXP (XEXP (x, 0), 0);
7321 cond1 = XEXP (XEXP (x, 1), 0);
7323 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7324 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7325 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7326 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7327 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7328 || ((swap_condition (GET_CODE (cond0))
7329 == combine_reversed_comparison_code (cond1))
7330 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7331 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7332 && ! side_effects_p (x))
7334 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
7335 *pfalse = gen_binary (MULT, mode,
7336 (code == MINUS
7337 ? simplify_gen_unary (NEG, mode, op1,
7338 mode)
7339 : op1),
7340 const_true_rtx);
7341 return cond0;
7345 /* Similarly for MULT, AND and UMIN, except that for these the result
7346 is always zero. */
7347 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7348 && (code == MULT || code == AND || code == UMIN)
7349 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7351 cond0 = XEXP (XEXP (x, 0), 0);
7352 cond1 = XEXP (XEXP (x, 1), 0);
7354 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7355 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7356 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7357 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7358 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7359 || ((swap_condition (GET_CODE (cond0))
7360 == combine_reversed_comparison_code (cond1))
7361 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7362 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7363 && ! side_effects_p (x))
7365 *ptrue = *pfalse = const0_rtx;
7366 return cond0;
7371 else if (code == IF_THEN_ELSE)
7373 /* If we have IF_THEN_ELSE already, extract the condition and
7374 canonicalize it if it is NE or EQ. */
7375 cond0 = XEXP (x, 0);
7376 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7377 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7378 return XEXP (cond0, 0);
7379 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7381 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7382 return XEXP (cond0, 0);
7384 else
7385 return cond0;
7388 /* If X is a SUBREG, we can narrow both the true and false values
7389      of the inner expression, if there is a condition.  */
7390 else if (code == SUBREG
7391 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7392 &true0, &false0)))
7394 *ptrue = simplify_gen_subreg (mode, true0,
7395 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7396 *pfalse = simplify_gen_subreg (mode, false0,
7397 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7399 return cond0;
7402   /* If X is a constant, this isn't special and will cause confusion
7403 if we treat it as such. Likewise if it is equivalent to a constant. */
7404 else if (CONSTANT_P (x)
7405 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7408 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7409 will be least confusing to the rest of the compiler. */
7410 else if (mode == BImode)
7412 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7413 return x;
7416 /* If X is known to be either 0 or -1, those are the true and
7417 false values when testing X. */
7418 else if (x == constm1_rtx || x == const0_rtx
7419 || (mode != VOIDmode
7420 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7422 *ptrue = constm1_rtx, *pfalse = const0_rtx;
7423 return x;
7426 /* Likewise for 0 or a single bit. */
7427 else if (SCALAR_INT_MODE_P (mode)
7428 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7429 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7431 *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7432 return x;
7435 /* Otherwise fail; show no condition with true and false values the same. */
7436 *ptrue = *pfalse = x;
7437 return 0;
7440 /* Return the value of expression X given the fact that condition COND
7441 is known to be true when applied to REG as its first operand and VAL
7442 as its second. X is known to not be shared and so can be modified in
7443 place.
7445 We only handle the simplest cases, and specifically those cases that
7446 arise with IF_THEN_ELSE expressions. */
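   For example, if COND is GE and VAL is const0_rtx, then within the arm
   where the condition is known to hold, (abs REG) is simply REG.  */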
7448 static rtx
7449 known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
7451 enum rtx_code code = GET_CODE (x);
7452 rtx temp;
7453 const char *fmt;
7454 int i, j;
7456 if (side_effects_p (x))
7457 return x;
7459 /* If either operand of the condition is a floating point value,
7460 then we have to avoid collapsing an EQ comparison. */
7461 if (cond == EQ
7462 && rtx_equal_p (x, reg)
7463 && ! FLOAT_MODE_P (GET_MODE (x))
7464 && ! FLOAT_MODE_P (GET_MODE (val)))
7465 return val;
7467 if (cond == UNEQ && rtx_equal_p (x, reg))
7468 return val;
7470 /* If X is (abs REG) and we know something about REG's relationship
7471 with zero, we may be able to simplify this. */
7473 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7474 switch (cond)
7476 case GE: case GT: case EQ:
7477 return XEXP (x, 0);
7478 case LT: case LE:
7479 return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7480 XEXP (x, 0),
7481 GET_MODE (XEXP (x, 0)));
7482 default:
7483 break;
7486 /* The only other cases we handle are MIN, MAX, and comparisons if the
7487 operands are the same as REG and VAL. */
7489 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7491 if (rtx_equal_p (XEXP (x, 0), val))
7492 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7494 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7496 if (GET_RTX_CLASS (code) == '<')
7498 if (comparison_dominates_p (cond, code))
7499 return const_true_rtx;
7501 code = combine_reversed_comparison_code (x);
7502 if (code != UNKNOWN
7503 && comparison_dominates_p (cond, code))
7504 return const0_rtx;
7505 else
7506 return x;
7508 else if (code == SMAX || code == SMIN
7509 || code == UMIN || code == UMAX)
7511 int unsignedp = (code == UMIN || code == UMAX);
7513 /* Do not reverse the condition when it is NE or EQ.
7514 This is because we cannot conclude anything about
7515 the value of 'SMAX (x, y)' when x is not equal to y,
7516 but we can when x equals y. */
7517 if ((code == SMAX || code == UMAX)
7518 && ! (cond == EQ || cond == NE))
7519 cond = reverse_condition (cond);
7521 switch (cond)
7523 case GE: case GT:
7524 return unsignedp ? x : XEXP (x, 1);
7525 case LE: case LT:
7526 return unsignedp ? x : XEXP (x, 0);
7527 case GEU: case GTU:
7528 return unsignedp ? XEXP (x, 1) : x;
7529 case LEU: case LTU:
7530 return unsignedp ? XEXP (x, 0) : x;
7531 default:
7532 break;
7537 else if (code == SUBREG)
7539 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7540 rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7542 if (SUBREG_REG (x) != r)
7544 /* We must simplify subreg here, before we lose track of the
7545 original inner_mode. */
7546 new = simplify_subreg (GET_MODE (x), r,
7547 inner_mode, SUBREG_BYTE (x));
7548 if (new)
7549 return new;
7550 else
7551 SUBST (SUBREG_REG (x), r);
7554 return x;
7556 /* We don't have to handle SIGN_EXTEND here, because even in the
7557 case of replacing something with a modeless CONST_INT, a
7558 CONST_INT is already (supposed to be) a valid sign extension for
7559 its narrower mode, which implies it's already properly
7560 sign-extended for the wider mode. Now, for ZERO_EXTEND, the
7561 story is different. */
7562 else if (code == ZERO_EXTEND)
7564 enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7565 rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7567 if (XEXP (x, 0) != r)
7569 /* We must simplify the zero_extend here, before we lose
7570 track of the original inner_mode. */
7571 new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7572 r, inner_mode);
7573 if (new)
7574 return new;
7575 else
7576 SUBST (XEXP (x, 0), r);
7579 return x;
7582 fmt = GET_RTX_FORMAT (code);
7583 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7585 if (fmt[i] == 'e')
7586 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7587 else if (fmt[i] == 'E')
7588 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7589 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7590 cond, reg, val));
7593 return x;
7596 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
7597 assignment as a field assignment. */
7599 static int
7600 rtx_equal_for_field_assignment_p (rtx x, rtx y)
7602 if (x == y || rtx_equal_p (x, y))
7603 return 1;
7605 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7606 return 0;
7608 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7609 Note that all SUBREGs of MEM are paradoxical; otherwise they
7610 would have been rewritten. */
7611 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7612 && GET_CODE (SUBREG_REG (y)) == MEM
7613 && rtx_equal_p (SUBREG_REG (y),
7614 gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
7615 return 1;
7617 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7618 && GET_CODE (SUBREG_REG (x)) == MEM
7619 && rtx_equal_p (SUBREG_REG (x),
7620 gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
7621 return 1;
7623 /* We used to see if get_last_value of X and Y were the same but that's
7624 not correct. In one direction, we'll cause the assignment to have
7625      the wrong destination and in the other case, we'll import a register
7626      into this insn that might already have been dead.  So fail if none of the
7627 above cases are true. */
7628 return 0;
7631 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
7632 Return that assignment if so.
7634 We only handle the most common cases. */
7636 static rtx
7637 make_field_assignment (rtx x)
7639 rtx dest = SET_DEST (x);
7640 rtx src = SET_SRC (x);
7641 rtx assign;
7642 rtx rhs, lhs;
7643 HOST_WIDE_INT c1;
7644 HOST_WIDE_INT pos;
7645 unsigned HOST_WIDE_INT len;
7646 rtx other;
7647 enum machine_mode mode;
7649 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7650 a clear of a one-bit field. We will have changed it to
7651 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7652 for a SUBREG. */
7654 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7655 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7656 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7657 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7659 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7660 1, 1, 1, 0);
7661 if (assign != 0)
7662 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7663 return x;
7666 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7667 && subreg_lowpart_p (XEXP (src, 0))
7668 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7669 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7670 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7671 && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
7672 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7673 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7675 assign = make_extraction (VOIDmode, dest, 0,
7676 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7677 1, 1, 1, 0);
7678 if (assign != 0)
7679 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7680 return x;
7683 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7684 one-bit field. */
7685 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7686 && XEXP (XEXP (src, 0), 0) == const1_rtx
7687 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7689 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7690 1, 1, 1, 0);
7691 if (assign != 0)
7692 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7693 return x;
7696 /* The other case we handle is assignments into a constant-position
7697 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
7698 a mask that has all one bits except for a group of zero bits and
7699 OTHER is known to have zeros where C1 has ones, this is such an
7700 assignment. Compute the position and length from C1. Shift OTHER
7701 to the appropriate position, force it to the required mode, and
7702 make the extraction. Check for the AND in both operands. */
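  /* Illustrative SImode shape: (ior (and DEST 0xffff00ff) OTHER), with
     OTHER known to be zero outside bits 8-15, assigns an 8-bit field at
     position 8; OTHER is shifted right 8 places and becomes the source
     of the extraction.  (Constants illustrative.)  */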
7704 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7705 return x;
7707 rhs = expand_compound_operation (XEXP (src, 0));
7708 lhs = expand_compound_operation (XEXP (src, 1));
7710 if (GET_CODE (rhs) == AND
7711 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7712 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7713 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7714 else if (GET_CODE (lhs) == AND
7715 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7716 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7717 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7718 else
7719 return x;
7721 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7722 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7723 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7724 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7725 return x;
7727 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7728 if (assign == 0)
7729 return x;
7731 /* The mode to use for the source is the mode of the assignment, or of
7732 what is inside a possible STRICT_LOW_PART. */
7733 mode = (GET_CODE (assign) == STRICT_LOW_PART
7734 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7736 /* Shift OTHER right POS places and make it the source, restricting it
7737 to the proper length and mode. */
7739 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7740 GET_MODE (src), other, pos),
7741 mode,
7742 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7743 ? ~(unsigned HOST_WIDE_INT) 0
7744 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7745 dest, 0);
7747 /* If SRC is masked by an AND that does not make a difference in
7748 the value being stored, strip it. */
7749 if (GET_CODE (assign) == ZERO_EXTRACT
7750 && GET_CODE (XEXP (assign, 1)) == CONST_INT
7751 && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
7752 && GET_CODE (src) == AND
7753 && GET_CODE (XEXP (src, 1)) == CONST_INT
7754 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
7755 == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
7756 src = XEXP (src, 0);
7758 return gen_rtx_SET (VOIDmode, assign, src);
7761 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7762 if so. */
7764 static rtx
7765 apply_distributive_law (rtx x)
7767 enum rtx_code code = GET_CODE (x);
7768 enum rtx_code inner_code;
7769 rtx lhs, rhs, other;
7770 rtx tem;
7772 /* Distributivity is not true for floating point as it can change the
7773 value. So we don't do it unless -funsafe-math-optimizations. */
7774 if (FLOAT_MODE_P (GET_MODE (x))
7775 && ! flag_unsafe_math_optimizations)
7776 return x;
7778 /* The outer operation can only be one of the following: */
7779 if (code != IOR && code != AND && code != XOR
7780 && code != PLUS && code != MINUS)
7781 return x;
7783 lhs = XEXP (x, 0);
7784 rhs = XEXP (x, 1);
7786 /* If either operand is a primitive we can't do anything, so get out
7787 fast. */
7788 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7789 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7790 return x;
7792 lhs = expand_compound_operation (lhs);
7793 rhs = expand_compound_operation (rhs);
7794 inner_code = GET_CODE (lhs);
7795 if (inner_code != GET_CODE (rhs))
7796 return x;
7798 /* See if the inner and outer operations distribute. */
7799 switch (inner_code)
7801 case LSHIFTRT:
7802 case ASHIFTRT:
7803 case AND:
7804 case IOR:
7805       /* These all distribute except over PLUS and MINUS.  */
7806 if (code == PLUS || code == MINUS)
7807 return x;
7808 break;
7810 case MULT:
7811 if (code != PLUS && code != MINUS)
7812 return x;
7813 break;
7815 case ASHIFT:
7816 /* This is also a multiply, so it distributes over everything. */
7817 break;
7819 case SUBREG:
7820       /* Non-paradoxical SUBREGs distribute over all operations, provided
7821 the inner modes and byte offsets are the same, this is an extraction
7822 of a low-order part, we don't convert an fp operation to int or
7823 vice versa, and we would not be converting a single-word
7824 operation into a multi-word operation. The latter test is not
7825 required, but it prevents generating unneeded multi-word operations.
7826 Some of the previous tests are redundant given the latter test, but
7827 are retained because they are required for correctness.
7829 We produce the result slightly differently in this case. */
7831 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7832 || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7833 || ! subreg_lowpart_p (lhs)
7834 || (GET_MODE_CLASS (GET_MODE (lhs))
7835 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7836 || (GET_MODE_SIZE (GET_MODE (lhs))
7837 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7838 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7839 return x;
7841 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7842 SUBREG_REG (lhs), SUBREG_REG (rhs));
7843 return gen_lowpart (GET_MODE (x), tem);
7845 default:
7846 return x;
7849 /* Set LHS and RHS to the inner operands (A and B in the example
7850 above) and set OTHER to the common operand (C in the example).
7851      There is only one way to do this unless the inner operation is
7852 commutative. */
7853 if (GET_RTX_CLASS (inner_code) == 'c'
7854 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7855 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7856 else if (GET_RTX_CLASS (inner_code) == 'c'
7857 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7858 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7859 else if (GET_RTX_CLASS (inner_code) == 'c'
7860 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7861 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7862 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7863 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7864 else
7865 return x;
7867 /* Form the new inner operation, seeing if it simplifies first. */
7868 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7870 /* There is one exception to the general way of distributing:
7871 (a | c) ^ (b | c) -> (a ^ b) & ~c */
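  /* A quick check of the identity: in any bit where c is 1, both (a | c)
     and (b | c) are 1, so the XOR is 0, matching the & ~c; where c is 0
     it is simply a ^ b.  */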
7872 if (code == XOR && inner_code == IOR)
7874 inner_code = AND;
7875 other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
7878   /* We may be able to continue distributing the result, so call
7879 ourselves recursively on the inner operation before forming the
7880 outer operation, which we return. */
7881 return gen_binary (inner_code, GET_MODE (x),
7882 apply_distributive_law (tem), other);
7885 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7886 in MODE.
7888 Return an equivalent form, if different from X. Otherwise, return X. If
7889 X is zero, we are to always construct the equivalent form. */
7891 static rtx
7892 simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
7893 unsigned HOST_WIDE_INT constop)
7895 unsigned HOST_WIDE_INT nonzero;
7896 int i;
7898   /* Simplify VAROP knowing that we will only be looking at some of the
7899 bits in it.
7901 Note by passing in CONSTOP, we guarantee that the bits not set in
7902 CONSTOP are not significant and will never be examined. We must
7903 ensure that is the case by explicitly masking out those bits
7904 before returning. */
7905 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7907 /* If VAROP is a CLOBBER, we will fail so return it. */
7908 if (GET_CODE (varop) == CLOBBER)
7909 return varop;
7911 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
7912 to VAROP and return the new constant. */
7913 if (GET_CODE (varop) == CONST_INT)
7914 return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
7916 /* See what bits may be nonzero in VAROP. Unlike the general case of
7917 a call to nonzero_bits, here we don't care about bits outside
7918 MODE. */
7920 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7922 /* Turn off all bits in the constant that are known to already be zero.
7923 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7924 which is tested below. */
7926 constop &= nonzero;
7928 /* If we don't have any bits left, return zero. */
7929 if (constop == 0)
7930 return const0_rtx;
7932 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7933 a power of two, we can replace this with an ASHIFT. */
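  /* This works because (neg X) is 0 or -1 when X is 0 or 1, so ANDing it
     with the single bit 2^i yields 0 or 2^i, i.e. (ashift X i).  */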
7934 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7935 && (i = exact_log2 (constop)) >= 0)
7936 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7938 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7939 or XOR, then try to apply the distributive law. This may eliminate
7940 operations if either branch can be simplified because of the AND.
7941 It may also make some cases more complex, but those cases probably
7942 won't match a pattern either with or without this. */
7944 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7945 return
7946 gen_lowpart
7947 (mode,
7948 apply_distributive_law
7949 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7950 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7951 XEXP (varop, 0), constop),
7952 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7953 XEXP (varop, 1), constop))));
7955   /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
7956 the AND and see if one of the operands simplifies to zero. If so, we
7957 may eliminate it. */
7959 if (GET_CODE (varop) == PLUS
7960 && exact_log2 (constop + 1) >= 0)
7962 rtx o0, o1;
7964 o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
7965 o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
7966 if (o0 == const0_rtx)
7967 return o1;
7968 if (o1 == const0_rtx)
7969 return o0;
7972 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7973 if we already had one (just check for the simplest cases). */
7974 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7975 && GET_MODE (XEXP (x, 0)) == mode
7976 && SUBREG_REG (XEXP (x, 0)) == varop)
7977 varop = XEXP (x, 0);
7978 else
7979 varop = gen_lowpart (mode, varop);
7981 /* If we can't make the SUBREG, try to return what we were given. */
7982 if (GET_CODE (varop) == CLOBBER)
7983 return x ? x : varop;
7985 /* If we are only masking insignificant bits, return VAROP. */
7986 if (constop == nonzero)
7987 x = varop;
7988 else
7990 /* Otherwise, return an AND. */
7991 constop = trunc_int_for_mode (constop, mode);
7992 /* See how much, if any, of X we can use. */
7993 if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7994 x = gen_binary (AND, mode, varop, GEN_INT (constop));
7996 else
7998 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7999 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
8000 SUBST (XEXP (x, 1), GEN_INT (constop));
8002 SUBST (XEXP (x, 0), varop);
8006 return x;
8009 #define nonzero_bits_with_known(X, MODE) \
8010 cached_nonzero_bits (X, MODE, known_x, known_mode, known_ret)
8012 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
8013 It avoids exponential behavior in nonzero_bits1 when X has
8014 identical subexpressions on the first or the second level. */
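   For instance, in (plus X X) both operands are the same rtx, so the
   value computed for X is handed down as KNOWN_X/KNOWN_RET rather than
   being recomputed along both branches.  */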
8016 static unsigned HOST_WIDE_INT
8017 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
8018 enum machine_mode known_mode,
8019 unsigned HOST_WIDE_INT known_ret)
8021 if (x == known_x && mode == known_mode)
8022 return known_ret;
8024   /* Try to find identical subexpressions.  If found, call
8025 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
8026 precomputed value for the subexpression as KNOWN_RET. */
8028 if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8029 || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8031 rtx x0 = XEXP (x, 0);
8032 rtx x1 = XEXP (x, 1);
8034 /* Check the first level. */
8035 if (x0 == x1)
8036 return nonzero_bits1 (x, mode, x0, mode,
8037 nonzero_bits_with_known (x0, mode));
8039 /* Check the second level. */
8040 if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8041 || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8042 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8043 return nonzero_bits1 (x, mode, x1, mode,
8044 nonzero_bits_with_known (x1, mode));
8046 if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8047 || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8048 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8049 return nonzero_bits1 (x, mode, x0, mode,
8050 nonzero_bits_with_known (x0, mode));
8053 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
8056 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
8057 We don't let nonzero_bits recur into num_sign_bit_copies, because that
8058 is less useful. We can't allow both, because that results in exponential
8059 run time recursion. There is a nullstone testcase that triggered
8060 this. This macro avoids accidental uses of num_sign_bit_copies. */
8061 #define cached_num_sign_bit_copies()
8063 /* Given an expression, X, compute which bits in X can be nonzero.
8064 We don't care about bits outside of those defined in MODE.
8066    For most X this is simply GET_MODE_MASK (MODE), but if X is
8067 a shift, AND, or zero_extract, we can do better. */
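   E.g. the result for (and X (const_int 12)) has at most bits 2 and 3
   set, and for (zero_extend:SI Y:QI) at most the low eight bits.  */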
8069 static unsigned HOST_WIDE_INT
8070 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
8071 enum machine_mode known_mode,
8072 unsigned HOST_WIDE_INT known_ret)
8074 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
8075 unsigned HOST_WIDE_INT inner_nz;
8076 enum rtx_code code;
8077 unsigned int mode_width = GET_MODE_BITSIZE (mode);
8078 rtx tem;
8080 /* For floating-point values, assume all bits are needed. */
8081 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
8082 return nonzero;
8084 /* If X is wider than MODE, use its mode instead. */
8085 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
8087 mode = GET_MODE (x);
8088 nonzero = GET_MODE_MASK (mode);
8089 mode_width = GET_MODE_BITSIZE (mode);
8092 if (mode_width > HOST_BITS_PER_WIDE_INT)
8093 /* Our only callers in this case look for single bit values. So
8094 just return the mode mask. Those tests will then be false. */
8095 return nonzero;
8097 #ifndef WORD_REGISTER_OPERATIONS
8098 /* If MODE is wider than X, but both are a single word for both the host
8099 and target machines, we can compute this from which bits of the
8100 object might be nonzero in its own mode, taking into account the fact
8101 that on many CISC machines, accessing an object in a wider mode
8102 causes the high-order bits to become undefined. So they are
8103 not known to be zero. */
8105 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
8106 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
8107 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
8108 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
8110 nonzero &= nonzero_bits_with_known (x, GET_MODE (x));
8111 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
8112 return nonzero;
8114 #endif
8116 code = GET_CODE (x);
8117 switch (code)
8119 case REG:
8120 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8121 /* If pointers extend unsigned and this is a pointer in Pmode, say that
8122 all the bits above ptr_mode are known to be zero. */
8123 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8124 && REG_POINTER (x))
8125 nonzero &= GET_MODE_MASK (ptr_mode);
8126 #endif
8128 /* Include declared information about alignment of pointers. */
8129 /* ??? We don't properly preserve REG_POINTER changes across
8130 pointer-to-integer casts, so we can't trust it except for
8131 things that we know must be pointers. See execute/960116-1.c. */
8132 if ((x == stack_pointer_rtx
8133 || x == frame_pointer_rtx
8134 || x == arg_pointer_rtx)
8135 && REGNO_POINTER_ALIGN (REGNO (x)))
8137 unsigned HOST_WIDE_INT alignment
8138 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
8140 #ifdef PUSH_ROUNDING
8141 /* If PUSH_ROUNDING is defined, it is possible for the
8142 stack to be momentarily aligned only to that amount,
8143 so we pick the least alignment. */
8144 if (x == stack_pointer_rtx && PUSH_ARGS)
8145 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
8146 alignment);
8147 #endif
8149 nonzero &= ~(alignment - 1);
8152 /* If X is a register whose nonzero bits value is current, use it.
8153 Otherwise, if X is a register whose value we can find, use that
8154 value. Otherwise, use the previously-computed global nonzero bits
8155 for this register. */
8157 if (reg_last_set_value[REGNO (x)] != 0
8158 && (reg_last_set_mode[REGNO (x)] == mode
8159 || (GET_MODE_CLASS (reg_last_set_mode[REGNO (x)]) == MODE_INT
8160 && GET_MODE_CLASS (mode) == MODE_INT))
8161 && (reg_last_set_label[REGNO (x)] == label_tick
8162 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8163 && REG_N_SETS (REGNO (x)) == 1
8164 && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8165 REGNO (x))))
8166 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8167 return reg_last_set_nonzero_bits[REGNO (x)] & nonzero;
8169 tem = get_last_value (x);
8171 if (tem)
8173 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8174 /* If X is narrower than MODE and TEM is a non-negative
8175 constant that would appear negative in the mode of X,
8176 sign-extend it for use in reg_nonzero_bits because some
8177 machines (maybe most) will actually do the sign-extension
8178 and this is the conservative approach.
8180 ??? For 2.5, try to tighten up the MD files in this regard
8181 instead of this kludge. */
8183 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8184 && GET_CODE (tem) == CONST_INT
8185 && INTVAL (tem) > 0
8186 && 0 != (INTVAL (tem)
8187 & ((HOST_WIDE_INT) 1
8188 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8189 tem = GEN_INT (INTVAL (tem)
8190 | ((HOST_WIDE_INT) (-1)
8191 << GET_MODE_BITSIZE (GET_MODE (x))));
8192 #endif
8193 return nonzero_bits_with_known (tem, mode) & nonzero;
8195 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8197 unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)];
8199 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8200 /* We don't know anything about the upper bits. */
8201 mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8202 return nonzero & mask;
8204 else
8205 return nonzero;
8207 case CONST_INT:
8208 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8209 /* If X is negative in MODE, sign-extend the value. */
8210 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8211 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8212 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8213 #endif
8215 return INTVAL (x);
8217 case MEM:
8218 #ifdef LOAD_EXTEND_OP
8219 /* In many, if not most, RISC machines, reading a byte from memory
8220 zeros the rest of the register. Noticing that fact saves a lot
8221 of extra zero-extends. */
8222 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8223 nonzero &= GET_MODE_MASK (GET_MODE (x));
8224 #endif
8225 break;
8227 case EQ: case NE:
8228 case UNEQ: case LTGT:
8229 case GT: case GTU: case UNGT:
8230 case LT: case LTU: case UNLT:
8231 case GE: case GEU: case UNGE:
8232 case LE: case LEU: case UNLE:
8233 case UNORDERED: case ORDERED:
8235 /* If this produces an integer result, we know which bits are set.
8236 Code here used to clear bits outside the mode of X, but that is
8237 now done above. */
8239 if (GET_MODE_CLASS (mode) == MODE_INT
8240 && mode_width <= HOST_BITS_PER_WIDE_INT)
8241 nonzero = STORE_FLAG_VALUE;
8242 break;
8244 case NEG:
8245 #if 0
8246 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8247 and num_sign_bit_copies. */
8248 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8249 == GET_MODE_BITSIZE (GET_MODE (x)))
8250 nonzero = 1;
8251 #endif
8253 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
8254 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8255 break;
8257 case ABS:
8258 #if 0
8259 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8260 and num_sign_bit_copies. */
8261 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8262 == GET_MODE_BITSIZE (GET_MODE (x)))
8263 nonzero = 1;
8264 #endif
8265 break;
8267 case TRUNCATE:
8268 nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8269 & GET_MODE_MASK (mode));
8270 break;
8272 case ZERO_EXTEND:
8273 nonzero &= nonzero_bits_with_known (XEXP (x, 0), mode);
8274 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8275 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8276 break;
8278 case SIGN_EXTEND:
8279 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8280 Otherwise, show all the bits in the outer mode but not the inner
8281 may be nonzero. */
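/* For instance, for (sign_extend:SI (reg:QI R)): if bit 7 of R may be
   nonzero, bits 8 through 31 of the result are marked possibly nonzero
   as well; if bit 7 is known zero, only bits 0 through 6 remain.  */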
8282 inner_nz = nonzero_bits_with_known (XEXP (x, 0), mode);
8283 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8285 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8286 if (inner_nz
8287 & (((HOST_WIDE_INT) 1
8288 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8289 inner_nz |= (GET_MODE_MASK (mode)
8290 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8293 nonzero &= inner_nz;
8294 break;
8296 case AND:
8297 nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8298 & nonzero_bits_with_known (XEXP (x, 1), mode));
8299 break;
8301 case XOR: case IOR:
8302 case UMIN: case UMAX: case SMIN: case SMAX:
8304 unsigned HOST_WIDE_INT nonzero0 =
8305 nonzero_bits_with_known (XEXP (x, 0), mode);
8307 /* Don't call nonzero_bits for the second time if it cannot change
8308 anything. */
8309 if ((nonzero & nonzero0) != nonzero)
8310 nonzero &= (nonzero0
8311 | nonzero_bits_with_known (XEXP (x, 1), mode));
8313 break;
8315 case PLUS: case MINUS:
8316 case MULT:
8317 case DIV: case UDIV:
8318 case MOD: case UMOD:
8319 /* We can apply the rules of arithmetic to compute the number of
8320 high- and low-order zero bits of these operations. We start by
8321 computing the width (position of the highest-order nonzero bit)
8322 and the number of low-order zero bits for each value. */
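/* For instance (illustrative values), if NZ0 == 0x0c (width 4, two
   low-order zero bits) and NZ1 == 0x30 (width 6, four low-order zero
   bits), then for PLUS result_width is MAX (4, 6) + 1 == 7 and
   result_low is MIN (2, 4) == 2, so NONZERO is narrowed to 0x7c.  */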
8324 unsigned HOST_WIDE_INT nz0 =
8325 nonzero_bits_with_known (XEXP (x, 0), mode);
8326 unsigned HOST_WIDE_INT nz1 =
8327 nonzero_bits_with_known (XEXP (x, 1), mode);
8328 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
8329 int width0 = floor_log2 (nz0) + 1;
8330 int width1 = floor_log2 (nz1) + 1;
8331 int low0 = floor_log2 (nz0 & -nz0);
8332 int low1 = floor_log2 (nz1 & -nz1);
8333 HOST_WIDE_INT op0_maybe_minusp
8334 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
8335 HOST_WIDE_INT op1_maybe_minusp
8336 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
8337 unsigned int result_width = mode_width;
8338 int result_low = 0;
8340 switch (code)
8342 case PLUS:
8343 result_width = MAX (width0, width1) + 1;
8344 result_low = MIN (low0, low1);
8345 break;
8346 case MINUS:
8347 result_low = MIN (low0, low1);
8348 break;
8349 case MULT:
8350 result_width = width0 + width1;
8351 result_low = low0 + low1;
8352 break;
8353 case DIV:
8354 if (width1 == 0)
8355 break;
8356 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8357 result_width = width0;
8358 break;
8359 case UDIV:
8360 if (width1 == 0)
8361 break;
8362 result_width = width0;
8363 break;
8364 case MOD:
8365 if (width1 == 0)
8366 break;
8367 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8368 result_width = MIN (width0, width1);
8369 result_low = MIN (low0, low1);
8370 break;
8371 case UMOD:
8372 if (width1 == 0)
8373 break;
8374 result_width = MIN (width0, width1);
8375 result_low = MIN (low0, low1);
8376 break;
8377 default:
8378 abort ();
8381 if (result_width < mode_width)
8382 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8384 if (result_low > 0)
8385 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
8387 #ifdef POINTERS_EXTEND_UNSIGNED
8388 /* If pointers extend unsigned and this is an addition or subtraction
8389 to a pointer in Pmode, all the bits above ptr_mode are known to be
8390 zero. */
8391 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
8392 && (code == PLUS || code == MINUS)
8393 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8394 nonzero &= GET_MODE_MASK (ptr_mode);
8395 #endif
8397 break;
8399 case ZERO_EXTRACT:
8400 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8401 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8402 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8403 break;
8405 case SUBREG:
8406 /* If this is a SUBREG formed for a promoted variable that has
8407 been zero-extended, we know that at least the high-order bits
8408 are zero, though others might be too. */
8410 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
8411 nonzero = (GET_MODE_MASK (GET_MODE (x))
8412 & nonzero_bits_with_known (SUBREG_REG (x), GET_MODE (x)));
8414 /* If the inner mode is a single word for both the host and target
8415 machines, we can compute this from which bits of the inner
8416 object might be nonzero. */
8417 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8418 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8419 <= HOST_BITS_PER_WIDE_INT))
8421 nonzero &= nonzero_bits_with_known (SUBREG_REG (x), mode);
8423 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8424 /* If this is a typical RISC machine, we only have to worry
8425 about the way loads are extended. */
8426 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8427 ? (((nonzero
8428 & (((unsigned HOST_WIDE_INT) 1
8429 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8430 != 0))
8431 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8432 || GET_CODE (SUBREG_REG (x)) != MEM)
8433 #endif
8435 /* On many CISC machines, accessing an object in a wider mode
8436 causes the high-order bits to become undefined. So they are
8437 not known to be zero. */
8438 if (GET_MODE_SIZE (GET_MODE (x))
8439 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8440 nonzero |= (GET_MODE_MASK (GET_MODE (x))
8441 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8444 break;
8446 case ASHIFTRT:
8447 case LSHIFTRT:
8448 case ASHIFT:
8449 case ROTATE:
8450 /* The nonzero bits are in two classes: any bits within MODE
8451 that aren't in GET_MODE (x) are always significant. The rest of the
8452 nonzero bits are those that are significant in the operand of
8453 the shift when shifted the appropriate number of bits. This
8454 shows that high-order bits are cleared by the right shift and
8455 low-order bits by left shifts. */
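/* For example, (lshiftrt:SI X 8) can have at most the bits of
   (nonzero_bits (X, SImode) >> 8) set, clearing the top eight bits,
   while (ashift:SI X 8) clears the bottom eight instead.  */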
8456 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8457 && INTVAL (XEXP (x, 1)) >= 0
8458 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8460 enum machine_mode inner_mode = GET_MODE (x);
8461 unsigned int width = GET_MODE_BITSIZE (inner_mode);
8462 int count = INTVAL (XEXP (x, 1));
8463 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8464 unsigned HOST_WIDE_INT op_nonzero =
8465 nonzero_bits_with_known (XEXP (x, 0), mode);
8466 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8467 unsigned HOST_WIDE_INT outer = 0;
8469 if (mode_width > width)
8470 outer = (op_nonzero & nonzero & ~mode_mask);
8472 if (code == LSHIFTRT)
8473 inner >>= count;
8474 else if (code == ASHIFTRT)
8476 inner >>= count;
8478 /* If the sign bit may have been nonzero before the shift, we
8479 need to mark all the places it could have been copied to
8480 by the shift as possibly nonzero. */
8481 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8482 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8484 else if (code == ASHIFT)
8485 inner <<= count;
8486 else
8487 inner = ((inner << (count % width)
8488 | (inner >> (width - (count % width)))) & mode_mask);
8490 nonzero &= (outer | inner);
8492 break;
8494 case FFS:
8495 case POPCOUNT:
8496 /* This is at most the number of bits in the mode. */
8497 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
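/* For a 32-bit mode this is (2 << 5) - 1 == 63, the smallest all-ones
   mask that can hold every possible count from 0 to 32.  */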
8498 break;
8500 case CLZ:
8501 /* If CLZ has a known value at zero, then the nonzero bits are
8502 that value, plus the number of bits in the mode minus one. */
8503 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
8504 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
8505 else
8506 nonzero = -1;
8507 break;
8509 case CTZ:
8510 /* If CTZ has a known value at zero, then the nonzero bits are
8511 that value, plus the number of bits in the mode minus one. */
8512 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
8513 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
8514 else
8515 nonzero = -1;
8516 break;
8518 case PARITY:
8519 nonzero = 1;
8520 break;
8522 case IF_THEN_ELSE:
8523 nonzero &= (nonzero_bits_with_known (XEXP (x, 1), mode)
8524 | nonzero_bits_with_known (XEXP (x, 2), mode));
8525 break;
8527 default:
8528 break;
8531 return nonzero;
8534 /* See the macro definition above. */
8535 #undef cached_num_sign_bit_copies
8537 #define num_sign_bit_copies_with_known(X, M) \
8538 cached_num_sign_bit_copies (X, M, known_x, known_mode, known_ret)
8540 /* The function cached_num_sign_bit_copies is a wrapper around
8541 num_sign_bit_copies1. It avoids exponential behavior in
8542 num_sign_bit_copies1 when X has identical subexpressions on the
8543 first or the second level. */
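/* For instance, given (and Y (xor Y Z)), the value for Y is computed
   once and passed down as KNOWN_X/KNOWN_RET, so the recursion below
   need not visit Y a second time.  */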
8545 static unsigned int
8546 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
8547 enum machine_mode known_mode,
8548 unsigned int known_ret)
8550 if (x == known_x && mode == known_mode)
8551 return known_ret;
8553 /* Try to find identical subexpressions. If found call
8554 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
8555 the precomputed value for the subexpression as KNOWN_RET. */
8557 if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8558 || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8560 rtx x0 = XEXP (x, 0);
8561 rtx x1 = XEXP (x, 1);
8563 /* Check the first level. */
8564 if (x0 == x1)
8565 return
8566 num_sign_bit_copies1 (x, mode, x0, mode,
8567 num_sign_bit_copies_with_known (x0, mode));
8569 /* Check the second level. */
8570 if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8571 || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8572 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8573 return
8574 num_sign_bit_copies1 (x, mode, x1, mode,
8575 num_sign_bit_copies_with_known (x1, mode));
8577 if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8578 || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8579 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8580 return
8581 num_sign_bit_copies1 (x, mode, x0, mode,
8582 num_sign_bit_copies_with_known (x0, mode));
8585 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
8588 /* Return the number of bits at the high-order end of X that are known to
8589 be equal to the sign bit. X will be used in mode MODE; if MODE is
8590 VOIDmode, X will be used in its own mode. The returned value will always
8591 be between 1 and the number of bits in MODE. */
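/* For example, (const_int -4) used in SImode is ...11111100 in binary,
   so every bit above bit 1 equals the sign bit and the function
   returns 30.  */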
8593 static unsigned int
8594 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
8595 enum machine_mode known_mode,
8596 unsigned int known_ret)
8598 enum rtx_code code = GET_CODE (x);
8599 unsigned int bitwidth;
8600 int num0, num1, result;
8601 unsigned HOST_WIDE_INT nonzero;
8602 rtx tem;
8604 /* If we weren't given a mode, use the mode of X. If the mode is still
8605 VOIDmode, we don't know anything. Likewise if one of the modes is
8606 floating-point. */
8608 if (mode == VOIDmode)
8609 mode = GET_MODE (x);
8611 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8612 return 1;
8614 bitwidth = GET_MODE_BITSIZE (mode);
8616 /* For a smaller object, just ignore the high bits. */
8617 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8619 num0 = num_sign_bit_copies_with_known (x, GET_MODE (x));
8620 return MAX (1,
8621 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8624 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8626 #ifndef WORD_REGISTER_OPERATIONS
8627 /* If this machine does not do all register operations on the entire
8628 register and MODE is wider than the mode of X, we can say nothing
8629 at all about the high-order bits. */
8630 return 1;
8631 #else
8632 /* Likewise on machines that do, if the mode of the object is smaller
8633 than a word and loads of that size don't sign extend, we can say
8634 nothing about the high order bits. */
8635 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8636 #ifdef LOAD_EXTEND_OP
8637 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8638 #endif
8640 return 1;
8641 #endif
8644 switch (code)
8646 case REG:
8648 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8649 /* If pointers extend signed and this is a pointer in Pmode, say that
8650 all the bits above ptr_mode are known to be sign bit copies. */
8651 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8652 && REG_POINTER (x))
8653 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8654 #endif
8656 if (reg_last_set_value[REGNO (x)] != 0
8657 && reg_last_set_mode[REGNO (x)] == mode
8658 && (reg_last_set_label[REGNO (x)] == label_tick
8659 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8660 && REG_N_SETS (REGNO (x)) == 1
8661 && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8662 REGNO (x))))
8663 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8664 return reg_last_set_sign_bit_copies[REGNO (x)];
8666 tem = get_last_value (x);
8667 if (tem != 0)
8668 return num_sign_bit_copies_with_known (tem, mode);
8670 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0
8671 && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth)
8672 return reg_sign_bit_copies[REGNO (x)];
8673 break;
8675 case MEM:
8676 #ifdef LOAD_EXTEND_OP
8677 /* Some RISC machines sign-extend all loads of smaller than a word. */
8678 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8679 return MAX (1, ((int) bitwidth
8680 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8681 #endif
8682 break;
8684 case CONST_INT:
8685 /* If the constant is negative, take its 1's complement and remask.
8686 Then see how many zero bits we have. */
8687 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8688 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8689 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8690 nonzero = (~nonzero) & GET_MODE_MASK (mode);
8692 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8694 case SUBREG:
8695 /* If this is a SUBREG for a promoted object that is sign-extended
8696 and we are looking at it in a wider mode, we know that at least the
8697 high-order bits are known to be sign bit copies. */
8699 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8701 num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8702 return MAX ((int) bitwidth
8703 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8704 num0);
8707 /* For a smaller object, just ignore the high bits. */
8708 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8710 num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), VOIDmode);
8711 return MAX (1, (num0
8712 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8713 - bitwidth)));
8716 #ifdef WORD_REGISTER_OPERATIONS
8717 #ifdef LOAD_EXTEND_OP
8718 /* For paradoxical SUBREGs on machines where all register operations
8719 affect the entire register, just look inside. Note that we are
8720 passing MODE to the recursive call, so the number of sign bit copies
8721 will remain relative to that mode, not the inner mode. */
8723 /* This works only if loads sign extend. Otherwise, if we get a
8724 reload for the inner part, it may be loaded from the stack, and
8725 then we lose all sign bit copies that existed before the store
8726 to the stack. */
8728 if ((GET_MODE_SIZE (GET_MODE (x))
8729 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8730 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8731 && GET_CODE (SUBREG_REG (x)) == MEM)
8732 return num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8733 #endif
8734 #endif
8735 break;
8737 case SIGN_EXTRACT:
8738 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8739 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8740 break;
8742 case SIGN_EXTEND:
8743 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8744 + num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode));
8746 case TRUNCATE:
8747 /* For a smaller object, just ignore the high bits. */
8748 num0 = num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode);
8749 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8750 - bitwidth)));
8752 case NOT:
8753 return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8755 case ROTATE: case ROTATERT:
8756 /* If we are rotating left by a number of bits less than the number
8757 of sign bit copies, we can just subtract that amount from the
8758 number. */
8759 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8760 && INTVAL (XEXP (x, 1)) >= 0
8761 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
8763 num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8764 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8765 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8767 break;
8769 case NEG:
8770 /* In general, this subtracts one sign bit copy. But if the value
8771 is known to be positive, the number of sign bit copies is the
8772 same as that of the input. Finally, if the input has just one bit
8773 that might be nonzero, all the bits are copies of the sign bit. */
8774 num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8775 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8776 return num0 > 1 ? num0 - 1 : 1;
8778 nonzero = nonzero_bits (XEXP (x, 0), mode);
8779 if (nonzero == 1)
8780 return bitwidth;
8782 if (num0 > 1
8783 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8784 num0--;
8786 return num0;
8788 case IOR: case AND: case XOR:
8789 case SMIN: case SMAX: case UMIN: case UMAX:
8790 /* Logical operations will preserve the number of sign-bit copies.
8791 MIN and MAX operations always return one of the operands. */
8792 num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8793 num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8794 return MIN (num0, num1);
8796 case PLUS: case MINUS:
8797 /* For addition and subtraction, we can have a 1-bit carry. However,
8798 if we are subtracting 1 from a positive number, there will not
8799 be such a carry. Furthermore, if the positive number is known to
8800 be 0 or 1, we know the result is either -1 or 0. */
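/* For example, for (plus:SI X (const_int -1)) with nonzero_bits (X) == 1,
   the only possible results are -1 and 0, so all 32 bits are copies of
   the sign bit and we return the full bitwidth.  */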
8802 if (code == PLUS && XEXP (x, 1) == constm1_rtx
8803 && bitwidth <= HOST_BITS_PER_WIDE_INT)
8805 nonzero = nonzero_bits (XEXP (x, 0), mode);
8806 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8807 return (nonzero == 1 || nonzero == 0 ? bitwidth
8808 : bitwidth - floor_log2 (nonzero) - 1);
8811 num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8812 num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8813 result = MAX (1, MIN (num0, num1) - 1);
8815 #ifdef POINTERS_EXTEND_UNSIGNED
8816 /* If pointers extend signed and this is an addition or subtraction
8817 to a pointer in Pmode, all the bits above ptr_mode are known to be
8818 sign bit copies. */
8819 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8820 && (code == PLUS || code == MINUS)
8821 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8822 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
8823 - GET_MODE_BITSIZE (ptr_mode) + 1),
8824 result);
8825 #endif
8826 return result;
8828 case MULT:
8829 /* The number of bits of the product is the sum of the number of
8830 bits of both terms. However, unless one of the terms is known
8831 to be positive, we must allow for an additional bit since negating
8832 a negative number can remove one sign bit copy. */
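/* For example, in a 32-bit mode, if NUM0 == 25 (the first operand fits
   in 8 signed bits) and NUM1 == 29 (4 signed bits), the result is
   32 - 7 - 3 == 22, reduced to 21 when both operands might be negative:
   (-128) * (-8) == 1024 needs that extra bit.  */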
8834 num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8835 num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8837 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8838 if (result > 0
8839 && (bitwidth > HOST_BITS_PER_WIDE_INT
8840 || (((nonzero_bits (XEXP (x, 0), mode)
8841 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8842 && ((nonzero_bits (XEXP (x, 1), mode)
8843 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8844 result--;
8846 return MAX (1, result);
8848 case UDIV:
8849 /* The result must be <= the first operand. If the first operand
8850 has the high bit set, we know nothing about the number of sign
8851 bit copies. */
8852 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8853 return 1;
8854 else if ((nonzero_bits (XEXP (x, 0), mode)
8855 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8856 return 1;
8857 else
8858 return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8860 case UMOD:
8861 /* The result must be <= the second operand. */
8862 return num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8864 case DIV:
8865 /* Similar to unsigned division, except that we have to worry about
8866 the case where the divisor is negative, in which case we have
8867 to add 1. */
8868 result = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8869 if (result > 1
8870 && (bitwidth > HOST_BITS_PER_WIDE_INT
8871 || (nonzero_bits (XEXP (x, 1), mode)
8872 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8873 result--;
8875 return result;
8877 case MOD:
8878 result = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8879 if (result > 1
8880 && (bitwidth > HOST_BITS_PER_WIDE_INT
8881 || (nonzero_bits (XEXP (x, 1), mode)
8882 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8883 result--;
8885 return result;
8887 case ASHIFTRT:
8888 /* Shifts by a constant add to the number of bits equal to the
8889 sign bit. */
8890 num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8891 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8892 && INTVAL (XEXP (x, 1)) > 0)
8893 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
8895 return num0;
8897 case ASHIFT:
8898 /* Left shifts destroy copies. */
8899 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8900 || INTVAL (XEXP (x, 1)) < 0
8901 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
8902 return 1;
8904 num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8905 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8907 case IF_THEN_ELSE:
8908 num0 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8909 num1 = num_sign_bit_copies_with_known (XEXP (x, 2), mode);
8910 return MIN (num0, num1);
8912 case EQ: case NE: case GE: case GT: case LE: case LT:
8913 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
8914 case GEU: case GTU: case LEU: case LTU:
8915 case UNORDERED: case ORDERED:
8916 /* If the constant is negative, take its 1's complement and remask.
8917 Then see how many zero bits we have. */
8918 nonzero = STORE_FLAG_VALUE;
8919 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8920 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8921 nonzero = (~nonzero) & GET_MODE_MASK (mode);
8923 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8924 break;
8926 default:
8927 break;
8930 /* If we haven't been able to figure it out by one of the above rules,
8931 see if some of the high-order bits are known to be zero. If so,
8932 count those bits and return one less than that amount. If we can't
8933 safely compute the mask for this mode, return 1. */
8935 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8936 return 1;
8938 nonzero = nonzero_bits (x, mode);
8939 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8940 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8943 /* Return the number of "extended" bits there are in X, when interpreted
8944 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8945 unsigned quantities, this is the number of high-order zero bits.
8946 For signed quantities, this is the number of copies of the sign bit
8947 minus 1. In both cases, this function returns the number of "spare"
8948 bits. For example, if two quantities for which this function returns
8949 at least 1 are added, the addition is known not to overflow.
8951 This function will always return 0 unless called during combine, which
8952 implies that it must be called from a define_split. */
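/* For example, an unsigned SImode value whose nonzero_bits mask is 0xff
   has 32 - 1 - 7 == 24 spare high-order bits, so the sum of two such
   values cannot overflow.  */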
8954 unsigned int
8955 extended_count (rtx x, enum machine_mode mode, int unsignedp)
8957 if (nonzero_sign_valid == 0)
8958 return 0;
8960 return (unsignedp
8961 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8962 ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
8963 - floor_log2 (nonzero_bits (x, mode)))
8964 : 0)
8965 : num_sign_bit_copies (x, mode) - 1);
8968 /* This function is called from `simplify_shift_const' to merge two
8969 outer operations. Specifically, we have already found that we need
8970 to perform operation *POP0 with constant *PCONST0 at the outermost
8971 position. We would now like to also perform OP1 with constant CONST1
8972 (with *POP0 being done last).
8974 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8975 the resulting operation. *PCOMP_P is set to 1 if we would need to
8976 complement the innermost operand, otherwise it is unchanged.
8978 MODE is the mode in which the operation will be done. No bits outside
8979 the width of this mode matter. It is assumed that the width of this mode
8980 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8982 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
8983 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8984 result is simply *PCONST0.
8986 If the resulting operation cannot be expressed as one operation, we
8987 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
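/* For example, if *POP0 is IOR with *PCONST0 == 0xf0 and OP1 is IOR with
   CONST1 == 0x0f, the two collapse into a single IOR with 0xff.  Merging
   OP1 == AND into *POP0 == XOR with the same constant instead yields AND
   with *PCOMP_P set, since (a & b) ^ b == (~a) & b.  */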
8989 static int
8990 merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
8992 enum rtx_code op0 = *pop0;
8993 HOST_WIDE_INT const0 = *pconst0;
8995 const0 &= GET_MODE_MASK (mode);
8996 const1 &= GET_MODE_MASK (mode);
8998 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8999 if (op0 == AND)
9000 const1 &= const0;
9002 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
9003 if OP0 is SET. */
9005 if (op1 == NIL || op0 == SET)
9006 return 1;
9008 else if (op0 == NIL)
9009 op0 = op1, const0 = const1;
9011 else if (op0 == op1)
9013 switch (op0)
9015 case AND:
9016 const0 &= const1;
9017 break;
9018 case IOR:
9019 const0 |= const1;
9020 break;
9021 case XOR:
9022 const0 ^= const1;
9023 break;
9024 case PLUS:
9025 const0 += const1;
9026 break;
9027 case NEG:
9028 op0 = NIL;
9029 break;
9030 default:
9031 break;
9035 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
9036 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
9037 return 0;
9039 /* If the two constants aren't the same, we can't do anything. The
9040 remaining six cases can all be done. */
9041 else if (const0 != const1)
9042 return 0;
9044 else
9045 switch (op0)
9047 case IOR:
9048 if (op1 == AND)
9049 /* (a & b) | b == b */
9050 op0 = SET;
9051 else /* op1 == XOR */
9052 /* (a ^ b) | b == a | b */
9054 break;
9056 case XOR:
9057 if (op1 == AND)
9058 /* (a & b) ^ b == (~a) & b */
9059 op0 = AND, *pcomp_p = 1;
9060 else /* op1 == IOR */
9061 /* (a | b) ^ b == a & ~b */
9062 op0 = AND, const0 = ~const0;
9063 break;
9065 case AND:
9066 if (op1 == IOR)
9067 /* (a | b) & b == b */
9068 op0 = SET;
9069 else /* op1 == XOR */
9070 /* (a ^ b) & b == (~a) & b */
9071 *pcomp_p = 1;
9072 break;
9073 default:
9074 break;
9077 /* Check for NO-OP cases. */
9078 const0 &= GET_MODE_MASK (mode);
9079 if (const0 == 0
9080 && (op0 == IOR || op0 == XOR || op0 == PLUS))
9081 op0 = NIL;
9082 else if (const0 == 0 && op0 == AND)
9083 op0 = SET;
9084 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
9085 && op0 == AND)
9086 op0 = NIL;
9088 /* ??? Slightly redundant with the above mask, but not entirely.
9089 Moving this above means we'd have to sign-extend the mode mask
9090 for the final test. */
9091 const0 = trunc_int_for_mode (const0, mode);
9093 *pop0 = op0;
9094 *pconst0 = const0;
9096 return 1;
9099 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
9100 The result of the shift is RESULT_MODE. X, if nonzero, is an expression
9101 that we started with.
9103 The shift is normally computed in the widest mode we find in VAROP, as
9104 long as it isn't a different number of words than RESULT_MODE. Exceptions
9105 are ASHIFTRT and ROTATE, which are always done in their original mode. */
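/* For example, (ashift:SI (ashift:SI X 3) 2) simplifies to
   (ashift:SI X 5), while (lshiftrt:SI (ashift:SI X 3) 3) becomes
   (and:SI X 0x1fffffff), the mask being applied via merge_outer_ops as
   an outer AND.  */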
9107 static rtx
9108 simplify_shift_const (rtx x, enum rtx_code code,
9109 enum machine_mode result_mode, rtx varop,
9110 int orig_count)
9112 enum rtx_code orig_code = code;
9113 unsigned int count;
9114 int signed_count;
9115 enum machine_mode mode = result_mode;
9116 enum machine_mode shift_mode, tmode;
9117 unsigned int mode_words
9118 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
9119 /* We form (outer_op (code varop count) (outer_const)). */
9120 enum rtx_code outer_op = NIL;
9121 HOST_WIDE_INT outer_const = 0;
9122 rtx const_rtx;
9123 int complement_p = 0;
9124 rtx new;
9126 /* Make sure to truncate the "natural" shift on the way in. We don't
9127 want to do this inside the loop as it makes it more difficult to
9128 combine shifts. */
9129 if (SHIFT_COUNT_TRUNCATED)
9130 orig_count &= GET_MODE_BITSIZE (mode) - 1;
9132 /* If we were given an invalid count, don't do anything except exactly
9133 what was requested. */
9135 if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
9137 if (x)
9138 return x;
9140 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
9143 count = orig_count;
9145 /* Unless one of the branches of the `if' in this loop does a `continue',
9146 we will `break' the loop after the `if'. */
9148 while (count != 0)
9150 /* If we have an operand of (clobber (const_int 0)), just return that
9151 value. */
9152 if (GET_CODE (varop) == CLOBBER)
9153 return varop;
9155 /* If we discovered we had to complement VAROP, leave. Making a NOT
9156 here would cause an infinite loop. */
9157 if (complement_p)
9158 break;
9160 /* Convert ROTATERT to ROTATE. */
9161 if (code == ROTATERT)
9163 unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
9164 code = ROTATE;
9165 if (VECTOR_MODE_P (result_mode))
9166 count = bitsize / GET_MODE_NUNITS (result_mode) - count;
9167 else
9168 count = bitsize - count;
9171 /* We need to determine what mode we will do the shift in. If the
9172 shift is a right shift or a ROTATE, we must always do it in the mode
9173 it was originally done in. Otherwise, we can do it in MODE, the
9174 widest mode encountered. */
9175 shift_mode
9176 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9177 ? result_mode : mode);
9179 /* Handle cases where the count is greater than the size of the mode
9180 minus 1. For ASHIFT, use the size minus one as the count (this can
9181 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
9182 take the count modulo the size. For other shifts, the result is
9183 zero.
9185 Since these shifts are being produced by the compiler by combining
9186 multiple operations, each of which are defined, we know what the
9187 result is supposed to be. */
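/* For example, in SImode a 34-bit ASHIFTRT is done as a 31-bit one,
   a 34-bit ROTATE as a 2-bit one, and a 34-bit LSHIFTRT or ASHIFT
   turns VAROP into zero (any pending outer operation is still
   applied).  */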
9189 if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1))
9191 if (code == ASHIFTRT)
9192 count = GET_MODE_BITSIZE (shift_mode) - 1;
9193 else if (code == ROTATE || code == ROTATERT)
9194 count %= GET_MODE_BITSIZE (shift_mode);
9195 else
9197 /* We can't simply return zero because there may be an
9198 outer op. */
9199 varop = const0_rtx;
9200 count = 0;
9201 break;
9205 /* An arithmetic right shift of a quantity known to be -1 or 0
9206 is a no-op. */
9207 if (code == ASHIFTRT
9208 && (num_sign_bit_copies (varop, shift_mode)
9209 == GET_MODE_BITSIZE (shift_mode)))
9211 count = 0;
9212 break;
9215 /* If we are doing an arithmetic right shift and discarding all but
9216 the sign bit copies, this is equivalent to doing a shift by the
9217 bitsize minus one. Convert it into that shift because it will often
9218 allow other simplifications. */
9220 if (code == ASHIFTRT
9221 && (count + num_sign_bit_copies (varop, shift_mode)
9222 >= GET_MODE_BITSIZE (shift_mode)))
9223 count = GET_MODE_BITSIZE (shift_mode) - 1;
9225 /* We simplify the tests below and elsewhere by converting
9226 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9227 `make_compound_operation' will convert it to an ASHIFTRT for
9228 those machines (such as VAX) that don't have an LSHIFTRT. */
9229 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9230 && code == ASHIFTRT
9231 && ((nonzero_bits (varop, shift_mode)
9232 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9233 == 0))
9234 code = LSHIFTRT;
9236 if (code == LSHIFTRT
9237 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9238 && !(nonzero_bits (varop, shift_mode) >> count))
9239 varop = const0_rtx;
9240 if (code == ASHIFT
9241 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9242 && !((nonzero_bits (varop, shift_mode) << count)
9243 & GET_MODE_MASK (shift_mode)))
9244 varop = const0_rtx;
9246 switch (GET_CODE (varop))
9248 case SIGN_EXTEND:
9249 case ZERO_EXTEND:
9250 case SIGN_EXTRACT:
9251 case ZERO_EXTRACT:
9252 new = expand_compound_operation (varop);
9253 if (new != varop)
9255 varop = new;
9256 continue;
9258 break;
9260 case MEM:
9261 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9262 minus the width of a smaller mode, we can do this with a
9263 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
9264 if ((code == ASHIFTRT || code == LSHIFTRT)
9265 && ! mode_dependent_address_p (XEXP (varop, 0))
9266 && ! MEM_VOLATILE_P (varop)
9267 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9268 MODE_INT, 1)) != BLKmode)
9270 new = adjust_address_nv (varop, tmode,
9271 BYTES_BIG_ENDIAN ? 0
9272 : count / BITS_PER_UNIT);
9274 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9275 : ZERO_EXTEND, mode, new);
9276 count = 0;
9277 continue;
9279 break;
9281 case USE:
9282 /* Similar to the case above, except that we can only do this if
9283 the resulting mode is the same as that of the underlying
9284 MEM and adjust the address depending on the *bits* endianness
9285 because of the way that bit-field extract insns are defined. */
9286 if ((code == ASHIFTRT || code == LSHIFTRT)
9287 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9288 MODE_INT, 1)) != BLKmode
9289 && tmode == GET_MODE (XEXP (varop, 0)))
9291 if (BITS_BIG_ENDIAN)
9292 new = XEXP (varop, 0);
9293 else
9295 new = copy_rtx (XEXP (varop, 0));
9296 SUBST (XEXP (new, 0),
9297 plus_constant (XEXP (new, 0),
9298 count / BITS_PER_UNIT));
9301 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9302 : ZERO_EXTEND, mode, new);
9303 count = 0;
9304 continue;
9306 break;
9308 case SUBREG:
9309 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9310 the same number of words as what we've seen so far. Then store
9311 the widest mode in MODE. */
9312 if (subreg_lowpart_p (varop)
9313 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9314 > GET_MODE_SIZE (GET_MODE (varop)))
9315 && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9316 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9317 == mode_words)
9319 varop = SUBREG_REG (varop);
9320 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9321 mode = GET_MODE (varop);
9322 continue;
9324 break;
9326 case MULT:
9327 /* Some machines use MULT instead of ASHIFT because MULT
9328 is cheaper. But it is still better on those machines to
9329 merge two shifts into one. */
9330 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9331 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9333 varop
9334 = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9335 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9336 continue;
9338 break;
9340 case UDIV:
9341 /* Similar, for when divides are cheaper. */
9342 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9343 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9345 varop
9346 = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9347 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9348 continue;
9350 break;
9352 case ASHIFTRT:
9353 /* If we are extracting just the sign bit of an arithmetic
9354 right shift, that shift is not needed. However, the sign
9355 bit of a wider mode may be different from what would be
9356 interpreted as the sign bit in a narrower mode, so, if
9357 the result is narrower, don't discard the shift. */
9358 if (code == LSHIFTRT
9359 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9360 && (GET_MODE_BITSIZE (result_mode)
9361 >= GET_MODE_BITSIZE (GET_MODE (varop))))
9363 varop = XEXP (varop, 0);
9364 continue;
9367 /* ... fall through ... */
9369 case LSHIFTRT:
9370 case ASHIFT:
9371 case ROTATE:
9372 /* Here we have two nested shifts. The result is usually the
9373 AND of a new shift with a mask. We compute the result below. */
9374 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9375 && INTVAL (XEXP (varop, 1)) >= 0
9376 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9377 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9378 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9380 enum rtx_code first_code = GET_CODE (varop);
9381 unsigned int first_count = INTVAL (XEXP (varop, 1));
9382 unsigned HOST_WIDE_INT mask;
9383 rtx mask_rtx;
9385 /* We have one common special case. We can't do any merging if
9386 the inner code is an ASHIFTRT of a smaller mode. However, if
9387 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9388 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9389 we can convert it to
9390 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9391 This simplifies certain SIGN_EXTEND operations. */
9392 if (code == ASHIFT && first_code == ASHIFTRT
9393 && count == (unsigned int)
9394 (GET_MODE_BITSIZE (result_mode)
9395 - GET_MODE_BITSIZE (GET_MODE (varop))))
9397 /* C3 has the low-order C1 bits zero. */
9399 mask = (GET_MODE_MASK (mode)
9400 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9402 varop = simplify_and_const_int (NULL_RTX, result_mode,
9403 XEXP (varop, 0), mask);
9404 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9405 varop, count);
9406 count = first_count;
9407 code = ASHIFTRT;
9408 continue;
9411 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9412 than C1 high-order bits equal to the sign bit, we can convert
9413 this to either an ASHIFT or an ASHIFTRT depending on the
9414 two counts.
9416 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9418 if (code == ASHIFTRT && first_code == ASHIFT
9419 && GET_MODE (varop) == shift_mode
9420 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9421 > first_count))
9423 varop = XEXP (varop, 0);
9425 signed_count = count - first_count;
9426 if (signed_count < 0)
9427 count = -signed_count, code = ASHIFT;
9428 else
9429 count = signed_count;
9431 continue;
9434 /* There are some cases we can't do. If CODE is ASHIFTRT,
9435 we can only do this if FIRST_CODE is also ASHIFTRT.
9437 We can't do the case when CODE is ROTATE and FIRST_CODE is
9438 ASHIFTRT.
9440 If the mode of this shift is not the mode of the outer shift,
9441 we can't do this if either shift is a right shift or ROTATE.
9443 Finally, we can't do any of these if the mode is too wide
9444 unless the codes are the same.
9446 Handle the case where the shift codes are the same
9447 first. */
9449 if (code == first_code)
9451 if (GET_MODE (varop) != result_mode
9452 && (code == ASHIFTRT || code == LSHIFTRT
9453 || code == ROTATE))
9454 break;
9456 count += first_count;
9457 varop = XEXP (varop, 0);
9458 continue;
9461 if (code == ASHIFTRT
9462 || (code == ROTATE && first_code == ASHIFTRT)
9463 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9464 || (GET_MODE (varop) != result_mode
9465 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9466 || first_code == ROTATE
9467 || code == ROTATE)))
9468 break;
9470 /* To compute the mask to apply after the shift, shift the
9471 nonzero bits of the inner shift the same way the
9472 outer shift will. */
9474 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9476 mask_rtx
9477 = simplify_binary_operation (code, result_mode, mask_rtx,
9478 GEN_INT (count));
9480 /* Give up if we can't compute an outer operation to use. */
9481 if (mask_rtx == 0
9482 || GET_CODE (mask_rtx) != CONST_INT
9483 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9484 INTVAL (mask_rtx),
9485 result_mode, &complement_p))
9486 break;
9488 /* If the shifts are in the same direction, we add the
9489 counts. Otherwise, we subtract them. */
9490 signed_count = count;
9491 if ((code == ASHIFTRT || code == LSHIFTRT)
9492 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9493 signed_count += first_count;
9494 else
9495 signed_count -= first_count;
9497 /* If COUNT is positive, the new shift is usually CODE,
9498 except for the two exceptions below, in which case it is
9499 FIRST_CODE. If the count is negative, FIRST_CODE should
9500 always be used. */
9501 if (signed_count > 0
9502 && ((first_code == ROTATE && code == ASHIFT)
9503 || (first_code == ASHIFTRT && code == LSHIFTRT)))
9504 code = first_code, count = signed_count;
9505 else if (signed_count < 0)
9506 code = first_code, count = -signed_count;
9507 else
9508 count = signed_count;
9510 varop = XEXP (varop, 0);
9511 continue;
9514 /* If we have (A << B << C) for any shift, we can convert this to
9515 (A << C << B). This wins if A is a constant. Only try this if
9516 B is not a constant. */
9518 else if (GET_CODE (varop) == code
9519 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9520 && 0 != (new
9521 = simplify_binary_operation (code, mode,
9522 XEXP (varop, 0),
9523 GEN_INT (count))))
9525 varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9526 count = 0;
9527 continue;
9529 break;
9531 case NOT:
9532 /* Make this fit the case below. */
9533 varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9534 GEN_INT (GET_MODE_MASK (mode)));
9535 continue;
9537 case IOR:
9538 case AND:
9539 case XOR:
9540 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9541 with C the size of VAROP - 1 and the shift is logical if
9542 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9543 we have an (le X 0) operation. If we have an arithmetic shift
9544 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9545 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9547 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9548 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9549 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9550 && (code == LSHIFTRT || code == ASHIFTRT)
9551 && count == (unsigned int)
9552 (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9553 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9555 count = 0;
9556 varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9557 const0_rtx);
9559 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9560 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9562 continue;
9565 /* If we have (shift (logical)), move the logical to the outside
9566 to allow it to possibly combine with another logical and the
9567 shift to combine with another shift. This also canonicalizes to
9568 what a ZERO_EXTRACT looks like. Also, some machines have
9569 (and (shift)) insns. */
9571 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9572 /* We can't do this if we have (ashiftrt (xor)) and the
9573 constant has its sign bit set in shift_mode. */
9574 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9575 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9576 shift_mode))
9577 && (new = simplify_binary_operation (code, result_mode,
9578 XEXP (varop, 1),
9579 GEN_INT (count))) != 0
9580 && GET_CODE (new) == CONST_INT
9581 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9582 INTVAL (new), result_mode, &complement_p))
9584 varop = XEXP (varop, 0);
9585 continue;
9588 /* If we can't do that, try to simplify the shift in each arm of the
9589 logical expression, make a new logical expression, and apply
9590 the inverse distributive law. This also can't be done
9591 for some (ashiftrt (xor)). */
9592 if (code != ASHIFTRT || GET_CODE (varop) != XOR
9593 || 0 <= trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9594 shift_mode))
9596 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9597 XEXP (varop, 0), count);
9598 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9599 XEXP (varop, 1), count);
9601 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9602 varop = apply_distributive_law (varop);
9604 count = 0;
9606 break;
9608 case EQ:
9609 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9610 says that the sign bit can be tested, FOO has mode MODE, C is
9611 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9612 that may be nonzero. */
9613 if (code == LSHIFTRT
9614 && XEXP (varop, 1) == const0_rtx
9615 && GET_MODE (XEXP (varop, 0)) == result_mode
9616 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9617 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9618 && ((STORE_FLAG_VALUE
9619 & ((HOST_WIDE_INT) 1
9620 << (GET_MODE_BITSIZE (result_mode) - 1))))
9621 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9622 && merge_outer_ops (&outer_op, &outer_const, XOR,
9623 (HOST_WIDE_INT) 1, result_mode,
9624 &complement_p))
9626 varop = XEXP (varop, 0);
9627 count = 0;
9628 continue;
9630 break;
9632 case NEG:
9633 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9634 than the number of bits in the mode is equivalent to A. */
9635 if (code == LSHIFTRT
9636 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9637 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9639 varop = XEXP (varop, 0);
9640 count = 0;
9641 continue;
9644 /* NEG commutes with ASHIFT since it is multiplication. Move the
9645 NEG outside to allow shifts to combine. */
9646 if (code == ASHIFT
9647 && merge_outer_ops (&outer_op, &outer_const, NEG,
9648 (HOST_WIDE_INT) 0, result_mode,
9649 &complement_p))
9651 varop = XEXP (varop, 0);
9652 continue;
9654 break;
9656 case PLUS:
9657 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9658 is one less than the number of bits in the mode is
9659 equivalent to (xor A 1). */
9660 if (code == LSHIFTRT
9661 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9662 && XEXP (varop, 1) == constm1_rtx
9663 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9664 && merge_outer_ops (&outer_op, &outer_const, XOR,
9665 (HOST_WIDE_INT) 1, result_mode,
9666 &complement_p))
9668 count = 0;
9669 varop = XEXP (varop, 0);
9670 continue;
9673 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9674 that might be nonzero in BAR are those being shifted out and those
9675 bits are known zero in FOO, we can replace the PLUS with FOO.
9676 Similarly in the other operand order. This code occurs when
9677 we are computing the size of a variable-size array. */
9679 if ((code == ASHIFTRT || code == LSHIFTRT)
9680 && count < HOST_BITS_PER_WIDE_INT
9681 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9682 && (nonzero_bits (XEXP (varop, 1), result_mode)
9683 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9685 varop = XEXP (varop, 0);
9686 continue;
9688 else if ((code == ASHIFTRT || code == LSHIFTRT)
9689 && count < HOST_BITS_PER_WIDE_INT
9690 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9691 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9692 >> count)
9693 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9694 & nonzero_bits (XEXP (varop, 1),
9695 result_mode)))
9697 varop = XEXP (varop, 1);
9698 continue;
9701 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9702 if (code == ASHIFT
9703 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9704 && (new = simplify_binary_operation (ASHIFT, result_mode,
9705 XEXP (varop, 1),
9706 GEN_INT (count))) != 0
9707 && GET_CODE (new) == CONST_INT
9708 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9709 INTVAL (new), result_mode, &complement_p))
9711 varop = XEXP (varop, 0);
9712 continue;
9714 break;
9716 case MINUS:
9717 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9718 with C the size of VAROP - 1 and the shift is logical if
9719 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9720 we have a (gt X 0) operation. If the shift is arithmetic with
9721 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9722 we have a (neg (gt X 0)) operation. */
9724 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9725 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9726 && count == (unsigned int)
9727 (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9728 && (code == LSHIFTRT || code == ASHIFTRT)
9729 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9730 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1))
9731 == count
9732 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9734 count = 0;
9735 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9736 const0_rtx);
9738 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9739 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9741 continue;
9743 break;
9745 case TRUNCATE:
9746 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9747 if the truncate does not affect the value. */
9748 if (code == LSHIFTRT
9749 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9750 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9751 && (INTVAL (XEXP (XEXP (varop, 0), 1))
9752 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9753 - GET_MODE_BITSIZE (GET_MODE (varop)))))
9755 rtx varop_inner = XEXP (varop, 0);
9757 varop_inner
9758 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9759 XEXP (varop_inner, 0),
9760 GEN_INT
9761 (count + INTVAL (XEXP (varop_inner, 1))));
9762 varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9763 count = 0;
9764 continue;
9766 break;
9768 default:
9769 break;
9772 break;
9775 /* We need to determine what mode to do the shift in. If the shift is
9776 a right shift or ROTATE, we must always do it in the mode it was
9777 originally done in. Otherwise, we can do it in MODE, the widest mode
9778 encountered. The code we care about is that of the shift that will
9779 actually be done, not the shift that was originally requested. */
9780 shift_mode
9781 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9782 ? result_mode : mode);
9784 /* We have now finished analyzing the shift. The result should be
9785 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9786 OUTER_OP is non-NIL, it is an operation that needs to be applied
9787 to the result of the shift. OUTER_CONST is the relevant constant,
9788 but we must turn off all bits turned off in the shift.
9790 If we were passed a value for X, see if we can use any pieces of
9791 it. If not, make a new rtx. */
9793 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9794 && GET_CODE (XEXP (x, 1)) == CONST_INT
9795 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count)
9796 const_rtx = XEXP (x, 1);
9797 else
9798 const_rtx = GEN_INT (count);
9800 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9801 && GET_MODE (XEXP (x, 0)) == shift_mode
9802 && SUBREG_REG (XEXP (x, 0)) == varop)
9803 varop = XEXP (x, 0);
9804 else if (GET_MODE (varop) != shift_mode)
9805 varop = gen_lowpart (shift_mode, varop);
9807 /* If we can't make the SUBREG, try to return what we were given. */
9808 if (GET_CODE (varop) == CLOBBER)
9809 return x ? x : varop;
9811 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9812 if (new != 0)
9813 x = new;
9814 else
9815 x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9817 /* If we have an outer operation and we just made a shift, it is
9818 possible that we could have simplified the shift were it not
9819 for the outer operation. So try to do the simplification
9820 recursively. */
9822 if (outer_op != NIL && GET_CODE (x) == code
9823 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9824 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9825 INTVAL (XEXP (x, 1)));
9827 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9828 turn off all the bits that the shift would have turned off. */
9829 if (orig_code == LSHIFTRT && result_mode != shift_mode)
9830 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9831 GET_MODE_MASK (result_mode) >> orig_count);
9833 /* Do the remainder of the processing in RESULT_MODE. */
9834 x = gen_lowpart (result_mode, x);
9836 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9837 operation. */
9838 if (complement_p)
9839 x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9841 if (outer_op != NIL)
9843 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9844 outer_const = trunc_int_for_mode (outer_const, result_mode);
9846 if (outer_op == AND)
9847 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9848 else if (outer_op == SET)
9849 /* This means that we have determined that the result is
9850 equivalent to a constant. This should be rare. */
9851 x = GEN_INT (outer_const);
9852 else if (GET_RTX_CLASS (outer_op) == '1')
9853 x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9854 else
9855 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9858 return x;
9861 /* Like recog, but we receive the address of a pointer to a new pattern.
9862 We try to match the rtx that the pointer points to.
9863 If that fails, we may try to modify or replace the pattern,
9864 storing the replacement into the same pointer object.
9866 Modifications include deletion or addition of CLOBBERs.
9868 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9869 the CLOBBERs are placed.
9871 The value is the final insn code from the pattern ultimately matched,
9872 or -1. */
9874 static int
9875 recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
9877 rtx pat = *pnewpat;
9878 int insn_code_number;
9879 int num_clobbers_to_add = 0;
9880 int i;
9881 rtx notes = 0;
9882 rtx old_notes, old_pat;
9884 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9885 we use to indicate that something didn't match. If we find such a
9886 thing, force rejection. */
9887 if (GET_CODE (pat) == PARALLEL)
9888 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9889 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9890 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9891 return -1;
9893 old_pat = PATTERN (insn);
9894 old_notes = REG_NOTES (insn);
9895 PATTERN (insn) = pat;
9896 REG_NOTES (insn) = 0;
9898 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9900 /* If it isn't, there is the possibility that we previously had an insn
9901 that clobbered some register as a side effect, but the combined
9902 insn doesn't need to do that. So try once more without the clobbers
9903 unless this represents an ASM insn. */
9905 if (insn_code_number < 0 && ! check_asm_operands (pat)
9906 && GET_CODE (pat) == PARALLEL)
9908 int pos;
9910 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9911 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9913 if (i != pos)
9914 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9915 pos++;
9918 SUBST_INT (XVECLEN (pat, 0), pos);
9920 if (pos == 1)
9921 pat = XVECEXP (pat, 0, 0);
9923 PATTERN (insn) = pat;
9924 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9926 PATTERN (insn) = old_pat;
9927 REG_NOTES (insn) = old_notes;
9929 /* Recognize all noop sets; these will be killed by a followup pass. */
9930 if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9931 insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9933 /* If we had any clobbers to add, make a new pattern that contains
9934 them. Then check to make sure that all of them are dead. */
9935 if (num_clobbers_to_add)
9937 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9938 rtvec_alloc (GET_CODE (pat) == PARALLEL
9939 ? (XVECLEN (pat, 0)
9940 + num_clobbers_to_add)
9941 : num_clobbers_to_add + 1));
9943 if (GET_CODE (pat) == PARALLEL)
9944 for (i = 0; i < XVECLEN (pat, 0); i++)
9945 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9946 else
9947 XVECEXP (newpat, 0, 0) = pat;
9949 add_clobbers (newpat, insn_code_number);
9951 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9952 i < XVECLEN (newpat, 0); i++)
9954 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9955 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9956 return -1;
9957 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9958 XEXP (XVECEXP (newpat, 0, i), 0), notes);
9960 pat = newpat;
9963 *pnewpat = pat;
9964 *pnotes = notes;
9966 return insn_code_number;
9969 /* Like gen_lowpart_general but for use by combine. In combine it
9970 is not possible to create any new pseudoregs. However, it is
9971 safe to create invalid memory addresses, because combine will
9972 try to recognize them and all they will do is make the combine
9973 attempt fail.
9975 If for some reason this cannot do its job, an rtx
9976 (clobber (const_int 0)) is returned.
9977 An insn containing that will not be recognized. */
9979 static rtx
9980 gen_lowpart_for_combine (enum machine_mode mode, rtx x)
9982 rtx result;
9984 if (GET_MODE (x) == mode)
9985 return x;
9987 /* Return identity if this is a CONST or symbolic
9988 reference. */
9989 if (mode == Pmode
9990 && (GET_CODE (x) == CONST
9991 || GET_CODE (x) == SYMBOL_REF
9992 || GET_CODE (x) == LABEL_REF))
9993 return x;
9995 /* We can only support MODE being wider than a word if X is a
9996 constant integer or has a mode the same size. */
9998 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9999 && ! ((GET_MODE (x) == VOIDmode
10000 && (GET_CODE (x) == CONST_INT
10001 || GET_CODE (x) == CONST_DOUBLE))
10002 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
10003 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10005 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
10006 won't know what to do. So we will strip off the SUBREG here and
10007 process normally. */
10008 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
10010 x = SUBREG_REG (x);
10011 if (GET_MODE (x) == mode)
10012 return x;
10015 result = gen_lowpart_common (mode, x);
10016 #ifdef CANNOT_CHANGE_MODE_CLASS
10017 if (result != 0
10018 && GET_CODE (result) == SUBREG
10019 && GET_CODE (SUBREG_REG (result)) == REG
10020 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER)
10021 bitmap_set_bit (&subregs_of_mode, REGNO (SUBREG_REG (result))
10022 * MAX_MACHINE_MODE
10023 + GET_MODE (result));
10024 #endif
10026 if (result)
10027 return result;
10029 if (GET_CODE (x) == MEM)
10031 int offset = 0;
10033 /* Refuse to work on a volatile memory ref or one with a mode-dependent
10034 address. */
10035 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
10036 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10038 /* If we want to refer to something bigger than the original memref,
10039 generate a perverse subreg instead. That will force a reload
10040 of the original memref X. */
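/* E.g., asking for the DImode lowpart of (mem:SI ADDR) yields
   (subreg:DI (mem:SI ADDR) 0), which reload must later resolve.  */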
10041 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
10042 return gen_rtx_SUBREG (mode, x, 0);
10044 if (WORDS_BIG_ENDIAN)
10045 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
10046 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
10048 if (BYTES_BIG_ENDIAN)
10050 /* Adjust the address so that the address-after-the-data is
10051 unchanged. */
10052 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
10053 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
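/* Worked example (assuming 4-byte words and a big-endian target): the QImode
   lowpart of (mem:SI ADDR) lives at byte 3, since both the SImode datum and
   its QImode lowpart must end at ADDR + 4.  */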
10056 return adjust_address_nv (x, mode, offset);
10059 /* If X is a comparison operator, rewrite it in a new mode. This
10060 probably won't match, but may allow further simplifications. */
10061 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
10062 return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
10064 /* If we couldn't simplify X any other way, just enclose it in a
10065 SUBREG. Normally, this SUBREG won't match, but some patterns may
10066 include an explicit SUBREG or we may simplify it further in combine. */
10067 else
10069 int offset = 0;
10070 rtx res;
10071 enum machine_mode sub_mode = GET_MODE (x);
10073 offset = subreg_lowpart_offset (mode, sub_mode);
10074 if (sub_mode == VOIDmode)
10076 sub_mode = int_mode_for_mode (mode);
10077 x = gen_lowpart_common (sub_mode, x);
10078 if (x == 0)
10079 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
10081 res = simplify_gen_subreg (mode, x, sub_mode, offset);
10082 if (res)
10083 return res;
10084 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10088 /* These routines make binary and unary operations by first seeing if they
10089 fold; if not, a new expression is allocated. */
10091 static rtx
10092 gen_binary (enum rtx_code code, enum machine_mode mode, rtx op0, rtx op1)
10094 rtx result;
10095 rtx tem;
10097 if (GET_CODE (op0) == CLOBBER)
10098 return op0;
10099 else if (GET_CODE (op1) == CLOBBER)
10100 return op1;
10102 if (GET_RTX_CLASS (code) == 'c'
10103 && swap_commutative_operands_p (op0, op1))
10104 tem = op0, op0 = op1, op1 = tem;
10106 if (GET_RTX_CLASS (code) == '<')
10108 enum machine_mode op_mode = GET_MODE (op0);
10110 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
10111 just (REL_OP X Y). */
10112 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
10114 op1 = XEXP (op0, 1);
10115 op0 = XEXP (op0, 0);
10116 op_mode = GET_MODE (op0);
10119 if (op_mode == VOIDmode)
10120 op_mode = GET_MODE (op1);
10121 result = simplify_relational_operation (code, op_mode, op0, op1);
10123 else
10124 result = simplify_binary_operation (code, mode, op0, op1);
10126 if (result)
10127 return result;
10129 /* Put complex operands first and constants second. */
10130 if (GET_RTX_CLASS (code) == 'c'
10131 && swap_commutative_operands_p (op0, op1))
10132 return gen_rtx_fmt_ee (code, mode, op1, op0);
10134 /* If we are turning off bits already known off in OP0, we need not do
10135 an AND. */
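/* E.g., if OP0 is (zero_extend:SI (reg:QI R)) (R a placeholder) and OP1 is
   (const_int 255), every nonzero bit of OP0 already lies within the mask,
   so the AND would change nothing and OP0 itself is returned.  */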
10136 else if (code == AND && GET_CODE (op1) == CONST_INT
10137 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
10138 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
10139 return op0;
10141 return gen_rtx_fmt_ee (code, mode, op0, op1);
10144 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
10145 comparison code that will be tested.
10147 The result is a possibly different comparison code to use. *POP0 and
10148 *POP1 may be updated.
10150 It is possible that we might detect that a comparison is either always
10151 true or always false. However, we do not perform general constant
10152 folding in combine, so this knowledge isn't useful. Such tautologies
10153 should have been detected earlier. Hence we ignore all such cases. */
10155 static enum rtx_code
10156 simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
10158 rtx op0 = *pop0;
10159 rtx op1 = *pop1;
10160 rtx tem, tem1;
10161 int i;
10162 enum machine_mode mode, tmode;
10164 /* Try a few ways of applying the same transformation to both operands. */
10165 while (1)
10167 #ifndef WORD_REGISTER_OPERATIONS
10168 /* The test below this one won't handle SIGN_EXTENDs on these machines,
10169 so check specially. */
10170 if (code != GTU && code != GEU && code != LTU && code != LEU
10171 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10172 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10173 && GET_CODE (XEXP (op1, 0)) == ASHIFT
10174 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10175 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10176 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10177 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10178 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10179 && XEXP (op0, 1) == XEXP (op1, 1)
10180 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10181 && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
10182 && (INTVAL (XEXP (op0, 1))
10183 == (GET_MODE_BITSIZE (GET_MODE (op0))
10184 - (GET_MODE_BITSIZE
10185 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10187 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10188 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10190 #endif
10192 /* If both operands are the same constant shift, see if we can ignore the
10193 shift. We can if the shift is a rotate or if the bits shifted out of
10194 this shift are known to be zero for both inputs and if the type of
10195 comparison is compatible with the shift. */
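/* For example (illustrative): (eq (lshiftrt:SI A 2) (lshiftrt:SI B 2))
   can become (eq A B) when the two low-order bits of both A and B are
   known to be zero, since those are the only bits the shifts discard.  */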
10196 if (GET_CODE (op0) == GET_CODE (op1)
10197 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10198 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10199 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10200 && (code != GT && code != LT && code != GE && code != LE))
10201 || (GET_CODE (op0) == ASHIFTRT
10202 && (code != GTU && code != LTU
10203 && code != GEU && code != LEU)))
10204 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10205 && INTVAL (XEXP (op0, 1)) >= 0
10206 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10207 && XEXP (op0, 1) == XEXP (op1, 1))
10209 enum machine_mode mode = GET_MODE (op0);
10210 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10211 int shift_count = INTVAL (XEXP (op0, 1));
10213 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10214 mask &= (mask >> shift_count) << shift_count;
10215 else if (GET_CODE (op0) == ASHIFT)
10216 mask = (mask & (mask << shift_count)) >> shift_count;
10218 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10219 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10220 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10221 else
10222 break;
10225 /* If both operands are AND's of a paradoxical SUBREG by constant, the
10226 SUBREGs are of the same mode, and, in both cases, the AND would
10227 be redundant if the comparison was done in the narrower mode,
10228 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10229 and the operand's possibly nonzero bits are 0xffffff01; in that case
10230 if we only care about QImode, we don't need the AND). This case
10231 occurs if the output mode of an scc insn is not SImode and
10232 STORE_FLAG_VALUE == 1 (e.g., the 386).
10234 Similarly, check for a case where the AND's are ZERO_EXTEND
10235 operations from some narrower mode even though a SUBREG is not
10236 present. */
10238 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10239 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10240 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
10242 rtx inner_op0 = XEXP (op0, 0);
10243 rtx inner_op1 = XEXP (op1, 0);
10244 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10245 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10246 int changed = 0;
10248 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10249 && (GET_MODE_SIZE (GET_MODE (inner_op0))
10250 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10251 && (GET_MODE (SUBREG_REG (inner_op0))
10252 == GET_MODE (SUBREG_REG (inner_op1)))
10253 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10254 <= HOST_BITS_PER_WIDE_INT)
10255 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10256 GET_MODE (SUBREG_REG (inner_op0)))))
10257 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10258 GET_MODE (SUBREG_REG (inner_op1))))))
10260 op0 = SUBREG_REG (inner_op0);
10261 op1 = SUBREG_REG (inner_op1);
10263 /* The resulting comparison is always unsigned since we masked
10264 off the original sign bit. */
10265 code = unsigned_condition (code);
10267 changed = 1;
10270 else if (c0 == c1)
10271 for (tmode = GET_CLASS_NARROWEST_MODE
10272 (GET_MODE_CLASS (GET_MODE (op0)));
10273 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10274 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10276 op0 = gen_lowpart (tmode, inner_op0);
10277 op1 = gen_lowpart (tmode, inner_op1);
10278 code = unsigned_condition (code);
10279 changed = 1;
10280 break;
10283 if (! changed)
10284 break;
10287 /* If both operands are NOT, we can strip off the outer operation
10288 and adjust the comparison code for swapped operands; similarly for
10289 NEG, except that this must be an equality comparison. */
10290 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10291 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10292 && (code == EQ || code == NE)))
10293 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10295 else
10296 break;
10299 /* If the first operand is a constant, swap the operands and adjust the
10300 comparison code appropriately, but don't do this if the second operand
10301 is already a constant integer. */
10302 if (swap_commutative_operands_p (op0, op1))
10304 tem = op0, op0 = op1, op1 = tem;
10305 code = swap_condition (code);
10308 /* We now enter a loop during which we will try to simplify the comparison.
10309 For the most part, we are only concerned with comparisons with zero,
10310 but some things may really be comparisons with zero that do not start
10311 out looking that way. */
10313 while (GET_CODE (op1) == CONST_INT)
10315 enum machine_mode mode = GET_MODE (op0);
10316 unsigned int mode_width = GET_MODE_BITSIZE (mode);
10317 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10318 int equality_comparison_p;
10319 int sign_bit_comparison_p;
10320 int unsigned_comparison_p;
10321 HOST_WIDE_INT const_op;
10323 /* We only want to handle integral modes. This catches VOIDmode,
10324 CCmode, and the floating-point modes. An exception is that we
10325 can handle VOIDmode if OP0 is a COMPARE or a comparison
10326 operation. */
10328 if (GET_MODE_CLASS (mode) != MODE_INT
10329 && ! (mode == VOIDmode
10330 && (GET_CODE (op0) == COMPARE
10331 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10332 break;
10334 /* Get the constant we are comparing against and turn off all bits
10335 not on in our mode. */
10336 const_op = INTVAL (op1);
10337 if (mode != VOIDmode)
10338 const_op = trunc_int_for_mode (const_op, mode);
10339 op1 = GEN_INT (const_op);
10341 /* If we are comparing against a constant power of two and the value
10342 being compared can only have that single bit nonzero (e.g., it was
10343 `and'ed with that bit), we can replace this with a comparison
10344 with zero. */
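/* E.g., if OP0 is (and X (const_int 4)), then (eq OP0 (const_int 4))
   becomes (ne OP0 (const_int 0)), since OP0 can only be 0 or 4.  */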
10345 if (const_op
10346 && (code == EQ || code == NE || code == GE || code == GEU
10347 || code == LT || code == LTU)
10348 && mode_width <= HOST_BITS_PER_WIDE_INT
10349 && exact_log2 (const_op) >= 0
10350 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10352 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10353 op1 = const0_rtx, const_op = 0;
10356 /* Similarly, if we are comparing a value known to be either -1 or
10357 0 with -1, change it to the opposite comparison against zero. */
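/* E.g., if OP0 is (ashiftrt:SI X 31), it is known to be either 0 or -1,
   so (eq OP0 (const_int -1)) becomes (ne OP0 (const_int 0)).  */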
10359 if (const_op == -1
10360 && (code == EQ || code == NE || code == GT || code == LE
10361 || code == GEU || code == LTU)
10362 && num_sign_bit_copies (op0, mode) == mode_width)
10364 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10365 op1 = const0_rtx, const_op = 0;
10368 /* Do some canonicalizations based on the comparison code. We prefer
10369 comparisons against zero and then prefer equality comparisons.
10370 If we can reduce the size of a constant, we will do that too. */
10372 switch (code)
10374 case LT:
10375 /* < C is equivalent to <= (C - 1) */
10376 if (const_op > 0)
10378 const_op -= 1;
10379 op1 = GEN_INT (const_op);
10380 code = LE;
10381 /* ... fall through to LE case below. */
10383 else
10384 break;
10386 case LE:
10387 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
10388 if (const_op < 0)
10390 const_op += 1;
10391 op1 = GEN_INT (const_op);
10392 code = LT;
10395 /* If we are doing a <= 0 comparison on a value known to have
10396 a zero sign bit, we can replace this with == 0. */
10397 else if (const_op == 0
10398 && mode_width <= HOST_BITS_PER_WIDE_INT
10399 && (nonzero_bits (op0, mode)
10400 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10401 code = EQ;
10402 break;
10404 case GE:
10405 /* >= C is equivalent to > (C - 1). */
10406 if (const_op > 0)
10408 const_op -= 1;
10409 op1 = GEN_INT (const_op);
10410 code = GT;
10411 /* ... fall through to GT below. */
10413 else
10414 break;
10416 case GT:
10417 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
10418 if (const_op < 0)
10420 const_op += 1;
10421 op1 = GEN_INT (const_op);
10422 code = GE;
10425 /* If we are doing a > 0 comparison on a value known to have
10426 a zero sign bit, we can replace this with != 0. */
10427 else if (const_op == 0
10428 && mode_width <= HOST_BITS_PER_WIDE_INT
10429 && (nonzero_bits (op0, mode)
10430 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10431 code = NE;
10432 break;
10434 case LTU:
10435 /* < C is equivalent to <= (C - 1). */
10436 if (const_op > 0)
10438 const_op -= 1;
10439 op1 = GEN_INT (const_op);
10440 code = LEU;
10441 /* ... fall through ... */
10444 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
10445 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10446 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10448 const_op = 0, op1 = const0_rtx;
10449 code = GE;
10450 break;
10452 else
10453 break;
10455 case LEU:
10456 /* unsigned <= 0 is equivalent to == 0 */
10457 if (const_op == 0)
10458 code = EQ;
10460 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
10461 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10462 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10464 const_op = 0, op1 = const0_rtx;
10465 code = GE;
10467 break;
10469 case GEU:
10470 /* >= C is equivalent to > (C - 1). */
10471 if (const_op > 1)
10473 const_op -= 1;
10474 op1 = GEN_INT (const_op);
10475 code = GTU;
10476 /* ... fall through ... */
10479 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
10480 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10481 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10483 const_op = 0, op1 = const0_rtx;
10484 code = LT;
10485 break;
10487 else
10488 break;
10490 case GTU:
10491 /* unsigned > 0 is equivalent to != 0 */
10492 if (const_op == 0)
10493 code = NE;
10495 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
10496 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10497 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10499 const_op = 0, op1 = const0_rtx;
10500 code = LT;
10502 break;
10504 default:
10505 break;
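/* Illustrative instances of the canonicalizations above:
   (lt X (const_int 5)) becomes (le X (const_int 4)), and
   (geu X (const_int 2)) becomes (gtu X (const_int 1)).  */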
10508 /* Compute some predicates to simplify code below. */
10510 equality_comparison_p = (code == EQ || code == NE);
10511 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10512 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10513 || code == GEU);
10515 /* If this is a sign bit comparison and we can do arithmetic in
10516 MODE, say that we will only be needing the sign bit of OP0. */
10517 if (sign_bit_comparison_p
10518 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10519 op0 = force_to_mode (op0, mode,
10520 ((HOST_WIDE_INT) 1
10521 << (GET_MODE_BITSIZE (mode) - 1)),
10522 NULL_RTX, 0);
10524 /* Now try cases based on the opcode of OP0. If none of the cases
10525 does a "continue", we exit this loop immediately after the
10526 switch. */
10528 switch (GET_CODE (op0))
10530 case ZERO_EXTRACT:
10531 /* If we are extracting a single bit from a variable position in
10532 a constant that has only a single bit set and are comparing it
10533 with zero, we can convert this into an equality comparison
10534 between the position and the location of the single bit. */
10535 /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10536 have already reduced the shift count modulo the word size. */
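/* E.g. (illustrative, ignoring the BITS_BIG_ENDIAN correction):
   (eq (zero_extract (const_int 8) (const_int 1) POS) (const_int 0))
   becomes (ne POS (const_int 3)), since bit 3 is the only set bit of
   the constant 8.  */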
10537 if (!SHIFT_COUNT_TRUNCATED
10538 && GET_CODE (XEXP (op0, 0)) == CONST_INT
10539 && XEXP (op0, 1) == const1_rtx
10540 && equality_comparison_p && const_op == 0
10541 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10543 if (BITS_BIG_ENDIAN)
10545 enum machine_mode new_mode
10546 = mode_for_extraction (EP_extzv, 1);
10547 if (new_mode == MAX_MACHINE_MODE)
10548 i = BITS_PER_WORD - 1 - i;
10549 else
10551 mode = new_mode;
10552 i = (GET_MODE_BITSIZE (mode) - 1 - i);
10556 op0 = XEXP (op0, 2);
10557 op1 = GEN_INT (i);
10558 const_op = i;
10560 /* Result is nonzero iff shift count is equal to I. */
10561 code = reverse_condition (code);
10562 continue;
10565 /* ... fall through ... */
10567 case SIGN_EXTRACT:
10568 tem = expand_compound_operation (op0);
10569 if (tem != op0)
10571 op0 = tem;
10572 continue;
10574 break;
10576 case NOT:
10577 /* If testing for equality, we can take the NOT of the constant. */
10578 if (equality_comparison_p
10579 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10581 op0 = XEXP (op0, 0);
10582 op1 = tem;
10583 continue;
10586 /* If just looking at the sign bit, reverse the sense of the
10587 comparison. */
10588 if (sign_bit_comparison_p)
10590 op0 = XEXP (op0, 0);
10591 code = (code == GE ? LT : GE);
10592 continue;
10594 break;
10596 case NEG:
10597 /* If testing for equality, we can take the NEG of the constant. */
10598 if (equality_comparison_p
10599 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10601 op0 = XEXP (op0, 0);
10602 op1 = tem;
10603 continue;
10606 /* The remaining cases only apply to comparisons with zero. */
10607 if (const_op != 0)
10608 break;
10610 /* When X is ABS or is known positive,
10611 (neg X) is < 0 if and only if X != 0. */
10613 if (sign_bit_comparison_p
10614 && (GET_CODE (XEXP (op0, 0)) == ABS
10615 || (mode_width <= HOST_BITS_PER_WIDE_INT
10616 && (nonzero_bits (XEXP (op0, 0), mode)
10617 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10619 op0 = XEXP (op0, 0);
10620 code = (code == LT ? NE : EQ);
10621 continue;
10624 /* If we have NEG of something whose two high-order bits are the
10625 same, we know that "(-a) < 0" is equivalent to "a > 0". */
10626 if (num_sign_bit_copies (op0, mode) >= 2)
10628 op0 = XEXP (op0, 0);
10629 code = swap_condition (code);
10630 continue;
10632 break;
10634 case ROTATE:
10635 /* If we are testing equality and our count is a constant, we
10636 can perform the inverse operation on our RHS. */
10637 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10638 && (tem = simplify_binary_operation (ROTATERT, mode,
10639 op1, XEXP (op0, 1))) != 0)
10641 op0 = XEXP (op0, 0);
10642 op1 = tem;
10643 continue;
10646 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10647 a particular bit. Convert it to an AND of a constant of that
10648 bit. This will be converted into a ZERO_EXTRACT. */
10649 if (const_op == 0 && sign_bit_comparison_p
10650 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10651 && mode_width <= HOST_BITS_PER_WIDE_INT)
10653 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10654 ((HOST_WIDE_INT) 1
10655 << (mode_width - 1
10656 - INTVAL (XEXP (op0, 1)))));
10657 code = (code == LT ? NE : EQ);
10658 continue;
10661 /* Fall through. */
10663 case ABS:
10664 /* ABS is ignorable inside an equality comparison with zero. */
10665 if (const_op == 0 && equality_comparison_p)
10667 op0 = XEXP (op0, 0);
10668 continue;
10670 break;
10672 case SIGN_EXTEND:
10673 /* Can simplify (compare (zero/sign_extend FOO) CONST)
10674 to (compare FOO CONST) if CONST fits in FOO's mode and we
10675 are either testing inequality or have an unsigned comparison
10676 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
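/* E.g., (lt (sign_extend:SI (reg:QI R)) (const_int 64)) can become
   (lt (reg:QI R) (const_int 64)), since 64 is representable as a
   positive QImode value.  */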
10677 if (! unsigned_comparison_p
10678 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10679 <= HOST_BITS_PER_WIDE_INT)
10680 && ((unsigned HOST_WIDE_INT) const_op
10681 < (((unsigned HOST_WIDE_INT) 1
10682 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10684 op0 = XEXP (op0, 0);
10685 continue;
10687 break;
10689 case SUBREG:
10690 /* Check for the case where we are comparing A - C1 with C2,
10691 both constants are smaller than 1/2 the maximum positive
10692 value in MODE, and the comparison is equality or unsigned.
10693 In that case, if A is either zero-extended to MODE or has
10694 sufficient sign bits so that the high-order bit in MODE
10695 is a copy of the sign in the inner mode, we can prove that it is
10696 safe to do the operation in the wider mode. This simplifies
10697 many range checks. */
10699 if (mode_width <= HOST_BITS_PER_WIDE_INT
10700 && subreg_lowpart_p (op0)
10701 && GET_CODE (SUBREG_REG (op0)) == PLUS
10702 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10703 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10704 && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10705 < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
10706 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10707 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10708 GET_MODE (SUBREG_REG (op0)))
10709 & ~GET_MODE_MASK (mode))
10710 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10711 GET_MODE (SUBREG_REG (op0)))
10712 > (unsigned int)
10713 (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10714 - GET_MODE_BITSIZE (mode)))))
10716 op0 = SUBREG_REG (op0);
10717 continue;
10720 /* If the inner mode is narrower and we are extracting the low part,
10721 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10722 if (subreg_lowpart_p (op0)
10723 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10724 /* Fall through */ ;
10725 else
10726 break;
10728 /* ... fall through ... */
10730 case ZERO_EXTEND:
10731 if ((unsigned_comparison_p || equality_comparison_p)
10732 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10733 <= HOST_BITS_PER_WIDE_INT)
10734 && ((unsigned HOST_WIDE_INT) const_op
10735 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10737 op0 = XEXP (op0, 0);
10738 continue;
10740 break;
10742 case PLUS:
10743 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10744 this for equality comparisons due to pathological cases involving
10745 overflows. */
10746 if (equality_comparison_p
10747 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10748 op1, XEXP (op0, 1))))
10750 op0 = XEXP (op0, 0);
10751 op1 = tem;
10752 continue;
10755 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10756 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10757 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10759 op0 = XEXP (XEXP (op0, 0), 0);
10760 code = (code == LT ? EQ : NE);
10761 continue;
10763 break;
10765 case MINUS:
10766 /* We used to optimize signed comparisons against zero, but that
10767 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10768 arrive here as equality comparisons, or (GEU, LTU) are
10769 optimized away. No need to special-case them. */
10771 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10772 (eq B (minus A C)), whichever simplifies. We can only do
10773 this for equality comparisons due to pathological cases involving
10774 overflows. */
10775 if (equality_comparison_p
10776 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10777 XEXP (op0, 1), op1)))
10779 op0 = XEXP (op0, 0);
10780 op1 = tem;
10781 continue;
10784 if (equality_comparison_p
10785 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10786 XEXP (op0, 0), op1)))
10788 op0 = XEXP (op0, 1);
10789 op1 = tem;
10790 continue;
10793 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10794 of bits in X minus 1, is one iff X > 0. */
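/* Sketch (assuming 32-bit SImode): (ashiftrt X 31) is 0 when X >= 0 and
   -1 when X < 0, so the MINUS is -X, 0, or -1 - X respectively; only the
   X > 0 case has the sign bit set.  Thus
   (lt (minus (ashiftrt X 31) X) 0) becomes (gt X 0).  */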
10795 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10796 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10797 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10798 == mode_width - 1
10799 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10801 op0 = XEXP (op0, 1);
10802 code = (code == GE ? LE : GT);
10803 continue;
10805 break;
10807 case XOR:
10808 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10809 if C is zero or B is a constant. */
10810 if (equality_comparison_p
10811 && 0 != (tem = simplify_binary_operation (XOR, mode,
10812 XEXP (op0, 1), op1)))
10814 op0 = XEXP (op0, 0);
10815 op1 = tem;
10816 continue;
10818 break;
10820 case EQ: case NE:
10821 case UNEQ: case LTGT:
10822 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
10823 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
10824 case UNORDERED: case ORDERED:
10825 /* We can't do anything if OP0 is a condition code value, rather
10826 than an actual data value. */
10827 if (const_op != 0
10828 || CC0_P (XEXP (op0, 0))
10829 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10830 break;
10832 /* Get the two operands being compared. */
10833 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10834 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10835 else
10836 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10838 /* Check for the cases where we simply want the result of the
10839 earlier test or the opposite of that result. */
10840 if (code == NE || code == EQ
10841 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10842 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10843 && (STORE_FLAG_VALUE
10844 & (((HOST_WIDE_INT) 1
10845 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10846 && (code == LT || code == GE)))
10848 enum rtx_code new_code;
10849 if (code == LT || code == NE)
10850 new_code = GET_CODE (op0);
10851 else
10852 new_code = combine_reversed_comparison_code (op0);
10854 if (new_code != UNKNOWN)
10856 code = new_code;
10857 op0 = tem;
10858 op1 = tem1;
10859 continue;
10862 break;
10864 case IOR:
10865 /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
10866 iff X <= 0. */
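/* Sketch: if X > 0 neither X nor X - 1 has the sign bit set; if X == 0
   then X - 1 is -1; if X < 0 then X itself is negative.  So
   (lt (ior (plus X (const_int -1)) X) 0) becomes (le X 0).  */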
10867 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10868 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10869 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10871 op0 = XEXP (op0, 1);
10872 code = (code == GE ? GT : LE);
10873 continue;
10875 break;
10877 case AND:
10878 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10879 will be converted to a ZERO_EXTRACT later. */
10880 if (const_op == 0 && equality_comparison_p
10881 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10882 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10884 op0 = simplify_and_const_int
10885 (op0, mode, gen_rtx_LSHIFTRT (mode,
10886 XEXP (op0, 1),
10887 XEXP (XEXP (op0, 0), 1)),
10888 (HOST_WIDE_INT) 1);
10889 continue;
10892 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10893 zero and X is a comparison and C1 and C2 describe only bits set
10894 in STORE_FLAG_VALUE, we can compare with X. */
10895 if (const_op == 0 && equality_comparison_p
10896 && mode_width <= HOST_BITS_PER_WIDE_INT
10897 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10898 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10899 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10900 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10901 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10903 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10904 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10905 if ((~STORE_FLAG_VALUE & mask) == 0
10906 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10907 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10908 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10910 op0 = XEXP (XEXP (op0, 0), 0);
10911 continue;
10915 /* If we are doing an equality comparison of an AND of a bit equal
10916 to the sign bit, replace this with an LT or GE comparison of
10917 the underlying value. */
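/* E.g. (assuming 32-bit SImode): (eq (and:SI X (const_int 0x80000000))
   (const_int 0)) becomes (ge X (const_int 0)), and the NE form
   becomes LT.  */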
10918 if (equality_comparison_p
10919 && const_op == 0
10920 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10921 && mode_width <= HOST_BITS_PER_WIDE_INT
10922 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10923 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10925 op0 = XEXP (op0, 0);
10926 code = (code == EQ ? GE : LT);
10927 continue;
10930 /* If this AND operation is really a ZERO_EXTEND from a narrower
10931 mode, the constant fits within that mode, and this is either an
10932 equality or unsigned comparison, try to do this comparison in
10933 the narrower mode. */
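/* E.g., (leu (and:SI X (const_int 0xff)) (const_int 10)) can be done as
   a QImode comparison of the low part of X with 10.  */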
10934 if ((equality_comparison_p || unsigned_comparison_p)
10935 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10936 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10937 & GET_MODE_MASK (mode))
10938 + 1)) >= 0
10939 && const_op >> i == 0
10940 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10942 op0 = gen_lowpart (tmode, XEXP (op0, 0));
10943 continue;
10946 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
10947 fits in both M1 and M2 and the SUBREG is either paradoxical
10948 or represents the low part, permute the SUBREG and the AND
10949 and try again. */
10950 if (GET_CODE (XEXP (op0, 0)) == SUBREG)
10952 unsigned HOST_WIDE_INT c1;
10953 tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
10954 /* Require an integral mode, to avoid creating something like
10955 (AND:SF ...). */
10956 if (SCALAR_INT_MODE_P (tmode)
10957 /* It is unsafe to commute the AND into the SUBREG if the
10958 SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
10959 not defined. As originally written the upper bits
10960 have a defined value due to the AND operation.
10961 However, if we commute the AND inside the SUBREG then
10962 they no longer have defined values and the meaning of
10963 the code has been changed. */
10964 && (0
10965 #ifdef WORD_REGISTER_OPERATIONS
10966 || (mode_width > GET_MODE_BITSIZE (tmode)
10967 && mode_width <= BITS_PER_WORD)
10968 #endif
10969 || (mode_width <= GET_MODE_BITSIZE (tmode)
10970 && subreg_lowpart_p (XEXP (op0, 0))))
10971 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10972 && mode_width <= HOST_BITS_PER_WIDE_INT
10973 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
10974 && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
10975 && (c1 & ~GET_MODE_MASK (tmode)) == 0
10976 && c1 != mask
10977 && c1 != GET_MODE_MASK (tmode))
10979 op0 = gen_binary (AND, tmode,
10980 SUBREG_REG (XEXP (op0, 0)),
10981 gen_int_mode (c1, tmode));
10982 op0 = gen_lowpart (mode, op0);
10983 continue;
10987 /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */
10988 if (const_op == 0 && equality_comparison_p
10989 && XEXP (op0, 1) == const1_rtx
10990 && GET_CODE (XEXP (op0, 0)) == NOT)
10992 op0 = simplify_and_const_int
10993 (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
10994 code = (code == NE ? EQ : NE);
10995 continue;
10998 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10999 (eq (and (lshiftrt X) 1) 0).
11000 Also handle the case where (not X) is expressed using xor. */
11001 if (const_op == 0 && equality_comparison_p
11002 && XEXP (op0, 1) == const1_rtx
11003 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
11005 rtx shift_op = XEXP (XEXP (op0, 0), 0);
11006 rtx shift_count = XEXP (XEXP (op0, 0), 1);
11008 if (GET_CODE (shift_op) == NOT
11009 || (GET_CODE (shift_op) == XOR
11010 && GET_CODE (XEXP (shift_op, 1)) == CONST_INT
11011 && GET_CODE (shift_count) == CONST_INT
11012 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
11013 && (INTVAL (XEXP (shift_op, 1))
11014 == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
11016 op0 = simplify_and_const_int
11017 (NULL_RTX, mode,
11018 gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
11019 (HOST_WIDE_INT) 1);
11020 code = (code == NE ? EQ : NE);
11021 continue;
11024 break;
11026 case ASHIFT:
11027 /* If we have (compare (ashift FOO N) (const_int C)) and
11028 the high order N bits of FOO (N+1 if an inequality comparison)
11029 are known to be zero, we can do this by comparing FOO with C
11030 shifted right N bits so long as the low-order N bits of C are
11031 zero. */
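/* E.g. (assuming the high eight bits of FOO are known zero):
   (compare (ashift:SI FOO 8) (const_int 0x1200)) can become
   (compare FOO (const_int 0x12)), because the low 8 bits of the
   constant are zero.  */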
11032 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11033 && INTVAL (XEXP (op0, 1)) >= 0
11034 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
11035 < HOST_BITS_PER_WIDE_INT)
11036 && ((const_op
11037 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
11038 && mode_width <= HOST_BITS_PER_WIDE_INT
11039 && (nonzero_bits (XEXP (op0, 0), mode)
11040 & ~(mask >> (INTVAL (XEXP (op0, 1))
11041 + ! equality_comparison_p))) == 0)
11043 /* We must perform a logical shift, not an arithmetic one,
11044 as we want the top N bits of C to be zero. */
11045 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
11047 temp >>= INTVAL (XEXP (op0, 1));
11048 op1 = gen_int_mode (temp, mode);
11049 op0 = XEXP (op0, 0);
11050 continue;
11053 /* If we are doing a sign bit comparison, it means we are testing
11054 a particular bit. Convert it to the appropriate AND. */
11055 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
11056 && mode_width <= HOST_BITS_PER_WIDE_INT)
11058 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11059 ((HOST_WIDE_INT) 1
11060 << (mode_width - 1
11061 - INTVAL (XEXP (op0, 1)))));
11062 code = (code == LT ? NE : EQ);
11063 continue;
11066 /* If this is an equality comparison with zero and we are shifting
11067 the low bit to the sign bit, we can convert this to an AND of the
11068 low-order bit. */
11069 if (const_op == 0 && equality_comparison_p
11070 && GET_CODE (XEXP (op0, 1)) == CONST_INT
11071 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11072 == mode_width - 1)
11074 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11075 (HOST_WIDE_INT) 1);
11076 continue;
11078 break;
11080 case ASHIFTRT:
11081 /* If this is an equality comparison with zero, we can do this
11082 as a logical shift, which might be much simpler. */
11083 if (equality_comparison_p && const_op == 0
11084 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
11086 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11087 XEXP (op0, 0),
11088 INTVAL (XEXP (op0, 1)));
11089 continue;
11092 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11093 do the comparison in a narrower mode. */
11094 if (! unsigned_comparison_p
11095 && GET_CODE (XEXP (op0, 1)) == CONST_INT
11096 && GET_CODE (XEXP (op0, 0)) == ASHIFT
11097 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11098 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11099 MODE_INT, 1)) != BLKmode
11100 && (((unsigned HOST_WIDE_INT) const_op
11101 + (GET_MODE_MASK (tmode) >> 1) + 1)
11102 <= GET_MODE_MASK (tmode)))
11104 op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
11105 continue;
11108 /* Likewise if OP0 is a PLUS of a sign extension with a
11109 constant, which is usually represented with the PLUS
11110 between the shifts. */
11111 if (! unsigned_comparison_p
11112 && GET_CODE (XEXP (op0, 1)) == CONST_INT
11113 && GET_CODE (XEXP (op0, 0)) == PLUS
11114 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
11115 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11116 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11117 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11118 MODE_INT, 1)) != BLKmode
11119 && (((unsigned HOST_WIDE_INT) const_op
11120 + (GET_MODE_MASK (tmode) >> 1) + 1)
11121 <= GET_MODE_MASK (tmode)))
11123 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11124 rtx add_const = XEXP (XEXP (op0, 0), 1);
11125 rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
11126 XEXP (op0, 1));
11128 op0 = gen_binary (PLUS, tmode,
11129 gen_lowpart (tmode, inner),
11130 new_const);
11131 continue;
11134 /* ... fall through ... */
11135 case LSHIFTRT:
11136 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
11137 the low order N bits of FOO are known to be zero, we can do this
11138 by comparing FOO with C shifted left N bits so long as no
11139 overflow occurs. */
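/* E.g., (compare (lshiftrt:SI FOO 4) (const_int 0x10)) with the low four
   bits of FOO known zero can become (compare FOO (const_int 0x100)),
   with the condition made unsigned.  */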
11140 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11141 && INTVAL (XEXP (op0, 1)) >= 0
11142 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11143 && mode_width <= HOST_BITS_PER_WIDE_INT
11144 && (nonzero_bits (XEXP (op0, 0), mode)
11145 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
11146 && (((unsigned HOST_WIDE_INT) const_op
11147 + (GET_CODE (op0) != LSHIFTRT
11148 ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11149 + 1)
11150 : 0))
11151 <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
11153 /* If the shift was logical, then we must make the condition
11154 unsigned. */
11155 if (GET_CODE (op0) == LSHIFTRT)
11156 code = unsigned_condition (code);
11158 const_op <<= INTVAL (XEXP (op0, 1));
11159 op1 = GEN_INT (const_op);
11160 op0 = XEXP (op0, 0);
11161 continue;
11164 /* If we are using this shift to extract just the sign bit, we
11165 can replace this with an LT or GE comparison. */
11166 if (const_op == 0
11167 && (equality_comparison_p || sign_bit_comparison_p)
11168 && GET_CODE (XEXP (op0, 1)) == CONST_INT
11169 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11170 == mode_width - 1)
11172 op0 = XEXP (op0, 0);
11173 code = (code == NE || code == GT ? LT : GE);
11174 continue;
11176 break;
11178 default:
11179 break;
11182 break;
11185 /* Now make any compound operations involved in this comparison. Then,
11186 check for an outermost SUBREG on OP0 that is not doing anything or is
11187 paradoxical. The latter transformation must only be performed when
11188 it is known that the "extra" bits will be the same in op0 and op1 or
11189 that they don't matter. There are three cases to consider:
11191 1. SUBREG_REG (op0) is a register. In this case the bits are don't
11192 care bits and we can assume they have any convenient value. So
11193 making the transformation is safe.
11195 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11196 In this case the upper bits of op0 are undefined. We should not make
11197 the simplification in that case as we do not know the contents of
11198 those bits.
11200 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
11201 NIL. In that case we know those bits are zeros or ones. We must
11202 also be sure that they are the same as the upper bits of op1.
11204 We can never remove a SUBREG for a non-equality comparison because
11205 the sign bit is in a different place in the underlying object. */
11207 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11208 op1 = make_compound_operation (op1, SET);
11210 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11211 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11212 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
11213 && (code == NE || code == EQ))
11215 if (GET_MODE_SIZE (GET_MODE (op0))
11216 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11218 /* For paradoxical subregs, allow case 1 as above. Case 3 isn't
11219 implemented. */
11220 if (GET_CODE (SUBREG_REG (op0)) == REG)
11222 op0 = SUBREG_REG (op0);
11223 op1 = gen_lowpart (GET_MODE (op0), op1);
11226 else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11227 <= HOST_BITS_PER_WIDE_INT)
11228 && (nonzero_bits (SUBREG_REG (op0),
11229 GET_MODE (SUBREG_REG (op0)))
11230 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11232 tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
11234 if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11235 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11236 op0 = SUBREG_REG (op0), op1 = tem;
11240 /* We now do the opposite procedure: Some machines don't have compare
11241 insns in all modes. If OP0's mode is an integer mode smaller than a
11242 word and we can't do a compare in that mode, see if there is a larger
11243 mode for which we can do the compare. There are a number of cases in
11244 which we can use the wider mode. */
11246 mode = GET_MODE (op0);
11247 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11248 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11249 && ! have_insn_for (COMPARE, mode))
11250 for (tmode = GET_MODE_WIDER_MODE (mode);
11251 (tmode != VOIDmode
11252 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11253 tmode = GET_MODE_WIDER_MODE (tmode))
11254 if (have_insn_for (COMPARE, tmode))
11256 int zero_extended;
11258 /* If the only nonzero bits in OP0 and OP1 are those in the
11259 narrower mode and this is an equality or unsigned comparison,
11260 we can use the wider mode. Similarly for sign-extended
11261 values, in which case it is true for all comparisons. */
11262 zero_extended = ((code == EQ || code == NE
11263 || code == GEU || code == GTU
11264 || code == LEU || code == LTU)
11265 && (nonzero_bits (op0, tmode)
11266 & ~GET_MODE_MASK (mode)) == 0
11267 && ((GET_CODE (op1) == CONST_INT
11268 || (nonzero_bits (op1, tmode)
11269 & ~GET_MODE_MASK (mode)) == 0)));
11271 if (zero_extended
11272 || ((num_sign_bit_copies (op0, tmode)
11273 > (unsigned int) (GET_MODE_BITSIZE (tmode)
11274 - GET_MODE_BITSIZE (mode)))
11275 && (num_sign_bit_copies (op1, tmode)
11276 > (unsigned int) (GET_MODE_BITSIZE (tmode)
11277 - GET_MODE_BITSIZE (mode)))))
11279 /* If OP0 is an AND and we don't have an AND in MODE either,
11280 make a new AND in the proper mode. */
11281 if (GET_CODE (op0) == AND
11282 && !have_insn_for (AND, mode))
11283 op0 = gen_binary (AND, tmode,
11284 gen_lowpart (tmode,
11285 XEXP (op0, 0)),
11286 gen_lowpart (tmode,
11287 XEXP (op0, 1)));
11289 op0 = gen_lowpart (tmode, op0);
11290 if (zero_extended && GET_CODE (op1) == CONST_INT)
11291 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
11292 op1 = gen_lowpart (tmode, op1);
11293 break;
11296 /* If this is a test for negative, we can make an explicit
11297 test of the sign bit. */
11299 if (op1 == const0_rtx && (code == LT || code == GE)
11300 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11302 op0 = gen_binary (AND, tmode,
11303 gen_lowpart (tmode, op0),
11304 GEN_INT ((HOST_WIDE_INT) 1
11305 << (GET_MODE_BITSIZE (mode) - 1)));
11306 code = (code == LT) ? NE : EQ;
11307 break;
11311 #ifdef CANONICALIZE_COMPARISON
11312 /* If this machine only supports a subset of valid comparisons, see if we
11313 can convert an unsupported one into a supported one. */
11314 CANONICALIZE_COMPARISON (code, op0, op1);
11315 #endif
11317 *pop0 = op0;
11318 *pop1 = op1;
11320 return code;
11323 /* Like jump.c's reversed_comparison_code, but use combine infrastructure for
11324 searching backward. */
11325 static enum rtx_code
11326 combine_reversed_comparison_code (rtx exp)
11328 enum rtx_code code1 = reversed_comparison_code (exp, NULL);
11329 rtx x;
11331 if (code1 != UNKNOWN
11332 || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
11333 return code1;
11334 /* Otherwise try to find where the condition codes were last set and
11335 use that. */
11336 x = get_last_value (XEXP (exp, 0));
11337 if (!x || GET_CODE (x) != COMPARE)
11338 return UNKNOWN;
11339 return reversed_comparison_code_parts (GET_CODE (exp),
11340 XEXP (x, 0), XEXP (x, 1), NULL);
11343 /* Return comparison with reversed code of EXP and operands OP0 and OP1.
11344 Return NULL_RTX in case we fail to do the reversal. */
11345 static rtx
11346 reversed_comparison (rtx exp, enum machine_mode mode, rtx op0, rtx op1)
11348 enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
11349 if (reversed_code == UNKNOWN)
11350 return NULL_RTX;
11351 else
11352 return gen_binary (reversed_code, mode, op0, op1);
11355 /* Utility function for following routine. Called when X is part of a value
11356 being stored into reg_last_set_value. Sets reg_last_set_table_tick
11357 for each register mentioned. Similar to mention_regs in cse.c. */
11359 static void
11360 update_table_tick (rtx x)
11362 enum rtx_code code = GET_CODE (x);
11363 const char *fmt = GET_RTX_FORMAT (code);
11364 int i;
11366 if (code == REG)
11368 unsigned int regno = REGNO (x);
11369 unsigned int endregno
11370 = regno + (regno < FIRST_PSEUDO_REGISTER
11371 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11372 unsigned int r;
11374 for (r = regno; r < endregno; r++)
11375 reg_last_set_table_tick[r] = label_tick;
11377 return;
11380 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11381 /* Note that we can't have an "E" in values stored; see
11382 get_last_value_validate. */
11383 if (fmt[i] == 'e')
11385 /* Check for identical subexpressions. If x contains
11386 identical subexpressions we only have to traverse one of
11387 them. */
11388 if (i == 0
11389 && (GET_RTX_CLASS (code) == '2'
11390 || GET_RTX_CLASS (code) == 'c'))
11392 /* Note that at this point x1 has already been
11393 processed. */
11394 rtx x0 = XEXP (x, 0);
11395 rtx x1 = XEXP (x, 1);
11397 /* If x0 and x1 are identical then there is no need to
11398 process x0. */
11399 if (x0 == x1)
11400 break;
11402 /* If x0 is identical to a subexpression of x1 then while
11403 processing x1, x0 has already been processed. Thus we
11404 are done with x. */
11405 if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11406 || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11407 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11408 break;
11410 /* If x1 is identical to a subexpression of x0 then we
11411 still have to process the rest of x0. */
11412 if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11413 || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11414 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11416 update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11417 break;
11421 update_table_tick (XEXP (x, i));
11425 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11426 are saying that the register is clobbered and we no longer know its
11427 value. If INSN is zero, don't update reg_last_set; this is only permitted
11428 with VALUE also zero and is used to invalidate the register. */
11430 static void
11431 record_value_for_reg (rtx reg, rtx insn, rtx value)
11433 unsigned int regno = REGNO (reg);
11434 unsigned int endregno
11435 = regno + (regno < FIRST_PSEUDO_REGISTER
11436 ? hard_regno_nregs[regno][GET_MODE (reg)] : 1);
11437 unsigned int i;
11439 /* If VALUE contains REG and we have a previous value for REG, substitute
11440 the previous value. */
11441 if (value && insn && reg_overlap_mentioned_p (reg, value))
11443 rtx tem;
11445 /* Set things up so get_last_value is allowed to see anything set up to
11446 our insn. */
11447 subst_low_cuid = INSN_CUID (insn);
11448 tem = get_last_value (reg);
11450 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11451 it isn't going to be useful and will take a lot of time to process,
11452 so just use the CLOBBER. */
11454 if (tem)
11456 if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11457 || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11458 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11459 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11460 tem = XEXP (tem, 0);
11462 value = replace_rtx (copy_rtx (value), reg, tem);
11466 /* For each register modified, show we don't know its value, that
11467 we don't know about its bitwise content, that its value has been
11468 updated, and that we don't know the location of the death of the
11469 register. */
11470 for (i = regno; i < endregno; i++)
11472 if (insn)
11473 reg_last_set[i] = insn;
11475 reg_last_set_value[i] = 0;
11476 reg_last_set_mode[i] = 0;
11477 reg_last_set_nonzero_bits[i] = 0;
11478 reg_last_set_sign_bit_copies[i] = 0;
11479 reg_last_death[i] = 0;
11482 /* Mark registers that are being referenced in this value. */
11483 if (value)
11484 update_table_tick (value);
11486 /* Now update the status of each register being set.
11487 If someone is using this register in this block, set this register
11488 to invalid since we will get confused between the two lives in this
11489 basic block. This makes using this register always invalid. In cse, we
11490 scan the table to invalidate all entries using this register, but this
11491 is too much work for us. */
11493 for (i = regno; i < endregno; i++)
11495 reg_last_set_label[i] = label_tick;
11496 if (value && reg_last_set_table_tick[i] == label_tick)
11497 reg_last_set_invalid[i] = 1;
11498 else
11499 reg_last_set_invalid[i] = 0;
11502 /* The value being assigned might refer to X (like in "x++;"). In that
11503 case, we must replace it with (clobber (const_int 0)) to prevent
11504 infinite loops. */
11505 if (value && ! get_last_value_validate (&value, insn,
11506 reg_last_set_label[regno], 0))
11508 value = copy_rtx (value);
11509 if (! get_last_value_validate (&value, insn,
11510 reg_last_set_label[regno], 1))
11511 value = 0;
11514 /* For the main register being modified, update the value, the mode, the
11515 nonzero bits, and the number of sign bit copies. */
11517 reg_last_set_value[regno] = value;
11519 if (value)
11521 enum machine_mode mode = GET_MODE (reg);
11522 subst_low_cuid = INSN_CUID (insn);
11523 reg_last_set_mode[regno] = mode;
11524 if (GET_MODE_CLASS (mode) == MODE_INT
11525 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11526 mode = nonzero_bits_mode;
11527 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, mode);
11528 reg_last_set_sign_bit_copies[regno]
11529 = num_sign_bit_copies (value, GET_MODE (reg));
11533 /* Called via note_stores from record_dead_and_set_regs to handle one
11534 SET or CLOBBER in an insn. DATA is the instruction in which the
11535 set is occurring. */
11537 static void
11538 record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
11540 rtx record_dead_insn = (rtx) data;
11542 if (GET_CODE (dest) == SUBREG)
11543 dest = SUBREG_REG (dest);
11545 if (GET_CODE (dest) == REG)
11547 /* If we are setting the whole register, we know its value. Otherwise
11548 show that we don't know the value. We can handle SUBREG in
11549 some cases. */
11550 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11551 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11552 else if (GET_CODE (setter) == SET
11553 && GET_CODE (SET_DEST (setter)) == SUBREG
11554 && SUBREG_REG (SET_DEST (setter)) == dest
11555 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11556 && subreg_lowpart_p (SET_DEST (setter)))
11557 record_value_for_reg (dest, record_dead_insn,
11558 gen_lowpart (GET_MODE (dest),
11559 SET_SRC (setter)));
11560 else
11561 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11563 else if (GET_CODE (dest) == MEM
11564 /* Ignore pushes; they clobber nothing. */
11565 && ! push_operand (dest, GET_MODE (dest)))
11566 mem_last_set = INSN_CUID (record_dead_insn);
11569 /* Update the records of when each REG was most recently set or killed
11570 for the things done by INSN. This is the last thing done in processing
11571 INSN in the combiner loop.
11573 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11574 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11575 and also the similar information mem_last_set (which insn most recently
11576 modified memory) and last_call_cuid (which insn was the most recent
11577 subroutine call). */
11579 static void
11580 record_dead_and_set_regs (rtx insn)
11582 rtx link;
11583 unsigned int i;
11585 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11587 if (REG_NOTE_KIND (link) == REG_DEAD
11588 && GET_CODE (XEXP (link, 0)) == REG)
11590 unsigned int regno = REGNO (XEXP (link, 0));
11591 unsigned int endregno
11592 = regno + (regno < FIRST_PSEUDO_REGISTER
11593 ? hard_regno_nregs[regno][GET_MODE (XEXP (link, 0))]
11594 : 1);
11596 for (i = regno; i < endregno; i++)
11597 reg_last_death[i] = insn;
11599 else if (REG_NOTE_KIND (link) == REG_INC)
11600 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11603 if (GET_CODE (insn) == CALL_INSN)
11605 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11606 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11608 reg_last_set_value[i] = 0;
11609 reg_last_set_mode[i] = 0;
11610 reg_last_set_nonzero_bits[i] = 0;
11611 reg_last_set_sign_bit_copies[i] = 0;
11612 reg_last_death[i] = 0;
11615 last_call_cuid = mem_last_set = INSN_CUID (insn);
11617 /* Don't bother recording what this insn does. It might set the
11618 return value register, but we can't combine into a call
11619 pattern anyway, so there's no point trying (and it may cause
11620 a crash, if e.g. we wind up asking for last_set_value of a
11621 SUBREG of the return value register). */
11622 return;
11625 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11628 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11629 register present in the SUBREG, so for each such SUBREG go back and
11630 adjust nonzero and sign bit information of the registers that are
11631 known to have some zero/sign bits set.
11633 This is needed because when combine blows the SUBREGs away, the
11634 information on zero/sign bits is lost and further combines can be
11635 missed because of that. */
11637 static void
11638 record_promoted_value (rtx insn, rtx subreg)
11640 rtx links, set;
11641 unsigned int regno = REGNO (SUBREG_REG (subreg));
11642 enum machine_mode mode = GET_MODE (subreg);
11644 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11645 return;
11647 for (links = LOG_LINKS (insn); links;)
11649 insn = XEXP (links, 0);
11650 set = single_set (insn);
11652 if (! set || GET_CODE (SET_DEST (set)) != REG
11653 || REGNO (SET_DEST (set)) != regno
11654 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11656 links = XEXP (links, 1);
11657 continue;
11660 if (reg_last_set[regno] == insn)
11662 if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11663 reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11666 if (GET_CODE (SET_SRC (set)) == REG)
11668 regno = REGNO (SET_SRC (set));
11669 links = LOG_LINKS (insn);
11671 else
11672 break;
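/* Illustration: suppose (subreg:QI (reg:SI 100) 0) has the promoted
   flag set with an unsigned promotion.  Register 100 is then known to
   hold the zero extension of a QImode value, so its recorded nonzero
   bits can be narrowed to GET_MODE_MASK (QImode); the LOG_LINKS walk
   above propagates the same narrowing back through plain register
   copies that feed register 100.  */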
11676 /* Scan X for promoted SUBREGs. For each one found,
11677 note what it implies to the registers used in it. */
11679 static void
11680 check_promoted_subreg (rtx insn, rtx x)
11682 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11683 && GET_CODE (SUBREG_REG (x)) == REG)
11684 record_promoted_value (insn, x);
11685 else
11687 const char *format = GET_RTX_FORMAT (GET_CODE (x));
11688 int i, j;
11690 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11691 switch (format[i])
11693 case 'e':
11694 check_promoted_subreg (insn, XEXP (x, i));
11695 break;
11696 case 'V':
11697 case 'E':
11698 if (XVEC (x, i) != 0)
11699 for (j = 0; j < XVECLEN (x, i); j++)
11700 check_promoted_subreg (insn, XVECEXP (x, i, j));
11701 break;
11706 /* Utility routine for the following function. Verify that all the registers
11707 mentioned in *LOC are valid when *LOC was part of a value set when
11708 label_tick == TICK. Return 0 if some are not.
11710 If REPLACE is nonzero, replace the invalid reference with
11711 (clobber (const_int 0)) and return 1. This replacement is useful because
11712 we often can get useful information about the form of a value (e.g., if
11713 it was produced by a shift that always produces -1 or 0) even though
11714 we don't know exactly what registers it was produced from. */
11716 static int
11717 get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
11719 rtx x = *loc;
11720 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11721 int len = GET_RTX_LENGTH (GET_CODE (x));
11722 int i;
11724 if (GET_CODE (x) == REG)
11726 unsigned int regno = REGNO (x);
11727 unsigned int endregno
11728 = regno + (regno < FIRST_PSEUDO_REGISTER
11729 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11730 unsigned int j;
11732 for (j = regno; j < endregno; j++)
11733 if (reg_last_set_invalid[j]
11734 /* If this is a pseudo-register that was only set once and not
11735 live at the beginning of the function, it is always valid. */
11736 || (! (regno >= FIRST_PSEUDO_REGISTER
11737 && REG_N_SETS (regno) == 1
11738 && (! REGNO_REG_SET_P
11739 (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))
11740 && reg_last_set_label[j] > tick))
11742 if (replace)
11743 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11744 return replace;
11747 return 1;
11749 /* If this is a memory reference, make sure that there were
11750 no stores after it that might have clobbered the value. We don't
11751 have alias info, so we assume any store invalidates it. */
11752 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11753 && INSN_CUID (insn) <= mem_last_set)
11755 if (replace)
11756 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11757 return replace;
11760 for (i = 0; i < len; i++)
11762 if (fmt[i] == 'e')
11764 /* Check for identical subexpressions. If x contains
11765 identical subexpressions we only have to traverse one of
11766 them. */
11767 if (i == 1
11768 && (GET_RTX_CLASS (GET_CODE (x)) == '2'
11769 || GET_RTX_CLASS (GET_CODE (x)) == 'c'))
11771 /* Note that at this point x0 has already been checked
11772 and found valid. */
11773 rtx x0 = XEXP (x, 0);
11774 rtx x1 = XEXP (x, 1);
11776 /* If x0 and x1 are identical then x is also valid. */
11777 if (x0 == x1)
11778 return 1;
11780 /* If x1 is identical to a subexpression of x0 then
11781 while checking x0, x1 has already been checked. Thus
11782 it is valid, and so is x. */
11783 if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11784 || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11785 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11786 return 1;
11788 /* If x0 is identical to a subexpression of x1 then x is
11789 valid iff the rest of x1 is valid. */
11790 if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11791 || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11792 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11793 return
11794 get_last_value_validate (&XEXP (x1,
11795 x0 == XEXP (x1, 0) ? 1 : 0),
11796 insn, tick, replace);
11799 if (get_last_value_validate (&XEXP (x, i), insn, tick,
11800 replace) == 0)
11801 return 0;
11803 /* Don't bother with these. They shouldn't occur anyway. */
11804 else if (fmt[i] == 'E')
11805 return 0;
11808 /* If we haven't found a reason for it to be invalid, it is valid. */
11809 return 1;
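/* For instance, if the recorded value is
   (ashiftrt:SI (reg:SI 101) (const_int 31)) but register 101 has been
   set again since (its reg_last_set_label is newer than TICK), then
   with REPLACE nonzero the value becomes
   (ashiftrt:SI (clobber:SI (const_int 0)) (const_int 31)).  That is
   still useful: it says the value is 0 or -1, even though we no
   longer know which register produced it.  */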
11812 /* Get the last value assigned to X, if known. Some registers
11813 in the value may be replaced with (clobber (const_int 0)) if their value
11814 is no longer known reliably. */
11816 static rtx
11817 get_last_value (rtx x)
11819 unsigned int regno;
11820 rtx value;
11822 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11823 then convert it to the desired mode. If this is a paradoxical SUBREG,
11824 we cannot predict what values the "extra" bits might have. */
11825 if (GET_CODE (x) == SUBREG
11826 && subreg_lowpart_p (x)
11827 && (GET_MODE_SIZE (GET_MODE (x))
11828 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11829 && (value = get_last_value (SUBREG_REG (x))) != 0)
11830 return gen_lowpart (GET_MODE (x), value);
11832 if (GET_CODE (x) != REG)
11833 return 0;
11835 regno = REGNO (x);
11836 value = reg_last_set_value[regno];
11838 /* If we don't have a value, or if it isn't for this basic block and
11839 it's either a hard register, set more than once, or it's live
11840 at the beginning of the function, return 0.
11842 This is because, if it's not live at the beginning of the function, the reg
11843 is always set before being used (is never used without being set).
11844 And, if it's set only once, and it's always set before use, then all
11845 uses must have the same last value, even if it's not from this basic
11846 block. */
11848 if (value == 0
11849 || (reg_last_set_label[regno] != label_tick
11850 && (regno < FIRST_PSEUDO_REGISTER
11851 || REG_N_SETS (regno) != 1
11852 || (REGNO_REG_SET_P
11853 (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))))
11854 return 0;
11856 /* If the value was set in a later insn than the ones we are processing,
11857 we can't use it even if the register was only set once. */
11858 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
11859 return 0;
11861 /* If the value has all its registers valid, return it. */
11862 if (get_last_value_validate (&value, reg_last_set[regno],
11863 reg_last_set_label[regno], 0))
11864 return value;
11866 /* Otherwise, make a copy and replace any invalid register with
11867 (clobber (const_int 0)). If that fails for some reason, return 0. */
11869 value = copy_rtx (value);
11870 if (get_last_value_validate (&value, reg_last_set[regno],
11871 reg_last_set_label[regno], 1))
11872 return value;
11874 return 0;
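/* Example of the SUBREG path: asked for (subreg:QI (reg:SI 100) 0)
   when register 100 is known to hold (const_int 300), we fetch the
   SImode value and return its QImode lowpart, (const_int 44), because
   the SUBREG is a non-paradoxical lowpart.  */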
11877 /* Return nonzero if expression X refers to a REG or to memory
11878 that is set in an instruction more recent than FROM_CUID. */
11880 static int
11881 use_crosses_set_p (rtx x, int from_cuid)
11883 const char *fmt;
11884 int i;
11885 enum rtx_code code = GET_CODE (x);
11887 if (code == REG)
11889 unsigned int regno = REGNO (x);
11890 unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11891 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11893 #ifdef PUSH_ROUNDING
11894 /* Don't allow uses of the stack pointer to be moved,
11895 because we don't know whether the move crosses a push insn. */
11896 if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11897 return 1;
11898 #endif
11899 for (; regno < endreg; regno++)
11900 if (reg_last_set[regno]
11901 && INSN_CUID (reg_last_set[regno]) > from_cuid)
11902 return 1;
11903 return 0;
11906 if (code == MEM && mem_last_set > from_cuid)
11907 return 1;
11909 fmt = GET_RTX_FORMAT (code);
11911 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11913 if (fmt[i] == 'E')
11915 int j;
11916 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11917 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11918 return 1;
11920 else if (fmt[i] == 'e'
11921 && use_crosses_set_p (XEXP (x, i), from_cuid))
11922 return 1;
11924 return 0;
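/* For example, if X is (plus:SI (reg:SI 100) (reg:SI 101)) and
   register 101 was last set by an insn whose cuid exceeds FROM_CUID,
   we return 1: substituting X at the FROM_CUID position would read
   register 101 before the set that gives it its current value.  */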
11927 /* Define three variables used for communication between the following
11928 routines. */
11930 static unsigned int reg_dead_regno, reg_dead_endregno;
11931 static int reg_dead_flag;
11933 /* Function called via note_stores from reg_dead_at_p.
11935 If DEST is within [reg_dead_regno, reg_dead_endregno), set
11936 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
11938 static void
11939 reg_dead_at_p_1 (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
11941 unsigned int regno, endregno;
11943 if (GET_CODE (dest) != REG)
11944 return;
11946 regno = REGNO (dest);
11947 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11948 ? hard_regno_nregs[regno][GET_MODE (dest)] : 1);
11950 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11951 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11954 /* Return nonzero if REG is known to be dead at INSN.
11956 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
11957 referencing REG, it is dead. If we hit a SET referencing REG, it is
11958 live. Otherwise, see if it is live or dead at the start of the basic
11959 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
11960 must be assumed to be always live. */
11962 static int
11963 reg_dead_at_p (rtx reg, rtx insn)
11965 basic_block block;
11966 unsigned int i;
11968 /* Set variables for reg_dead_at_p_1. */
11969 reg_dead_regno = REGNO (reg);
11970 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11971 ? hard_regno_nregs[reg_dead_regno]
11972 [GET_MODE (reg)]
11973 : 1);
11975 reg_dead_flag = 0;
11977 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
11978 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11980 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11981 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11982 return 0;
11985 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11986 beginning of function. */
11987 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
11988 insn = prev_nonnote_insn (insn))
11990 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11991 if (reg_dead_flag)
11992 return reg_dead_flag == 1 ? 1 : 0;
11994 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11995 return 1;
11998 /* Get the basic block that we were in. */
11999 if (insn == 0)
12000 block = ENTRY_BLOCK_PTR->next_bb;
12001 else
12003 FOR_EACH_BB (block)
12004 if (insn == BB_HEAD (block))
12005 break;
12007 if (block == EXIT_BLOCK_PTR)
12008 return 0;
12011 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12012 if (REGNO_REG_SET_P (block->global_live_at_start, i))
12013 return 0;
12015 return 1;
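/* Sketch of the scan: asked whether (reg:SI 1) is dead at INSN, a
   (clobber (reg:SI 1)) or a REG_DEAD note for register 1 found while
   walking backwards answers "dead", a SET referencing register 1
   answers "live", and if we reach a label, barrier, or the start of
   the function with no answer, the register is dead exactly when it
   is absent from the block's global_live_at_start set.  */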
12018 /* Note hard registers in X that are used. This code is similar to
12019 that in flow.c, but much simpler since we don't care about pseudos. */
12021 static void
12022 mark_used_regs_combine (rtx x)
12024 RTX_CODE code = GET_CODE (x);
12025 unsigned int regno;
12026 int i;
12028 switch (code)
12030 case LABEL_REF:
12031 case SYMBOL_REF:
12032 case CONST_INT:
12033 case CONST:
12034 case CONST_DOUBLE:
12035 case CONST_VECTOR:
12036 case PC:
12037 case ADDR_VEC:
12038 case ADDR_DIFF_VEC:
12039 case ASM_INPUT:
12040 #ifdef HAVE_cc0
12041 /* CC0 must die in the insn after it is set, so we don't need to take
12042 special note of it here. */
12043 case CC0:
12044 #endif
12045 return;
12047 case CLOBBER:
12048 /* If we are clobbering a MEM, mark any hard registers inside the
12049 address as used. */
12050 if (GET_CODE (XEXP (x, 0)) == MEM)
12051 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
12052 return;
12054 case REG:
12055 regno = REGNO (x);
12056 /* A hard reg in a wide mode may really be multiple registers.
12057 If so, mark all of them just like the first. */
12058 if (regno < FIRST_PSEUDO_REGISTER)
12060 unsigned int endregno, r;
12062 /* None of this applies to the stack, frame or arg pointers. */
12063 if (regno == STACK_POINTER_REGNUM
12064 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
12065 || regno == HARD_FRAME_POINTER_REGNUM
12066 #endif
12067 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
12068 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
12069 #endif
12070 || regno == FRAME_POINTER_REGNUM)
12071 return;
12073 endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
12074 for (r = regno; r < endregno; r++)
12075 SET_HARD_REG_BIT (newpat_used_regs, r);
12077 return;
12079 case SET:
12081 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
12082 the address. */
12083 rtx testreg = SET_DEST (x);
12085 while (GET_CODE (testreg) == SUBREG
12086 || GET_CODE (testreg) == ZERO_EXTRACT
12087 || GET_CODE (testreg) == SIGN_EXTRACT
12088 || GET_CODE (testreg) == STRICT_LOW_PART)
12089 testreg = XEXP (testreg, 0);
12091 if (GET_CODE (testreg) == MEM)
12092 mark_used_regs_combine (XEXP (testreg, 0));
12094 mark_used_regs_combine (SET_SRC (x));
12096 return;
12098 default:
12099 break;
12102 /* Recursively scan the operands of this expression. */
12105 const char *fmt = GET_RTX_FORMAT (code);
12107 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12109 if (fmt[i] == 'e')
12110 mark_used_regs_combine (XEXP (x, i));
12111 else if (fmt[i] == 'E')
12113 int j;
12115 for (j = 0; j < XVECLEN (x, i); j++)
12116 mark_used_regs_combine (XVECEXP (x, i, j));
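/* Example: for (set (mem:SI (plus:SI (reg:SI 4) (const_int 8)))
   (reg:SI 0)), assuming registers 0 and 4 are ordinary hard registers
   (not the stack, frame, or arg pointer), both are marked in
   newpat_used_regs: register 4 through the address of the MEM
   destination and register 0 through the SET_SRC.  A multi-word hard
   register gets every constituent word register marked.  */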
12122 /* Remove register number REGNO from the dead registers list of INSN.
12124 Return the note used to record the death, if there was one. */
12126 static rtx
12127 remove_death (unsigned int regno, rtx insn)
12129 rtx note = find_regno_note (insn, REG_DEAD, regno);
12131 if (note)
12133 REG_N_DEATHS (regno)--;
12134 remove_note (insn, note);
12137 return note;
12140 /* For each register (hardware or pseudo) used within expression X, if its
12141 death is in an instruction with cuid between FROM_CUID (inclusive) and
12142 TO_INSN (exclusive), put a REG_DEAD note for that register in the
12143 list headed by PNOTES.
12145 That said, don't move registers killed by maybe_kill_insn.
12147 This is done when X is being merged by combination into TO_INSN. These
12148 notes will then be distributed as needed. */
12150 static void
12151 move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
12152 rtx *pnotes)
12154 const char *fmt;
12155 int len, i;
12156 enum rtx_code code = GET_CODE (x);
12158 if (code == REG)
12160 unsigned int regno = REGNO (x);
12161 rtx where_dead = reg_last_death[regno];
12162 rtx before_dead, after_dead;
12164 /* Don't move the register if it gets killed in between from and to. */
12165 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
12166 && ! reg_referenced_p (x, maybe_kill_insn))
12167 return;
12169 /* WHERE_DEAD could be a USE insn made by combine, so first we
12170 make sure that we have insns with valid INSN_CUID values. */
12171 before_dead = where_dead;
12172 while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
12173 before_dead = PREV_INSN (before_dead);
12175 after_dead = where_dead;
12176 while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
12177 after_dead = NEXT_INSN (after_dead);
12179 if (before_dead && after_dead
12180 && INSN_CUID (before_dead) >= from_cuid
12181 && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
12182 || (where_dead != after_dead
12183 && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
12185 rtx note = remove_death (regno, where_dead);
12187 /* It is possible for the call above to return 0. This can occur
12188 when reg_last_death points to I2 or I1 that we combined with.
12189 In that case make a new note.
12191 We must also check for the case where X is a hard register
12192 and NOTE is a death note for a range of hard registers
12193 including X. In that case, we must put REG_DEAD notes for
12194 the remaining registers in place of NOTE. */
12196 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
12197 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12198 > GET_MODE_SIZE (GET_MODE (x))))
12200 unsigned int deadregno = REGNO (XEXP (note, 0));
12201 unsigned int deadend
12202 = (deadregno + hard_regno_nregs[deadregno]
12203 [GET_MODE (XEXP (note, 0))]);
12204 unsigned int ourend
12205 = regno + hard_regno_nregs[regno][GET_MODE (x)];
12206 unsigned int i;
12208 for (i = deadregno; i < deadend; i++)
12209 if (i < regno || i >= ourend)
12210 REG_NOTES (where_dead)
12211 = gen_rtx_EXPR_LIST (REG_DEAD,
12212 regno_reg_rtx[i],
12213 REG_NOTES (where_dead));
12216 /* If we didn't find any note, or if we found a REG_DEAD note that
12217 covers only part of the given reg, and we have a multi-reg hard
12218 register, then to be safe we must check for REG_DEAD notes
12219 for each register other than the first. They could have
12220 their own REG_DEAD notes lying around. */
12221 else if ((note == 0
12222 || (note != 0
12223 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12224 < GET_MODE_SIZE (GET_MODE (x)))))
12225 && regno < FIRST_PSEUDO_REGISTER
12226 && hard_regno_nregs[regno][GET_MODE (x)] > 1)
12228 unsigned int ourend
12229 = regno + hard_regno_nregs[regno][GET_MODE (x)];
12230 unsigned int i, offset;
12231 rtx oldnotes = 0;
12233 if (note)
12234 offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
12235 else
12236 offset = 1;
12238 for (i = regno + offset; i < ourend; i++)
12239 move_deaths (regno_reg_rtx[i],
12240 maybe_kill_insn, from_cuid, to_insn, &oldnotes);
12243 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
12245 XEXP (note, 1) = *pnotes;
12246 *pnotes = note;
12248 else
12249 *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
12251 REG_N_DEATHS (regno)++;
12254 return;
12257 else if (GET_CODE (x) == SET)
12259 rtx dest = SET_DEST (x);
12261 move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
12263 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
12264 that accesses one word of a multi-word item, some
12265 piece of every register in the expression is used by
12266 this insn, so remove any old death. */
12267 /* ??? So why do we test for equality of the sizes? */
12269 if (GET_CODE (dest) == ZERO_EXTRACT
12270 || GET_CODE (dest) == STRICT_LOW_PART
12271 || (GET_CODE (dest) == SUBREG
12272 && (((GET_MODE_SIZE (GET_MODE (dest))
12273 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
12274 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
12275 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
12277 move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
12278 return;
12281 /* If this is some other SUBREG, we know it replaces the entire
12282 value, so use that as the destination. */
12283 if (GET_CODE (dest) == SUBREG)
12284 dest = SUBREG_REG (dest);
12286 /* If this is a MEM, adjust deaths of anything used in the address.
12287 For a REG (the only other possibility), the entire value is
12288 being replaced so the old value is not used in this insn. */
12290 if (GET_CODE (dest) == MEM)
12291 move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
12292 to_insn, pnotes);
12293 return;
12296 else if (GET_CODE (x) == CLOBBER)
12297 return;
12299 len = GET_RTX_LENGTH (code);
12300 fmt = GET_RTX_FORMAT (code);
12302 for (i = 0; i < len; i++)
12304 if (fmt[i] == 'E')
12306 int j;
12307 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12308 move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
12309 to_insn, pnotes);
12311 else if (fmt[i] == 'e')
12312 move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
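/* Common case, sketched: I2 is being merged into I3 and (reg:SI 100)
   used to die in I2, so reg_last_death[100] points at I2.  I2's cuid
   lies in [FROM_CUID, cuid of TO_INSN), so the REG_DEAD note is
   stripped from I2 and threaded onto *PNOTES; distribute_notes later
   places it on the insn where register 100 now dies.  */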
12316 /* Return 1 if X is the target of a bit-field assignment in BODY, the
12317 pattern of an insn. X must be a REG. */
12319 static int
12320 reg_bitfield_target_p (rtx x, rtx body)
12322 int i;
12324 if (GET_CODE (body) == SET)
12326 rtx dest = SET_DEST (body);
12327 rtx target;
12328 unsigned int regno, tregno, endregno, endtregno;
12330 if (GET_CODE (dest) == ZERO_EXTRACT)
12331 target = XEXP (dest, 0);
12332 else if (GET_CODE (dest) == STRICT_LOW_PART)
12333 target = SUBREG_REG (XEXP (dest, 0));
12334 else
12335 return 0;
12337 if (GET_CODE (target) == SUBREG)
12338 target = SUBREG_REG (target);
12340 if (GET_CODE (target) != REG)
12341 return 0;
12343 tregno = REGNO (target), regno = REGNO (x);
12344 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12345 return target == x;
12347 endtregno = tregno + hard_regno_nregs[tregno][GET_MODE (target)];
12348 endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
12350 return endregno > tregno && regno < endtregno;
12353 else if (GET_CODE (body) == PARALLEL)
12354 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12355 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12356 return 1;
12358 return 0;
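/* E.g. with X = (reg:SI 100) and BODY =
   (set (zero_extract:SI (reg:SI 100) (const_int 8) (const_int 0))
   (reg:SI 101)), the bit-field target is register 100 itself, so we
   return 1.  Callers use this to avoid treating such an insn as fully
   setting the register: only some of its bits are written.  */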
12361 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
12362 as appropriate. I3 and I2 are the insns resulting from the combination
12363 insns including FROM (I2 may be zero).
12365 Each note in the list is either ignored or placed on some insns, depending
12366 on the type of note. */
12368 static void
12369 distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
12371 rtx note, next_note;
12372 rtx tem;
12374 for (note = notes; note; note = next_note)
12376 rtx place = 0, place2 = 0;
12378 /* If this NOTE references a pseudo register, ensure it references
12379 the latest copy of that register. */
12380 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
12381 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
12382 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
12384 next_note = XEXP (note, 1);
12385 switch (REG_NOTE_KIND (note))
12387 case REG_BR_PROB:
12388 case REG_BR_PRED:
12389 /* Doesn't matter much where we put this, as long as it's somewhere.
12390 It is preferable to keep these notes on branches, which is most
12391 likely to be i3. */
12392 place = i3;
12393 break;
12395 case REG_VALUE_PROFILE:
12396 /* Just get rid of this note, as it is unused later anyway. */
12397 break;
12399 case REG_VTABLE_REF:
12400 /* ??? Should remain with *a particular* memory load. Given the
12401 nature of vtable data, the last insn seems relatively safe. */
12402 place = i3;
12403 break;
12405 case REG_NON_LOCAL_GOTO:
12406 if (GET_CODE (i3) == JUMP_INSN)
12407 place = i3;
12408 else if (i2 && GET_CODE (i2) == JUMP_INSN)
12409 place = i2;
12410 else
12411 abort ();
12412 break;
12414 case REG_EH_REGION:
12415 /* These notes must remain with the call or trapping instruction. */
12416 if (GET_CODE (i3) == CALL_INSN)
12417 place = i3;
12418 else if (i2 && GET_CODE (i2) == CALL_INSN)
12419 place = i2;
12420 else if (flag_non_call_exceptions)
12422 if (may_trap_p (i3))
12423 place = i3;
12424 else if (i2 && may_trap_p (i2))
12425 place = i2;
12426 /* ??? Otherwise assume we've combined things such that we
12427 can now prove that the instructions can't trap. Drop the
12428 note in this case. */
12430 else
12431 abort ();
12432 break;
12434 case REG_ALWAYS_RETURN:
12435 case REG_NORETURN:
12436 case REG_SETJMP:
12437 /* These notes must remain with the call. It should not be
12438 possible for both I2 and I3 to be a call. */
12439 if (GET_CODE (i3) == CALL_INSN)
12440 place = i3;
12441 else if (i2 && GET_CODE (i2) == CALL_INSN)
12442 place = i2;
12443 else
12444 abort ();
12445 break;
12447 case REG_UNUSED:
12448 /* Any clobbers for i3 may still exist, and so we must process
12449 REG_UNUSED notes from that insn.
12451 Any clobbers from i2 or i1 can only exist if they were added by
12452 recog_for_combine. In that case, recog_for_combine created the
12453 necessary REG_UNUSED notes. Trying to keep any original
12454 REG_UNUSED notes from these insns can cause incorrect output
12455 if it is for the same register as the original i3 dest.
12456 In that case, we will notice that the register is set in i3,
12457 and then add a REG_UNUSED note for the destination of i3, which
12458 is wrong. However, it is possible to have REG_UNUSED notes from
12459 i2 or i1 for registers which were both used and clobbered, so
12460 we keep notes from i2 or i1 if they will turn into REG_DEAD
12461 notes. */
12463 /* If this register is set or clobbered in I3, put the note there
12464 unless there is one already. */
12465 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12467 if (from_insn != i3)
12468 break;
12470 if (! (GET_CODE (XEXP (note, 0)) == REG
12471 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12472 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12473 place = i3;
12475 /* Otherwise, if this register is used by I3, then this register
12476 now dies here, so we must put a REG_DEAD note here unless there
12477 is one already. */
12478 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12479 && ! (GET_CODE (XEXP (note, 0)) == REG
12480 ? find_regno_note (i3, REG_DEAD,
12481 REGNO (XEXP (note, 0)))
12482 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12484 PUT_REG_NOTE_KIND (note, REG_DEAD);
12485 place = i3;
12487 break;
12489 case REG_EQUAL:
12490 case REG_EQUIV:
12491 case REG_NOALIAS:
12492 /* These notes say something about results of an insn. We can
12493 only support them if they used to be on I3, in which case they
12494 remain on I3. Otherwise they are ignored.
12496 If the note refers to an expression that is not a constant, we
12497 must also ignore the note since we cannot tell whether the
12498 equivalence is still true. It might be possible to do
12499 slightly better than this (we only have a problem if I2DEST
12500 or I1DEST is present in the expression), but it doesn't
12501 seem worth the trouble. */
12503 if (from_insn == i3
12504 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12505 place = i3;
12506 break;
12508 case REG_INC:
12509 case REG_NO_CONFLICT:
12510 /* These notes say something about how a register is used. They must
12511 be present on any use of the register in I2 or I3. */
12512 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12513 place = i3;
12515 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12517 if (place)
12518 place2 = i2;
12519 else
12520 place = i2;
12522 break;
12524 case REG_LABEL:
12525 /* This can show up in several ways -- either directly in the
12526 pattern, or hidden off in the constant pool with (or without?)
12527 a REG_EQUAL note. */
12528 /* ??? Ignore the without-reg_equal-note problem for now. */
12529 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12530 || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12531 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12532 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12533 place = i3;
12535 if (i2
12536 && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12537 || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12538 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12539 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12541 if (place)
12542 place2 = i2;
12543 else
12544 place = i2;
12547 /* Don't attach REG_LABEL note to a JUMP_INSN which has
12548 JUMP_LABEL already. Instead, decrement LABEL_NUSES. */
12549 if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
12551 if (JUMP_LABEL (place) != XEXP (note, 0))
12552 abort ();
12553 if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
12554 LABEL_NUSES (JUMP_LABEL (place))--;
12555 place = 0;
12557 if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
12559 if (JUMP_LABEL (place2) != XEXP (note, 0))
12560 abort ();
12561 if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
12562 LABEL_NUSES (JUMP_LABEL (place2))--;
12563 place2 = 0;
12565 break;
12567 case REG_NONNEG:
12568 /* This note says something about the value of a register prior
12569 to the execution of an insn. It is too much trouble to see
12570 if the note is still correct in all situations. It is better
12571 to simply delete it. */
12572 break;
12574 case REG_RETVAL:
12575 /* If the insn previously containing this note still exists,
12576 put it back where it was. Otherwise move it to the previous
12577 insn. Adjust the corresponding REG_LIBCALL note. */
12578 if (GET_CODE (from_insn) != NOTE)
12579 place = from_insn;
12580 else
12582 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12583 place = prev_real_insn (from_insn);
12584 if (tem && place)
12585 XEXP (tem, 0) = place;
12586 /* If we're deleting the last remaining instruction of a
12587 libcall sequence, don't add the notes. */
12588 else if (XEXP (note, 0) == from_insn)
12589 tem = place = 0;
12591 break;
12593 case REG_LIBCALL:
12594 /* This is handled similarly to REG_RETVAL. */
12595 if (GET_CODE (from_insn) != NOTE)
12596 place = from_insn;
12597 else
12599 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12600 place = next_real_insn (from_insn);
12601 if (tem && place)
12602 XEXP (tem, 0) = place;
12603 /* If we're deleting the last remaining instruction of a
12604 libcall sequence, don't add the notes. */
12605 else if (XEXP (note, 0) == from_insn)
12606 tem = place = 0;
12608 break;
12610 case REG_DEAD:
12611 /* If the register is used as an input in I3, it dies there.
12612 Similarly for I2, if it is nonzero and adjacent to I3.
12614 If the register is not used as an input in either I3 or I2
12615 and it is not one of the registers we were supposed to eliminate,
12616 there are two possibilities. We might have a non-adjacent I2
12617 or we might have somehow eliminated an additional register
12618 from a computation. For example, we might have had A & B where
12619 we discover that B will always be zero. In this case we will
12620 eliminate the reference to A.
12622 In both cases, we must search to see if we can find a previous
12623 use of A and put the death note there. */
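/* Concretely: if I1 was (set (reg:SI 101) (and:SI (reg:SI 100)
   (reg:SI 99))) and combination proved register 99 to be zero, the
   merged pattern no longer references register 100, yet its REG_DEAD
   note still needs a home; the backwards scan below finds the last
   remaining use (or set) of register 100 and deals with the note
   there.  */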
12625 if (from_insn
12626 && GET_CODE (from_insn) == CALL_INSN
12627 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12628 place = from_insn;
12629 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12630 place = i3;
12631 else if (i2 != 0 && next_nonnote_insn (i2) == i3
12632 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12633 place = i2;
12635 if (place == 0)
12637 basic_block bb = this_basic_block;
12639 for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
12641 if (! INSN_P (tem))
12643 if (tem == BB_HEAD (bb))
12644 break;
12645 continue;
12648 /* If the register is being set at TEM, see if that is all
12649 TEM is doing. If so, delete TEM. Otherwise, make this
12650 into a REG_UNUSED note instead. */
12651 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
12653 rtx set = single_set (tem);
12654 rtx inner_dest = 0;
12655 #ifdef HAVE_cc0
12656 rtx cc0_setter = NULL_RTX;
12657 #endif
12659 if (set != 0)
12660 for (inner_dest = SET_DEST (set);
12661 (GET_CODE (inner_dest) == STRICT_LOW_PART
12662 || GET_CODE (inner_dest) == SUBREG
12663 || GET_CODE (inner_dest) == ZERO_EXTRACT);
12664 inner_dest = XEXP (inner_dest, 0))
12667 /* Verify that it was the set, and not a clobber that
12668 modified the register.
12670 CC0 targets must be careful to maintain setter/user
12671 pairs. If we cannot delete the setter due to side
12672 effects, mark the user with an UNUSED note instead
12673 of deleting it. */
12675 if (set != 0 && ! side_effects_p (SET_SRC (set))
12676 && rtx_equal_p (XEXP (note, 0), inner_dest)
12677 #ifdef HAVE_cc0
12678 && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12679 || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12680 && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12681 #endif
12684 /* Move the notes and links of TEM elsewhere.
12685 This might delete other dead insns recursively.
12686 First set the pattern to something that won't use
12687 any register. */
12688 rtx old_notes = REG_NOTES (tem);
12690 PATTERN (tem) = pc_rtx;
12691 REG_NOTES (tem) = NULL;
12693 distribute_notes (old_notes, tem, tem, NULL_RTX);
12694 distribute_links (LOG_LINKS (tem));
12696 PUT_CODE (tem, NOTE);
12697 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
12698 NOTE_SOURCE_FILE (tem) = 0;
12700 #ifdef HAVE_cc0
12701 /* Delete the setter too. */
12702 if (cc0_setter)
12704 PATTERN (cc0_setter) = pc_rtx;
12705 old_notes = REG_NOTES (cc0_setter);
12706 REG_NOTES (cc0_setter) = NULL;
12708 distribute_notes (old_notes, cc0_setter,
12709 cc0_setter, NULL_RTX);
12710 distribute_links (LOG_LINKS (cc0_setter));
12712 PUT_CODE (cc0_setter, NOTE);
12713 NOTE_LINE_NUMBER (cc0_setter)
12714 = NOTE_INSN_DELETED;
12715 NOTE_SOURCE_FILE (cc0_setter) = 0;
12717 #endif
12719 /* If the register is both set and used here, put the
12720 REG_DEAD note here, but place a REG_UNUSED note
12721 here too unless there already is one. */
12722 else if (reg_referenced_p (XEXP (note, 0),
12723 PATTERN (tem)))
12725 place = tem;
12727 if (! find_regno_note (tem, REG_UNUSED,
12728 REGNO (XEXP (note, 0))))
12729 REG_NOTES (tem)
12730 = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
12731 REG_NOTES (tem));
12733 else
12735 PUT_REG_NOTE_KIND (note, REG_UNUSED);
12737 /* If there isn't already a REG_UNUSED note, put one
12738 here. */
12739 if (! find_regno_note (tem, REG_UNUSED,
12740 REGNO (XEXP (note, 0))))
12741 place = tem;
12742 break;
12745 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12746 || (GET_CODE (tem) == CALL_INSN
12747 && find_reg_fusage (tem, USE, XEXP (note, 0))))
12749 place = tem;
12751 /* If we are doing a 3->2 combination, and we have a
12752 register which formerly died in i3 and was not used
12753 by i2, which now no longer dies in i3 and is used in
12754 i2 but does not die in i2, and place is between i2
12755 and i3, then we may need to move a link from place to
12756 i2. */
12757 if (i2 && INSN_UID (place) <= max_uid_cuid
12758 && INSN_CUID (place) > INSN_CUID (i2)
12759 && from_insn
12760 && INSN_CUID (from_insn) > INSN_CUID (i2)
12761 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12763 rtx links = LOG_LINKS (place);
12764 LOG_LINKS (place) = 0;
12765 distribute_links (links);
12767 break;
12770 if (tem == BB_HEAD (bb))
12771 break;
12774 /* We haven't found an insn for the death note and it
12775 is still a REG_DEAD note, but we have hit the beginning
12776 of the block. If the existing life info says the reg
12777 was dead, there's nothing left to do. Otherwise, we'll
12778 need to do a global life update after combine. */
12779 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12780 && REGNO_REG_SET_P (bb->global_live_at_start,
12781 REGNO (XEXP (note, 0))))
12782 SET_BIT (refresh_blocks, this_basic_block->index);
12785 /* If the register is set or already dead at PLACE, we needn't do
12786 anything with this note if it is still a REG_DEAD note.
12787 We check here if it is set at all, not if it is totally replaced,
12788 which is what `dead_or_set_p' checks, so also check for it being
12789 set partially. */
12791 if (place && REG_NOTE_KIND (note) == REG_DEAD)
12793 unsigned int regno = REGNO (XEXP (note, 0));
12795 /* Similarly, if the instruction on which we want to place
12796 the note is a noop, we'll need to do a global live update
12797 after we remove them in delete_noop_moves. */
12798 if (noop_move_p (place))
12799 SET_BIT (refresh_blocks, this_basic_block->index);
12801 if (dead_or_set_p (place, XEXP (note, 0))
12802 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
12804 /* Unless the register previously died in PLACE, clear
12805 reg_last_death. [I no longer understand why this is
12806 being done.] */
12807 if (reg_last_death[regno] != place)
12808 reg_last_death[regno] = 0;
12809 place = 0;
12811 else
12812 reg_last_death[regno] = place;
12814 /* If this is a death note for a hard reg that is occupying
12815 multiple registers, ensure that we are still using all
12816 parts of the object. If we find a piece of the object
12817 that is unused, we must arrange for an appropriate REG_DEAD
12818 note to be added for it. However, we can't just emit a USE
12819 and tag the note to it, since the register might actually
12820 be dead; so we recurse, and the recursive call then finds
12821 the previous insn that used this register. */
12823 if (place && regno < FIRST_PSEUDO_REGISTER
12824 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
12826 unsigned int endregno
12827 = regno + hard_regno_nregs[regno]
12828 [GET_MODE (XEXP (note, 0))];
12829 int all_used = 1;
12830 unsigned int i;
12832 for (i = regno; i < endregno; i++)
12833 if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
12834 && ! find_regno_fusage (place, USE, i))
12835 || dead_or_set_regno_p (place, i))
12836 all_used = 0;
12838 if (! all_used)
12840 /* Put only REG_DEAD notes for pieces that are
12841 not already dead or set. */
12843 for (i = regno; i < endregno;
12844 i += hard_regno_nregs[i][reg_raw_mode[i]])
12846 rtx piece = regno_reg_rtx[i];
12847 basic_block bb = this_basic_block;
12849 if (! dead_or_set_p (place, piece)
12850 && ! reg_bitfield_target_p (piece,
12851 PATTERN (place)))
12853 rtx new_note
12854 = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
12856 distribute_notes (new_note, place, place,
12857 NULL_RTX);
12859 else if (! refers_to_regno_p (i, i + 1,
12860 PATTERN (place), 0)
12861 && ! find_regno_fusage (place, USE, i))
12862 for (tem = PREV_INSN (place); ;
12863 tem = PREV_INSN (tem))
12865 if (! INSN_P (tem))
12867 if (tem == BB_HEAD (bb))
12869 SET_BIT (refresh_blocks,
12870 this_basic_block->index);
12871 break;
12873 continue;
12875 if (dead_or_set_p (tem, piece)
12876 || reg_bitfield_target_p (piece,
12877 PATTERN (tem)))
12879 REG_NOTES (tem)
12880 = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
12881 REG_NOTES (tem));
12882 break;
12888 place = 0;
12892 break;
12894 default:
12895 /* Any other notes should not be present at this point in the
12896 compilation. */
12897 abort ();
12900 if (place)
12902 XEXP (note, 1) = REG_NOTES (place);
12903 REG_NOTES (place) = note;
12905 else if ((REG_NOTE_KIND (note) == REG_DEAD
12906 || REG_NOTE_KIND (note) == REG_UNUSED)
12907 && GET_CODE (XEXP (note, 0)) == REG)
12908 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
12910 if (place2)
12912 if ((REG_NOTE_KIND (note) == REG_DEAD
12913 || REG_NOTE_KIND (note) == REG_UNUSED)
12914 && GET_CODE (XEXP (note, 0)) == REG)
12915 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
12917 REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
12918 REG_NOTE_KIND (note),
12919 XEXP (note, 0),
12920 REG_NOTES (place2));
12925 /* Similarly to above, distribute the LOG_LINKS that used to be present on
12926 I3, I2, and I1 to new locations. This is also called to add a link
12927 pointing at I3 when I3's destination is changed. */
12929 static void
12930 distribute_links (rtx links)
12932 rtx link, next_link;
12934 for (link = links; link; link = next_link)
12936 rtx place = 0;
12937 rtx insn;
12938 rtx set, reg;
12940 next_link = XEXP (link, 1);
12942 /* If the insn that this link points to is a NOTE or isn't a single
12943 set, ignore it. In the latter case, it isn't clear what we
12944 can do other than ignore the link, since we can't tell which
12945 register it was for. Such links wouldn't be used by combine
12946 anyway.
12948 It is not possible for the destination of the target of the link to
12949 have been changed by combine. The only way that could happen is if
12950 we were to replace I3, I2, and I1 by I3 and I2. But in that case the
12951 destination of I2 also remains unchanged. */
12953 if (GET_CODE (XEXP (link, 0)) == NOTE
12954 || (set = single_set (XEXP (link, 0))) == 0)
12955 continue;
12957 reg = SET_DEST (set);
12958 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
12959 || GET_CODE (reg) == SIGN_EXTRACT
12960 || GET_CODE (reg) == STRICT_LOW_PART)
12961 reg = XEXP (reg, 0);
12963 /* A LOG_LINK is defined as being placed on the first insn that uses
12964 a register and points to the insn that sets the register. Start
12965 searching at the next insn after the target of the link and stop
12966 when we reach a set of the register or the end of the basic block.
12968 Note that this correctly handles the link that used to point from
12969 I3 to I2. Also note that not much searching is typically done here
12970 since most links don't point very far away. */
12972 for (insn = NEXT_INSN (XEXP (link, 0));
12973 (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
12974 || BB_HEAD (this_basic_block->next_bb) != insn));
12975 insn = NEXT_INSN (insn))
12976 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
12978 if (reg_referenced_p (reg, PATTERN (insn)))
12979 place = insn;
12980 break;
12982 else if (GET_CODE (insn) == CALL_INSN
12983 && find_reg_fusage (insn, USE, reg))
12985 place = insn;
12986 break;
12988 else if (INSN_P (insn) && reg_set_p (reg, insn))
12989 break;
12991 /* If we found a place to put the link, place it there unless there
12992 is already a link to the same insn as LINK at that point. */
12994 if (place)
12996 rtx link2;
12998 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
12999 if (XEXP (link2, 0) == XEXP (link, 0))
13000 break;
13002 if (link2 == 0)
13004 XEXP (link, 1) = LOG_LINKS (place);
13005 LOG_LINKS (place) = link;
13007 /* Set added_links_insn to the earliest insn we added a
13008 link to. */
13009 if (added_links_insn == 0
13010 || INSN_CUID (added_links_insn) > INSN_CUID (place))
13011 added_links_insn = place;
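/* For instance, when combining changes I3's destination, the link
   pointing at I3 must be re-homed: REG is I3's new SET_DEST, and
   starting just after I3 we scan forward for the first insn that
   references REG (or a call using it in its fusage), attaching the
   link to that insn's LOG_LINKS, and giving up if REG is set again
   or the basic block ends first.  */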
13017 /* Compute INSN_CUID for INSN, which is an insn made by combine. */
13019 static int
13020 insn_cuid (rtx insn)
13022 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
13023 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
13024 insn = NEXT_INSN (insn);
13026 if (INSN_UID (insn) > max_uid_cuid)
13027 abort ();
13029 return INSN_CUID (insn);
13032 void
13033 dump_combine_stats (FILE *file)
13035 fnotice
13036 (file,
13037 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
13038 combine_attempts, combine_merges, combine_extras, combine_successes);
13041 void
13042 dump_combine_total_stats (FILE *file)
13044 fnotice
13045 (file,
13046 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
13047 total_attempts, total_merges, total_extras, total_successes);