/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
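
/* An illustrative sketch of the substitution (the insns and register
   numbers here are invented for the example, not taken from any port):
   given a LOG_LINKS entry from insn B back to insn A, where

	A: (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	B: (set (mem:SI (reg:SI 100)) (const_int 0))

   combine substitutes A's SET_SRC for (reg:SI 100) in B, giving

	(set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (const_int 0))

   If that pattern is recognized by the machine description, the new
   insn replaces B, and A is deleted (assuming reg 100 was dead
   after B).  */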
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
  (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
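
/* A usage sketch (illustrative only): since cuids increase
   monotonically through the insn stream, whether one insn precedes
   another is a plain integer comparison; for example,
   INSN_CUID (insn) < last_call_cuid tests whether INSN was seen
   before the most recent CALL_INSN.  INSN_UIDs carry no such
   ordering guarantee.  */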
/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
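
/* A worked example (not from the original sources): on a host where
   HOST_BITS_PER_WIDE_INT == BITS_PER_WORD == 64, the direct shift
   "x << 64" is undefined in C, while the macro computes
   (x << 63) << 1, which is well defined and yields 0 -- exactly the
   wrapped value a 64-bit left shift by 64 is meant to produce.  Thus
   UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1 evaluates to a mask
   selecting the low word in either configuration.  */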
/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;
/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
static int need_refresh;
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the bits the constant would clear are already known to be zero.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
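
/* A made-up example of the redundancy test described above: if
   reg_last_set_value shows that register 100 was last set from a byte
   load on a machine that zero extends, so only its low 8 bits can be
   nonzero, then

	(set (reg:SI 100) (and:SI (reg:SI 100) (const_int 255)))

   is redundant: the AND would clear only bits already known to be
   zero.  The register number is invented for the illustration.  */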
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;
/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record the changes to be undone, as a chain of struct undo.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;
/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PARAMS ((rtx *, rtx));
static void do_SUBST_INT		PARAMS ((int *, int));
static void init_reg_last_arrays	PARAMS ((void));
static void setup_incoming_promotions	PARAMS ((void));
static void set_nonzero_bits_and_sign_copies  PARAMS ((rtx, rtx, void *));
static int cant_combine_insn_p		PARAMS ((rtx));
static int can_combine_p		PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p		PARAMS ((rtx));
static int combinable_i3pat		PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
static int contains_muldiv		PARAMS ((rtx));
static rtx try_combine			PARAMS ((rtx, rtx, rtx, int *));
static void undo_all			PARAMS ((void));
static void undo_commit			PARAMS ((void));
static rtx *find_split_point		PARAMS ((rtx *, rtx));
static rtx subst			PARAMS ((rtx, rtx, rtx, int, int));
static rtx combine_simplify_rtx		PARAMS ((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else	PARAMS ((rtx));
static rtx simplify_set			PARAMS ((rtx));
static rtx simplify_logical		PARAMS ((rtx, int));
static rtx expand_compound_operation	PARAMS ((rtx));
static rtx expand_field_assignment	PARAMS ((rtx));
static rtx make_extraction		PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
						 rtx, unsigned HOST_WIDE_INT, int,
						 int, int));
static rtx extract_left_shift		PARAMS ((rtx, int));
static rtx make_compound_operation	PARAMS ((rtx, enum rtx_code));
static int get_pos_from_mask		PARAMS ((unsigned HOST_WIDE_INT,
						 unsigned HOST_WIDE_INT *));
static rtx force_to_mode		PARAMS ((rtx, enum machine_mode,
						 unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond		PARAMS ((rtx, rtx *, rtx *));
static rtx known_cond			PARAMS ((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
static rtx make_field_assignment	PARAMS ((rtx));
static rtx apply_distributive_law	PARAMS ((rtx));
static rtx simplify_and_const_int	PARAMS ((rtx, enum machine_mode, rtx,
						 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PARAMS ((rtx, enum machine_mode));
static unsigned int num_sign_bit_copies  PARAMS ((rtx, enum machine_mode));
static int merge_outer_ops		PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
						 enum rtx_code, HOST_WIDE_INT,
						 enum machine_mode, int *));
static rtx simplify_shift_const	PARAMS ((rtx, enum rtx_code, enum machine_mode,
					 rtx, int));
static int recog_for_combine	PARAMS ((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PARAMS ((enum machine_mode, rtx));
static rtx gen_binary		PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx));
static enum rtx_code simplify_comparison  PARAMS ((enum rtx_code, rtx *, rtx *));
static void update_table_tick	PARAMS ((rtx));
static void record_value_for_reg  PARAMS ((rtx, rtx, rtx));
static void check_promoted_subreg PARAMS ((rtx, rtx));
static void record_dead_and_set_regs_1  PARAMS ((rtx, rtx, void *));
static void record_dead_and_set_regs  PARAMS ((rtx));
static int get_last_value_validate  PARAMS ((rtx *, rtx, int, int));
static rtx get_last_value	PARAMS ((rtx));
static int use_crosses_set_p	PARAMS ((rtx, int));
static void reg_dead_at_p_1	PARAMS ((rtx, rtx, void *));
static int reg_dead_at_p	PARAMS ((rtx, rtx));
static void move_deaths		PARAMS ((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PARAMS ((rtx, rtx));
static void distribute_notes	PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PARAMS ((rtx));
static void mark_used_regs_combine PARAMS ((rtx));
static int insn_cuid		PARAMS ((rtx));
static void record_promoted_value PARAMS ((rtx, rtx));
static rtx reversed_comparison	PARAMS ((rtx, enum machine_mode, rtx, rtx));
static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (into, newval)
     rtx *into, newval;
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
						 GET_MODE (oldval)))
	abort ();

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      if ((GET_CODE (oldval) == SUBREG
	   && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
	  || (GET_CODE (oldval) == ZERO_EXTEND
	      && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
	abort ();
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
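
/* Illustrative usage (a sketch; "pat" and "new_src" are invented
   names): to replace the source of a SET while keeping the change
   undoable, write

	SUBST (SET_SRC (pat), new_src);

   and later call undo_all () to roll back every recorded change if
   the rewritten insn fails to match, or undo_commit () to keep it.  */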
/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (into, newval)
     int *into, newval;
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return non-zero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = this_basic_block->head;
	   insn != NEXT_INSN (this_basic_block->end);
	   insn = next ? next : NEXT_INSN (insn))
	{
	  next = 0;

	  if (GET_CODE (insn) == CODE_LABEL)
	    label_tick++;

	  else if (INSN_P (insn))
	    {
	      /* See if we know about function return values before this
		 insn based upon SUBREG flags.  */
	      check_promoted_subreg (insn, PATTERN (insn));

	      /* Try this insn with each insn it links back to.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 NULL_RTX, &new_direct_jump_p)) != 0)
		  goto retry;

	      /* Try each sequence of three linked insns ending with this one.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		{
		  rtx link = XEXP (links, 0);

		  /* If the linked insn has been replaced by a note, then there
		     is no point in pursuing this chain any further.  */
		  if (GET_CODE (link) == NOTE)
		    continue;

		  for (nextlinks = LOG_LINKS (link);
		       nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, link,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

#ifdef HAVE_cc0
	      /* Try to combine a jump insn that uses CC0
		 with a preceding insn that sets CC0, and maybe with its
		 logical predecessor as well.
		 This is how we make decrement-and-branch insns.
		 We need this special code because data flow connections
		 via CC0 do not get entered in LOG_LINKS.  */

	      if (GET_CODE (insn) == JUMP_INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev)))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Do the same for an insn that explicitly references CC0.  */
	      if (GET_CODE (insn) == INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev))
		  && GET_CODE (PATTERN (insn)) == SET
		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Finally, see if any of the insns that this insn links to
		 explicitly references CC0.  If so, try this insn, that insn,
		 and its predecessor if it sets CC0.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if (GET_CODE (XEXP (links, 0)) == INSN
		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		    && GET_CODE (prev) == INSN
		    && sets_cc0_p (PATTERN (prev))
		    && (next = try_combine (insn, XEXP (links, 0),
					    prev, &new_direct_jump_p)) != 0)
		  goto retry;
#endif

	      /* Try combining an insn with two different insns whose results it
		 uses.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		for (nextlinks = XEXP (links, 1); nextlinks;
		     nextlinks = XEXP (nextlinks, 1))
		  if ((next = try_combine (insn, XEXP (links, 0),
					   XEXP (nextlinks, 0),
					   &new_direct_jump_p)) != 0)
		    goto retry;

	      if (GET_CODE (insn) != NOTE)
		record_dead_and_set_regs (insn);

	    retry:
	      ;
	    }
	}
    }

  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, i,
			     BASIC_BLOCK (i)->flags |= BB_DIRTY);
  new_direct_jump_p |= purge_all_dead_edges (0);
  delete_noop_moves (f);

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
				    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  unsigned int nregs = combine_max_regno;

  memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set, data)
     rtx x;
     rtx set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Don't call nonzero_bits if it cannot change anything.  */
	  if (reg_nonzero_bits[REGNO (x)] != ~(unsigned HOST_WIDE_INT) 0)
	    reg_nonzero_bits[REGNO (x)]
	      |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);
  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */
  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;
  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
/* Check if PAT is an insn - or a part of it - used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */
static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */
static int
contains_muldiv (x)
     rtx x;
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      switch (GET_RTX_CLASS (GET_CODE (x)))
	{
	case 'c':  case '<':  case '2':
	  return contains_muldiv (XEXP (x, 0))
	    || contains_muldiv (XEXP (x, 1));

	case '1':
	  return contains_muldiv (XEXP (x, 0));

	default:
	  return 0;
	}
    }
}
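
/* Illustrative cases (invented for the example): contains_muldiv
   returns 1 for (plus:SI (mult:SI (reg:SI 100) (const_int 3))
   (reg:SI 101)), but 0 for (mult:SI (reg:SI 100) (const_int 8)),
   since a multiplication by 8, a power of two, is really a shift.  */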
/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (insn)
     rtx insn;
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs.  The register
     allocator can usually handle such reg-reg moves by tying.  If we allow
     the combiner to make substitutions of hard regs, we risk aborting in
     reload on machines that have SMALL_REGISTER_CLASSES.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
	   && ! fixed_regs[REGNO (src)])
	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
	      && ! fixed_regs[REGNO (dest)])))
    return 1;

  return 0;
}
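
/* A made-up example of the hard-reg rule above: on a target whose
   register 0 is a non-fixed hard register,

	(set (reg:SI 0) (reg:SI 100))

   is rejected as a combination candidate, while a copy between two
   pseudos such as (set (reg:SI 101) (reg:SI 100)) is not.  */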
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
   new direct jump instruction.  */
static rtx
try_combine (i3, i2, i1, new_direct_jump_p)
     rtx i3, i2, i1;
     int *new_direct_jump_p;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 contains a MULT operation.  */
  int have_mult = 0;

  int maxreg;
  rtx temp;
  rtx link;
  int i;
1542 /* Exit early if one of the insns involved can't be used for
1543 combinations. */
1544 if (cant_combine_insn_p (i3)
1545 || cant_combine_insn_p (i2)
1546 || (i1 && cant_combine_insn_p (i1))
1547 /* We also can't do anything if I3 has a
1548 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1549 libcall. */
1550 #if 0
1551 /* ??? This gives worse code, and appears to be unnecessary, since no
1552 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1553 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1554 #endif
1556 return 0;
1558 combine_attempts++;
1559 undobuf.other_insn = 0;
1561 /* Reset the hard register usage information. */
1562 CLEAR_HARD_REG_SET (newpat_used_regs);
1564 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1565 code below, set I1 to be the earlier of the two insns. */
1566 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1567 temp = i1, i1 = i2, i2 = temp;
1569 added_links_insn = 0;
1571 /* First check for one important special-case that the code below will
1572 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
1573 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1574 we may be able to replace that destination with the destination of I3.
1575 This occurs in the common code where we compute both a quotient and
1576 remainder into a structure, in which case we want to do the computation
1577 directly into the structure to avoid register-register copies.
1579 Note that this case handles both multiple sets in I2 and also
1580 cases where I2 has a number of CLOBBER or PARALLELs.
1582 We make very conservative checks below and only try to handle the
1583 most common cases of this. For example, we only handle the case
1584 where I2 and I3 are adjacent to avoid making difficult register
1585 usage tests. */
1587 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1588 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1589 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1590 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1591 && GET_CODE (PATTERN (i2)) == PARALLEL
1592 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1593 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1594 below would need to check what is inside (and reg_overlap_mentioned_p
1595 doesn't support those codes anyway). Don't allow those destinations;
1596 the resulting insn isn't likely to be recognized anyway. */
1597 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1598 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1599 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1600 SET_DEST (PATTERN (i3)))
1601 && next_real_insn (i2) == i3)
1603 rtx p2 = PATTERN (i2);
1605 /* Make sure that the destination of I3,
1606 which we are going to substitute into one output of I2,
1607 is not used within another output of I2. We must avoid making this:
1608 (parallel [(set (mem (reg 69)) ...)
1609 (set (reg 69) ...)])
1610 which is not well-defined as to order of actions.
1611 (Besides, reload can't handle output reloads for this.)
1613 The problem can also happen if the dest of I3 is a memory ref,
1614 if another dest in I2 is an indirect memory ref. */
1615 for (i = 0; i < XVECLEN (p2, 0); i++)
1616 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1617 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1618 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1619 SET_DEST (XVECEXP (p2, 0, i))))
1620 break;
1622 if (i == XVECLEN (p2, 0))
1623 for (i = 0; i < XVECLEN (p2, 0); i++)
1624 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1625 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1626 && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1628 combine_merges++;
1630 subst_insn = i3;
1631 subst_low_cuid = INSN_CUID (i2);
1633 added_sets_2 = added_sets_1 = 0;
1634 i2dest = SET_SRC (PATTERN (i3));
1636 /* Replace the dest in I2 with our dest and make the resulting
1637 insn the new pattern for I3. Then skip to where we
1638 validate the pattern. Everything was set up above. */
1639 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1640 SET_DEST (PATTERN (i3)));
1642 newpat = p2;
1643 i3_subst_into_i2 = 1;
1644 goto validate_replacement;
1648 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1649 one of those words to another constant, merge them by making a new
1650 constant. */
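/* A hedged sketch of this transformation (assumes a 64-bit HOST_WIDE_INT,
   a little-endian target with BITS_PER_WORD == 32, and invented register
   numbers):
       I2: (set (reg:DI 70) (const_int 0))
       I3: (set (subreg:SI (reg:DI 70) 0) (const_int 5))
   is merged below into a single (set (reg:DI 70) (const_int 5)) by
   recomputing the double-word constant with immed_double_const.  */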
1651 if (i1 == 0
1652 && (temp = single_set (i2)) != 0
1653 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1654 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1655 && GET_CODE (SET_DEST (temp)) == REG
1656 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1657 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1658 && GET_CODE (PATTERN (i3)) == SET
1659 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1660 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1661 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1662 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1663 && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1665 HOST_WIDE_INT lo, hi;
1667 if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1668 lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1669 else
1671 lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1672 hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1675 if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1677 /* We don't handle the case of the target word being wider
1678 than a host wide int. */
1679 if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1680 abort ();
1682 lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1683 lo |= (INTVAL (SET_SRC (PATTERN (i3)))
1684 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1686 else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1687 hi = INTVAL (SET_SRC (PATTERN (i3)));
1688 else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1690 int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1691 >> (HOST_BITS_PER_WIDE_INT - 1));
1693 lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1694 (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1695 lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1696 (INTVAL (SET_SRC (PATTERN (i3)))));
1697 if (hi == sign)
1698 hi = lo < 0 ? -1 : 0;
1700 else
1701 /* We don't handle the case of the higher word not fitting
1702 entirely in either hi or lo. */
1703 abort ();
1705 combine_merges++;
1706 subst_insn = i3;
1707 subst_low_cuid = INSN_CUID (i2);
1708 added_sets_2 = added_sets_1 = 0;
1709 i2dest = SET_DEST (temp);
1711 SUBST (SET_SRC (temp),
1712 immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1714 newpat = PATTERN (i2);
1715 goto validate_replacement;
1718 #ifndef HAVE_cc0
1719 /* If we have no I1 and I2 looks like:
1720 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1721 (set Y OP)])
1722 make up a dummy I1 that is
1723 (set Y OP)
1724 and change I2 to be
1725 (set (reg:CC X) (compare:CC Y (const_int 0)))
1727 (We can ignore any trailing CLOBBERs.)
1729 This undoes a previous combination and allows us to match a branch-and-
1730 decrement insn. */
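/* A concrete, made-up instance of the above:
       (parallel [(set (reg:CC 24)
                       (compare:CC (plus:SI (reg:SI 60) (const_int -1))
                                   (const_int 0)))
                  (set (reg:SI 60) (plus:SI (reg:SI 60) (const_int -1)))])
   yields the dummy
       I1: (set (reg:SI 60) (plus:SI (reg:SI 60) (const_int -1)))
   and I2 becomes
       (set (reg:CC 24) (compare:CC (reg:SI 60) (const_int 0))).  */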
1732 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1733 && XVECLEN (PATTERN (i2), 0) >= 2
1734 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1735 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1736 == MODE_CC)
1737 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1738 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1739 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1740 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1741 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1742 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1744 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1745 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1746 break;
1748 if (i == 1)
1750 /* We make I1 with the same INSN_UID as I2. This gives it
1751 the same INSN_CUID for value tracking. Our fake I1 will
1752 never appear in the insn stream so giving it the same INSN_UID
1753 as I2 will not cause a problem. */
1755 subst_prev_insn = i1
1756 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1757 BLOCK_FOR_INSN (i2), INSN_SCOPE (i2),
1758 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1759 NULL_RTX);
1761 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1762 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1763 SET_DEST (PATTERN (i1)));
1766 #endif
1768 /* Verify that I2 and I1 are valid for combining. */
1769 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1770 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1772 undo_all ();
1773 return 0;
1776 /* Record whether I2DEST is used in I2SRC and similarly for the other
1777 cases. Knowing this will help in register status updating below. */
1778 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1779 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1780 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1782 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1783 in I2SRC. */
1784 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1786 /* Ensure that I3's pattern can be the destination of combines. */
1787 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1788 i1 && i2dest_in_i1src && i1_feeds_i3,
1789 &i3dest_killed))
1791 undo_all ();
1792 return 0;
1795 /* See if any of the insns is a MULT operation. Unless one is, we will
1796 reject a combination that is, since it must be slower. Be conservative
1797 here. */
1798 if (GET_CODE (i2src) == MULT
1799 || (i1 != 0 && GET_CODE (i1src) == MULT)
1800 || (GET_CODE (PATTERN (i3)) == SET
1801 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1802 have_mult = 1;
1804 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1805 We used to do this EXCEPT in one case: I3 has a post-inc in an
1806 output operand. However, that exception can give rise to insns like
1807 mov r3,(r3)+
1808 which is a famous insn on the PDP-11 where the value of r3 used as the
1809 source was model-dependent. Avoid this sort of thing. */
1811 #if 0
1812 if (!(GET_CODE (PATTERN (i3)) == SET
1813 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1814 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1815 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1816 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1817 /* It's not the exception. */
1818 #endif
1819 #ifdef AUTO_INC_DEC
1820 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1821 if (REG_NOTE_KIND (link) == REG_INC
1822 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1823 || (i1 != 0
1824 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1826 undo_all ();
1827 return 0;
1829 #endif
1831 /* See if the SETs in I1 or I2 need to be kept around in the merged
1832 instruction: whenever the value set there is still needed past I3.
1833 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1835 For the SET in I1, we have two cases: If I1 and I2 independently
1836 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1837 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1838 in I1 needs to be kept around unless I1DEST dies or is set in either
1839 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1840 I1DEST. If so, we know I1 feeds into I2. */
1842 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1844 added_sets_1
1845 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1846 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1848 /* If the set in I2 needs to be kept around, we must make a copy of
1849 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1850 PATTERN (I2), we are only substituting for the original I1DEST, not into
1851 an already-substituted copy. This also prevents making self-referential
1852 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1853 I2DEST. */
1855 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1856 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1857 : PATTERN (i2));
1859 if (added_sets_2)
1860 i2pat = copy_rtx (i2pat);
1862 combine_merges++;
1864 /* Substitute in the latest insn for the regs set by the earlier ones. */
1866 maxreg = max_reg_num ();
1868 subst_insn = i3;
1870 /* It is possible that the source of I2 or I1 may be performing an
1871 unneeded operation, such as a ZERO_EXTEND of something that is known
1872 to have the high part zero. Handle that case by letting subst look at
1873 the innermost one of them.
1875 Another way to do this would be to have a function that tries to
1876 simplify a single insn instead of merging two or more insns. We don't
1877 do this because of the potential of infinite loops and because
1878 of the potential extra memory required. However, doing it the way
1879 we are is a bit of a kludge and doesn't catch all cases.
1881 But only do this if -fexpensive-optimizations since it slows things down
1882 and doesn't usually win. */
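/* An invented illustration of the simplification: if I1 is
       (set (reg:SI 65) (and:SI (reg:SI 64) (const_int 255)))
   and nonzero_bits already proves that only the low 8 bits of
   (reg:SI 64) can be set, the subst call below reduces i1src to just
   (reg:SI 64) before any merging is attempted.  */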
1884 if (flag_expensive_optimizations)
1886 /* Pass pc_rtx so no substitutions are done, just simplifications.
1887 The cases that we are interested in here do not involve the few
1888 cases where is_replaced is checked. */
1889 if (i1)
1891 subst_low_cuid = INSN_CUID (i1);
1892 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1894 else
1896 subst_low_cuid = INSN_CUID (i2);
1897 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1901 #ifndef HAVE_cc0
1902 /* Many machines that don't use CC0 have insns that can both perform an
1903 arithmetic operation and set the condition code. These operations will
1904 be represented as a PARALLEL with the first element of the vector
1905 being a COMPARE of an arithmetic operation with the constant zero.
1906 The second element of the vector will set some pseudo to the result
1907 of the same arithmetic operation. If we simplify the COMPARE, we won't
1908 match such a pattern and so will generate an extra insn. Here we test
1909 for this case, where both the comparison and the operation result are
1910 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1911 I2SRC. Later we will make the PARALLEL that contains I2. */
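/* Sketch of the shape being preserved (invented registers and modes):
       I2: (set (reg:SI 64) (plus:SI (reg:SI 62) (reg:SI 63)))
       I3: (set (reg:CC 17) (compare:CC (reg:SI 64) (const_int 0)))
   with (reg:SI 64) still live after I3.  I2SRC replaces I2DEST inside
   the COMPARE, and the SET of (reg:SI 64) is re-added when the PARALLEL
   is built, so an add-and-set-flags pattern can match.  */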
1913 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1914 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1915 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1916 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1918 #ifdef EXTRA_CC_MODES
1919 rtx *cc_use;
1920 enum machine_mode compare_mode;
1921 #endif
1923 newpat = PATTERN (i3);
1924 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1926 i2_is_used = 1;
1928 #ifdef EXTRA_CC_MODES
1929 /* See if a COMPARE with the operand we substituted in should be done
1930 with the mode that is currently being used. If not, do the same
1931 processing we do in `subst' for a SET; namely, if the destination
1932 is used only once, try to replace it with a register of the proper
1933 mode and also replace the COMPARE. */
1934 if (undobuf.other_insn == 0
1935 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1936 &undobuf.other_insn))
1937 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1938 i2src, const0_rtx))
1939 != GET_MODE (SET_DEST (newpat))))
1941 unsigned int regno = REGNO (SET_DEST (newpat));
1942 rtx new_dest = gen_rtx_REG (compare_mode, regno);
1944 if (regno < FIRST_PSEUDO_REGISTER
1945 || (REG_N_SETS (regno) == 1 && ! added_sets_2
1946 && ! REG_USERVAR_P (SET_DEST (newpat))))
1948 if (regno >= FIRST_PSEUDO_REGISTER)
1949 SUBST (regno_reg_rtx[regno], new_dest);
1951 SUBST (SET_DEST (newpat), new_dest);
1952 SUBST (XEXP (*cc_use, 0), new_dest);
1953 SUBST (SET_SRC (newpat),
1954 gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
1956 else
1957 undobuf.other_insn = 0;
1959 #endif
1961 else
1962 #endif
1964 n_occurrences = 0; /* `subst' counts here */
1966 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1967 need to make a unique copy of I2SRC each time we substitute it
1968 to avoid self-referential rtl. */
1970 subst_low_cuid = INSN_CUID (i2);
1971 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1972 ! i1_feeds_i3 && i1dest_in_i1src);
1973 substed_i2 = 1;
1975 /* Record whether i2's body now appears within i3's body. */
1976 i2_is_used = n_occurrences;
1979 /* If we already got a failure, don't try to do more. Otherwise,
1980 try to substitute in I1 if we have it. */
1982 if (i1 && GET_CODE (newpat) != CLOBBER)
1984 /* Before we can do this substitution, we must redo the test done
1985 above (see detailed comments there) that ensures that I1DEST
1986 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1988 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1989 0, (rtx*) 0))
1991 undo_all ();
1992 return 0;
1995 n_occurrences = 0;
1996 subst_low_cuid = INSN_CUID (i1);
1997 newpat = subst (newpat, i1dest, i1src, 0, 0);
1998 substed_i1 = 1;
2001 /* Fail if an autoincrement side-effect has been duplicated. Be careful
2002 to count all the ways that I2SRC and I1SRC can be used. */
2003 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
2004 && i2_is_used + added_sets_2 > 1)
2005 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2006 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
2007 > 1))
2008 /* Fail if we tried to make a new register (we used to abort, but there's
2009 really no reason to). */
2010 || max_reg_num () != maxreg
2011 /* Fail if we couldn't do something and have a CLOBBER. */
2012 || GET_CODE (newpat) == CLOBBER
2013 /* Fail if this new pattern is a MULT and we didn't have one before
2014 at the outer level. */
2015 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
2016 && ! have_mult))
2018 undo_all ();
2019 return 0;
2022 /* If the actions of the earlier insns must be kept
2023 in addition to substituting them into the latest one,
2024 we must make a new PARALLEL for the latest insn
2025 to hold the additional SETs. */
2027 if (added_sets_1 || added_sets_2)
2029 combine_extras++;
2031 if (GET_CODE (newpat) == PARALLEL)
2033 rtvec old = XVEC (newpat, 0);
2034 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
2035 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2036 memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2037 sizeof (old->elem[0]) * old->num_elem);
2039 else
2041 rtx old = newpat;
2042 total_sets = 1 + added_sets_1 + added_sets_2;
2043 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2044 XVECEXP (newpat, 0, 0) = old;
2047 if (added_sets_1)
2048 XVECEXP (newpat, 0, --total_sets)
2049 = (GET_CODE (PATTERN (i1)) == PARALLEL
2050 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
2052 if (added_sets_2)
2054 /* If there is no I1, use I2's body as is. We used to also not do
2055 the subst call below if I2 was substituted into I3,
2056 but that could lose a simplification. */
2057 if (i1 == 0)
2058 XVECEXP (newpat, 0, --total_sets) = i2pat;
2059 else
2060 /* See comment where i2pat is assigned. */
2061 XVECEXP (newpat, 0, --total_sets)
2062 = subst (i2pat, i1dest, i1src, 0, 0);
2066 /* We come here when we are replacing a destination in I2 with the
2067 destination of I3. */
2068 validate_replacement:
2070 /* Note which hard regs this insn has as inputs. */
2071 mark_used_regs_combine (newpat);
2073 /* Is the result of combination a valid instruction? */
2074 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2076 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2077 the second SET's destination is a register that is unused. In that case,
2078 we just need the first SET. This can occur when simplifying a divmod
2079 insn. We *must* test for this case here because the code below that
2080 splits two independent SETs doesn't handle this case correctly when it
2081 updates the register status. Also check the case where the first
2082 SET's destination is unused. That would not cause incorrect code, but
2083 does cause an unneeded insn to remain. */
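/* E.g. (hypothetical): if NEWPAT is
       (parallel [(set (reg:SI 65) (udiv:SI (reg:SI 60) (reg:SI 61)))
                  (set (reg:SI 66) (umod:SI (reg:SI 60) (reg:SI 61)))])
   and I3 carries a REG_UNUSED note for (reg:SI 66), only the first SET
   is kept and re-recognized as the whole pattern.  */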
2085 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2086 && XVECLEN (newpat, 0) == 2
2087 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2088 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2089 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2090 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2091 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2092 && asm_noperands (newpat) < 0)
2094 newpat = XVECEXP (newpat, 0, 0);
2095 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2098 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2099 && XVECLEN (newpat, 0) == 2
2100 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2101 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2102 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2103 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2104 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2105 && asm_noperands (newpat) < 0)
2107 newpat = XVECEXP (newpat, 0, 1);
2108 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2111 /* If we were combining three insns and the result is a simple SET
2112 with no ASM_OPERANDS that wasn't recognized, try to split it into two
2113 insns. There are two ways to do this. It can be split using a
2114 machine-specific method (like when you have an addition of a large
2115 constant) or by combine in the function find_split_point. */
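/* A hypothetical instance of the machine-specific method: if
       (set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 100000)))
   is not a valid insn for the target, its splitters might produce
       (set (reg:SI 72) (const_int 100000))
       (set (reg:SI 70) (plus:SI (reg:SI 71) (reg:SI 72)))
   with (reg:SI 72) standing in for the scratch; failing that,
   find_split_point picks a sub-rtx to compute separately.  */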
2117 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2118 && asm_noperands (newpat) < 0)
2120 rtx m_split, *split;
2121 rtx ni2dest = i2dest;
2123 /* See if the MD file can split NEWPAT. If it can't, see if letting it
2124 use I2DEST as a scratch register will help. In the latter case,
2125 convert I2DEST to the mode of the source of NEWPAT if we can. */
2127 m_split = split_insns (newpat, i3);
2129 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2130 inputs of NEWPAT. */
2132 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2133 possible to try that as a scratch reg. This would require adding
2134 more code to make it work though. */
2136 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2138 /* If I2DEST is a hard register or the only use of a pseudo,
2139 we can change its mode. */
2140 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
2141 && GET_MODE (SET_DEST (newpat)) != VOIDmode
2142 && GET_CODE (i2dest) == REG
2143 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2144 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2145 && ! REG_USERVAR_P (i2dest))))
2146 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2147 REGNO (i2dest));
2149 m_split = split_insns (gen_rtx_PARALLEL
2150 (VOIDmode,
2151 gen_rtvec (2, newpat,
2152 gen_rtx_CLOBBER (VOIDmode,
2153 ni2dest))),
2154 i3);
2155 /* If the split with the mode-changed register didn't work, try
2156 the original register. */
2157 if (! m_split && ni2dest != i2dest)
2159 ni2dest = i2dest;
2160 m_split = split_insns (gen_rtx_PARALLEL
2161 (VOIDmode,
2162 gen_rtvec (2, newpat,
2163 gen_rtx_CLOBBER (VOIDmode,
2164 i2dest))),
2165 i3);
2169 if (m_split && NEXT_INSN (m_split) == NULL_RTX)
2171 m_split = PATTERN (m_split);
2172 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2173 if (insn_code_number >= 0)
2174 newpat = m_split;
2176 else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
2177 && (next_real_insn (i2) == i3
2178 || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
2180 rtx i2set, i3set;
2181 rtx newi3pat = PATTERN (NEXT_INSN (m_split));
2182 newi2pat = PATTERN (m_split);
2184 i3set = single_set (NEXT_INSN (m_split));
2185 i2set = single_set (m_split);
2187 /* In case we changed the mode of I2DEST, replace it in the
2188 pseudo-register table here. We can't do it above in case this
2189 code doesn't get executed and we do a split the other way. */
2191 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2192 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2194 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2196 /* If I2 or I3 has multiple SETs, we won't know how to track
2197 register status, so don't use these insns. If I2's destination
2198 is used between I2 and I3, we also can't use these insns. */
2200 if (i2_code_number >= 0 && i2set && i3set
2201 && (next_real_insn (i2) == i3
2202 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2203 insn_code_number = recog_for_combine (&newi3pat, i3,
2204 &new_i3_notes);
2205 if (insn_code_number >= 0)
2206 newpat = newi3pat;
2208 /* It is possible that both insns now set the destination of I3.
2209 If so, we must show an extra use of it. */
2211 if (insn_code_number >= 0)
2213 rtx new_i3_dest = SET_DEST (i3set);
2214 rtx new_i2_dest = SET_DEST (i2set);
2216 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2217 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2218 || GET_CODE (new_i3_dest) == SUBREG)
2219 new_i3_dest = XEXP (new_i3_dest, 0);
2221 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2222 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2223 || GET_CODE (new_i2_dest) == SUBREG)
2224 new_i2_dest = XEXP (new_i2_dest, 0);
2226 if (GET_CODE (new_i3_dest) == REG
2227 && GET_CODE (new_i2_dest) == REG
2228 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2229 REG_N_SETS (REGNO (new_i2_dest))++;
2233 /* If we can split it and use I2DEST, go ahead and see if that
2234 helps things be recognized. Verify that none of the registers
2235 are set between I2 and I3. */
2236 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2237 #ifdef HAVE_cc0
2238 && GET_CODE (i2dest) == REG
2239 #endif
2240 /* We need I2DEST in the proper mode. If it is a hard register
2241 or the only use of a pseudo, we can change its mode. */
2242 && (GET_MODE (*split) == GET_MODE (i2dest)
2243 || GET_MODE (*split) == VOIDmode
2244 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2245 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2246 && ! REG_USERVAR_P (i2dest)))
2247 && (next_real_insn (i2) == i3
2248 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2249 /* We can't overwrite I2DEST if its value is still used by
2250 NEWPAT. */
2251 && ! reg_referenced_p (i2dest, newpat))
2253 rtx newdest = i2dest;
2254 enum rtx_code split_code = GET_CODE (*split);
2255 enum machine_mode split_mode = GET_MODE (*split);
2257 /* Get NEWDEST as a register in the proper mode. We have already
2258 validated that we can do this. */
2259 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2261 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2263 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2264 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2267 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2268 an ASHIFT. This can occur if it was inside a PLUS and hence
2269 appeared to be a memory address. This is a kludge. */
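/* E.g. (invented): (mult:SI (reg:SI 64) (const_int 8)) found inside
   what looked like an address becomes
   (ashift:SI (reg:SI 64) (const_int 3)), since exact_log2 (8) == 3.  */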
2270 if (split_code == MULT
2271 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2272 && INTVAL (XEXP (*split, 1)) > 0
2273 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2275 SUBST (*split, gen_rtx_ASHIFT (split_mode,
2276 XEXP (*split, 0), GEN_INT (i)));
2277 /* Update split_code because we may not have a multiply
2278 anymore. */
2279 split_code = GET_CODE (*split);
2282 #ifdef INSN_SCHEDULING
2283 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2284 be written as a ZERO_EXTEND. */
2285 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2287 #ifdef LOAD_EXTEND_OP
2288 /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2289 what it really is. */
2290 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
2291 == SIGN_EXTEND)
2292 SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
2293 SUBREG_REG (*split)));
2294 else
2295 #endif
2296 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2297 SUBREG_REG (*split)));
2299 #endif
2301 newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2302 SUBST (*split, newdest);
2303 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2305 /* If the split point was a MULT and we didn't have one before,
2306 don't use one now. */
2307 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2308 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2312 /* Check for a case where we loaded from memory in a narrow mode and
2313 then sign extended it, but we need both registers. In that case,
2314 we have a PARALLEL with both loads from the same memory location.
2315 We can split this into a load from memory followed by a register-register
2316 copy. This saves at least one insn, more if register allocation can
2317 eliminate the copy.
2319 We cannot do this if the destination of the second assignment is
2320 a register that we have already assumed is zero-extended. Similarly
2321 for a SUBREG of such a register. */
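/* The shape being handled here (made-up registers; the lowpart SUBREG
   assumes a little-endian target):
       (parallel [(set (reg:SI 65) (sign_extend:SI (mem:HI (reg:SI 70))))
                  (set (reg:HI 66) (mem:HI (reg:SI 70)))])
   splits into
       NEWI2PAT: (set (reg:SI 65) (sign_extend:SI (mem:HI (reg:SI 70))))
       NEWPAT:   (set (reg:HI 66) (subreg:HI (reg:SI 65) 0))  */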
2323 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2324 && GET_CODE (newpat) == PARALLEL
2325 && XVECLEN (newpat, 0) == 2
2326 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2327 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2328 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2329 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2330 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2331 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2332 INSN_CUID (i2))
2333 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2334 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2335 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2336 (GET_CODE (temp) == REG
2337 && reg_nonzero_bits[REGNO (temp)] != 0
2338 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2339 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2340 && (reg_nonzero_bits[REGNO (temp)]
2341 != GET_MODE_MASK (word_mode))))
2342 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2343 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2344 (GET_CODE (temp) == REG
2345 && reg_nonzero_bits[REGNO (temp)] != 0
2346 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2347 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2348 && (reg_nonzero_bits[REGNO (temp)]
2349 != GET_MODE_MASK (word_mode)))))
2350 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2351 SET_SRC (XVECEXP (newpat, 0, 1)))
2352 && ! find_reg_note (i3, REG_UNUSED,
2353 SET_DEST (XVECEXP (newpat, 0, 0))))
2355 rtx ni2dest;
2357 newi2pat = XVECEXP (newpat, 0, 0);
2358 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2359 newpat = XVECEXP (newpat, 0, 1);
2360 SUBST (SET_SRC (newpat),
2361 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2362 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2364 if (i2_code_number >= 0)
2365 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2367 if (insn_code_number >= 0)
2369 rtx insn;
2370 rtx link;
2372 /* If we will be able to accept this, we have made a change to the
2373 destination of I3. This can invalidate a LOG_LINKS entry pointing
2374 to I3. No other part of combine.c makes such a transformation.
2376 The new I3 will have a destination that was previously the
2377 destination of I1 or I2 and which was used in I2 or I3. Call
2378 distribute_links to make a LOG_LINK from the next use of
2379 that destination. */
2381 PATTERN (i3) = newpat;
2382 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
2384 /* I3 now uses what used to be its destination and which is
2385 now I2's destination. That means we need a LOG_LINK from
2386 I3 to I2. But we used to have one, so we still will.
2388 However, some later insn might be using I2's dest and have
2389 a LOG_LINK pointing at I3. We must remove this link.
2390 The simplest way to remove the link is to point it at I1,
2391 which we know will be a NOTE. */
2393 for (insn = NEXT_INSN (i3);
2394 insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2395 || insn != this_basic_block->next_bb->head);
2396 insn = NEXT_INSN (insn))
2398 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2400 for (link = LOG_LINKS (insn); link;
2401 link = XEXP (link, 1))
2402 if (XEXP (link, 0) == i3)
2403 XEXP (link, 0) = i1;
2405 break;
2411 /* Similarly, check for a case where we have a PARALLEL of two independent
2412 SETs but we started with three insns. In this case, we can do the sets
2413 as two separate insns. This case occurs when some SET allows two
2414 other insns to combine, but the destination of that SET is still live. */
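/* E.g. (invented): a NEWPAT of the form
       (parallel [(set (reg:SI 64) (plus:SI (reg:SI 60) (reg:SI 61)))
                  (set (reg:SI 65) (minus:SI (reg:SI 62) (reg:SI 63)))])
   is unlikely to match as one insn, but the two SETs touch disjoint
   registers, so they are recognized separately as NEWI2PAT and NEWPAT
   below.  */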
2416 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2417 && GET_CODE (newpat) == PARALLEL
2418 && XVECLEN (newpat, 0) == 2
2419 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2420 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2421 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2422 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2423 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2424 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2425 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2426 INSN_CUID (i2))
2427 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2428 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2429 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2430 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2431 XVECEXP (newpat, 0, 0))
2432 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2433 XVECEXP (newpat, 0, 1))
2434 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2435 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2437 /* Normally, it doesn't matter which of the two is done first,
2438 but it does if one references cc0. In that case, it has to
2439 be first. */
2440 #ifdef HAVE_cc0
2441 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2443 newi2pat = XVECEXP (newpat, 0, 0);
2444 newpat = XVECEXP (newpat, 0, 1);
2446 else
2447 #endif
2449 newi2pat = XVECEXP (newpat, 0, 1);
2450 newpat = XVECEXP (newpat, 0, 0);
2453 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2455 if (i2_code_number >= 0)
2456 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2459 /* If it still isn't recognized, fail and change things back the way they
2460 were. */
2461 if ((insn_code_number < 0
2462 /* Is the result a reasonable ASM_OPERANDS? */
2463 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2465 undo_all ();
2466 return 0;
2469 /* If we had to change another insn, make sure it is valid also. */
2470 if (undobuf.other_insn)
2472 rtx other_pat = PATTERN (undobuf.other_insn);
2473 rtx new_other_notes;
2474 rtx note, next;
2476 CLEAR_HARD_REG_SET (newpat_used_regs);
2478 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2479 &new_other_notes);
2481 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2483 undo_all ();
2484 return 0;
2487 PATTERN (undobuf.other_insn) = other_pat;
2489 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2490 are still valid. Then add any non-duplicate notes added by
2491 recog_for_combine. */
2492 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2494 next = XEXP (note, 1);
2496 if (REG_NOTE_KIND (note) == REG_UNUSED
2497 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2499 if (GET_CODE (XEXP (note, 0)) == REG)
2500 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2502 remove_note (undobuf.other_insn, note);
2506 for (note = new_other_notes; note; note = XEXP (note, 1))
2507 if (GET_CODE (XEXP (note, 0)) == REG)
2508 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2510 distribute_notes (new_other_notes, undobuf.other_insn,
2511 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2513 #ifdef HAVE_cc0
2514 /* If I2 is the CC0 setter and I3 is the CC0 user, then check that
2515 they are adjacent to each other, and fail if they are not. */
2517 rtx p = prev_nonnote_insn (i3);
2518 if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2519 && sets_cc0_p (newi2pat))
2521 undo_all ();
2522 return 0;
2525 #endif
2527 /* We now know that we can do this combination. Merge the insns and
2528 update the status of registers and LOG_LINKS. */
2531 rtx i3notes, i2notes, i1notes = 0;
2532 rtx i3links, i2links, i1links = 0;
2533 rtx midnotes = 0;
2534 unsigned int regno;
2535 /* Compute which registers we expect to eliminate. newi2pat may be setting
2536 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2537 same as i3dest, in which case newi2pat may be setting i1dest. */
2538 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2539 || i2dest_in_i2src || i2dest_in_i1src
2540 ? 0 : i2dest);
2541 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2542 || (newi2pat && reg_set_p (i1dest, newi2pat))
2543 ? 0 : i1dest);
2545 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2546 clear them. */
2547 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2548 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2549 if (i1)
2550 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2552 /* Ensure that we do not have something that should not be shared but
2553 occurs multiple times in the new insns. Check this by first
2554 resetting all the `used' flags and then copying anything that is shared. */
2556 reset_used_flags (i3notes);
2557 reset_used_flags (i2notes);
2558 reset_used_flags (i1notes);
2559 reset_used_flags (newpat);
2560 reset_used_flags (newi2pat);
2561 if (undobuf.other_insn)
2562 reset_used_flags (PATTERN (undobuf.other_insn));
2564 i3notes = copy_rtx_if_shared (i3notes);
2565 i2notes = copy_rtx_if_shared (i2notes);
2566 i1notes = copy_rtx_if_shared (i1notes);
2567 newpat = copy_rtx_if_shared (newpat);
2568 newi2pat = copy_rtx_if_shared (newi2pat);
2569 if (undobuf.other_insn)
2570 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2572 INSN_CODE (i3) = insn_code_number;
2573 PATTERN (i3) = newpat;
2575 if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
2577 rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2579 reset_used_flags (call_usage);
2580 call_usage = copy_rtx (call_usage);
2582 if (substed_i2)
2583 replace_rtx (call_usage, i2dest, i2src);
2585 if (substed_i1)
2586 replace_rtx (call_usage, i1dest, i1src);
2588 CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2591 if (undobuf.other_insn)
2592 INSN_CODE (undobuf.other_insn) = other_code_number;
2594 /* We had one special case above where I2 had more than one set and
2595 we replaced a destination of one of those sets with the destination
2596 of I3. In that case, we have to update LOG_LINKS of insns later
2597 in this basic block. Note that this (expensive) case is rare.
2599 Also, in this case, we must pretend that all REG_NOTEs for I2
2600 actually came from I3, so that REG_UNUSED notes from I2 will be
2601 properly handled. */
2603 if (i3_subst_into_i2)
2605 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2606 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2607 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2608 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2609 && ! find_reg_note (i2, REG_UNUSED,
2610 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2611 for (temp = NEXT_INSN (i2);
2612 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2613 || this_basic_block->head != temp);
2614 temp = NEXT_INSN (temp))
2615 if (temp != i3 && INSN_P (temp))
2616 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2617 if (XEXP (link, 0) == i2)
2618 XEXP (link, 0) = i3;
2620 if (i3notes)
2622 rtx link = i3notes;
2623 while (XEXP (link, 1))
2624 link = XEXP (link, 1);
2625 XEXP (link, 1) = i2notes;
2627 else
2628 i3notes = i2notes;
2629 i2notes = 0;
2632 LOG_LINKS (i3) = 0;
2633 REG_NOTES (i3) = 0;
2634 LOG_LINKS (i2) = 0;
2635 REG_NOTES (i2) = 0;
2637 if (newi2pat)
2639 INSN_CODE (i2) = i2_code_number;
2640 PATTERN (i2) = newi2pat;
2642 else
2644 PUT_CODE (i2, NOTE);
2645 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2646 NOTE_SOURCE_FILE (i2) = 0;
2649 if (i1)
2651 LOG_LINKS (i1) = 0;
2652 REG_NOTES (i1) = 0;
2653 PUT_CODE (i1, NOTE);
2654 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2655 NOTE_SOURCE_FILE (i1) = 0;
2658 /* Get death notes for everything that is now used in either I3 or
2659 I2 and used to die in a previous insn. If we built two new
2660 patterns, move from I1 to I2 then I2 to I3 so that we get the
2661 proper movement on registers that I2 modifies. */
2663 if (newi2pat)
2665 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2666 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2668 else
2669 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2670 i3, &midnotes);
2672 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2673 if (i3notes)
2674 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2675 elim_i2, elim_i1);
2676 if (i2notes)
2677 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2678 elim_i2, elim_i1);
2679 if (i1notes)
2680 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2681 elim_i2, elim_i1);
2682 if (midnotes)
2683 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2684 elim_i2, elim_i1);
2686 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2687 know these are REG_UNUSED and want them to go to the desired insn,
2688 so we always pass it as i3. We have not counted the notes in
2689 reg_n_deaths yet, so we need to do so now. */
2691 if (newi2pat && new_i2_notes)
2693 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2694 if (GET_CODE (XEXP (temp, 0)) == REG)
2695 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2697 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2700 if (new_i3_notes)
2702 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2703 if (GET_CODE (XEXP (temp, 0)) == REG)
2704 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2706 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2709 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2710 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2711 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2712 in that case, it might delete I2. Similarly for I2 and I1.
2713 Show an additional death due to the REG_DEAD note we make here. If
2714 we discard it in distribute_notes, we will decrement it again. */
2716 if (i3dest_killed)
2718 if (GET_CODE (i3dest_killed) == REG)
2719 REG_N_DEATHS (REGNO (i3dest_killed))++;
2721 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2722 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2723 NULL_RTX),
2724 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2725 else
2726 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2727 NULL_RTX),
2728 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2729 elim_i2, elim_i1);
2732 if (i2dest_in_i2src)
2734 if (GET_CODE (i2dest) == REG)
2735 REG_N_DEATHS (REGNO (i2dest))++;
2737 if (newi2pat && reg_set_p (i2dest, newi2pat))
2738 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2739 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2740 else
2741 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2742 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2743 NULL_RTX, NULL_RTX);
2746 if (i1dest_in_i1src)
2748 if (GET_CODE (i1dest) == REG)
2749 REG_N_DEATHS (REGNO (i1dest))++;
2751 if (newi2pat && reg_set_p (i1dest, newi2pat))
2752 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2753 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2754 else
2755 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2756 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2757 NULL_RTX, NULL_RTX);
2760 distribute_links (i3links);
2761 distribute_links (i2links);
2762 distribute_links (i1links);
2764 if (GET_CODE (i2dest) == REG)
2766 rtx link;
2767 rtx i2_insn = 0, i2_val = 0, set;
2769 /* The insn that used to set this register doesn't exist, and
2770 this life of the register may not exist either. See if one of
2771 I3's links points to an insn that sets I2DEST. If it does,
2772 that is now the last known value for I2DEST. If we don't update
2773 this and I2 set the register to a value that depended on its old
2774 contents, we will get confused. If this insn is used, things
2775 will be set correctly in combine_instructions. */
2777 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2778 if ((set = single_set (XEXP (link, 0))) != 0
2779 && rtx_equal_p (i2dest, SET_DEST (set)))
2780 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2782 record_value_for_reg (i2dest, i2_insn, i2_val);
2784 /* If the reg formerly set in I2 died only once and that was in I3,
2785 zero its use count so it won't make `reload' do any work. */
2786 if (! added_sets_2
2787 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2788 && ! i2dest_in_i2src)
2790 regno = REGNO (i2dest);
2791 REG_N_SETS (regno)--;
2795 if (i1 && GET_CODE (i1dest) == REG)
2797 rtx link;
2798 rtx i1_insn = 0, i1_val = 0, set;
2800 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2801 if ((set = single_set (XEXP (link, 0))) != 0
2802 && rtx_equal_p (i1dest, SET_DEST (set)))
2803 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2805 record_value_for_reg (i1dest, i1_insn, i1_val);
2807 regno = REGNO (i1dest);
2808 if (! added_sets_1 && ! i1dest_in_i1src)
2809 REG_N_SETS (regno)--;
2812 /* Update reg_nonzero_bits et al for any changes that may have been made
2813 to this insn. The order of set_nonzero_bits_and_sign_copies() is
2814 important, because newi2pat can affect nonzero_bits of newpat. */
2815 if (newi2pat)
2816 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2817 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2819 /* Set new_direct_jump_p if a new return or simple jump instruction
2820 has been created.
2822 If I3 is now an unconditional jump, ensure that it has a
2823 BARRIER following it since it may have initially been a
2824 conditional jump. It may also be the last nonnote insn. */
2826 if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
2828 *new_direct_jump_p = 1;
2830 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2831 || GET_CODE (temp) != BARRIER)
2832 emit_barrier_after (i3);
2834 /* A NOOP jump does not need a barrier, but it does need cleaning up
2835 of CFG. */
2836 if (GET_CODE (newpat) == SET
2837 && SET_SRC (newpat) == pc_rtx
2838 && SET_DEST (newpat) == pc_rtx)
2839 *new_direct_jump_p = 1;
2842 combine_successes++;
2843 undo_commit ();
2845 /* Clear this here, so that subsequent get_last_value calls are not
2846 affected. */
2847 subst_prev_insn = NULL_RTX;
2849 if (added_links_insn
2850 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2851 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2852 return added_links_insn;
2853 else
2854 return newi2pat ? i2 : i3;
2857 /* Undo all the modifications recorded in undobuf. */
2859 static void
2860 undo_all ()
2862 struct undo *undo, *next;
2864 for (undo = undobuf.undos; undo; undo = next)
2866 next = undo->next;
2867 if (undo->is_int)
2868 *undo->where.i = undo->old_contents.i;
2869 else
2870 *undo->where.r = undo->old_contents.r;
2872 undo->next = undobuf.frees;
2873 undobuf.frees = undo;
2876 undobuf.undos = 0;
2878 /* Clear this here, so that subsequent get_last_value calls are not
2879 affected. */
2880 subst_prev_insn = NULL_RTX;
2883 /* We've committed to accepting the changes we made. Move all
2884 of the undos to the free list. */
2886 static void
2887 undo_commit ()
2889 struct undo *undo, *next;
2891 for (undo = undobuf.undos; undo; undo = next)
2893 next = undo->next;
2894 undo->next = undobuf.frees;
2895 undobuf.frees = undo;
2897 undobuf.undos = 0;
2901 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2902 where we have an arithmetic expression and return that point. LOC will
2903 be inside INSN.
2905 try_combine will call this function to see if an insn can be split into
2906 two insns. */
2908 static rtx *
2909 find_split_point (loc, insn)
2910 rtx *loc;
2911 rtx insn;
2913 rtx x = *loc;
2914 enum rtx_code code = GET_CODE (x);
2915 rtx *split;
2916 unsigned HOST_WIDE_INT len = 0;
2917 HOST_WIDE_INT pos = 0;
2918 int unsignedp = 0;
2919 rtx inner = NULL_RTX;
2921 /* First special-case some codes. */
2922 switch (code)
2924 case SUBREG:
2925 #ifdef INSN_SCHEDULING
2926 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2927 point. */
2928 if (GET_CODE (SUBREG_REG (x)) == MEM)
2929 return loc;
2930 #endif
2931 return find_split_point (&SUBREG_REG (x), insn);
2933 case MEM:
2934 #ifdef HAVE_lo_sum
2935 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2936 using LO_SUM and HIGH. */
2937 if (GET_CODE (XEXP (x, 0)) == CONST
2938 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2940 SUBST (XEXP (x, 0),
2941 gen_rtx_LO_SUM (Pmode,
2942 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
2943 XEXP (x, 0)));
2944 return &XEXP (XEXP (x, 0), 0);
2946 #endif
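/* For instance (hypothetical target where HAVE_lo_sum is defined):
       (mem (symbol_ref "x"))
   is rewritten above as
       (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
   and the HIGH part is returned as the split point.  */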
2948 /* If we have a PLUS whose second operand is a constant and the
2949 address is not valid, perhaps we can split it up using
2950 the machine-specific way to split large constants. We use
2951 the first pseudo-reg (one of the virtual regs) as a placeholder;
2952 it will not remain in the result. */
2953 if (GET_CODE (XEXP (x, 0)) == PLUS
2954 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2955 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2957 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2958 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2959 subst_insn);
2961 /* This should have produced two insns, each of which sets our
2962 placeholder. If the source of the second is a valid address,
2963 we can put both sources together and make a split point
2964 in the middle. */
2966 if (seq
2967 && NEXT_INSN (seq) != NULL_RTX
2968 && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
2969 && GET_CODE (seq) == INSN
2970 && GET_CODE (PATTERN (seq)) == SET
2971 && SET_DEST (PATTERN (seq)) == reg
2972 && ! reg_mentioned_p (reg,
2973 SET_SRC (PATTERN (seq)))
2974 && GET_CODE (NEXT_INSN (seq)) == INSN
2975 && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
2976 && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
2977 && memory_address_p (GET_MODE (x),
2978 SET_SRC (PATTERN (NEXT_INSN (seq)))))
2980 rtx src1 = SET_SRC (PATTERN (seq));
2981 rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
2983 /* Replace the placeholder in SRC2 with SRC1. If we can
2984 find where in SRC2 it was placed, that can become our
2985 split point and we can replace this address with SRC2.
2986 Just try two obvious places. */
2988 src2 = replace_rtx (src2, reg, src1);
2989 split = 0;
2990 if (XEXP (src2, 0) == src1)
2991 split = &XEXP (src2, 0);
2992 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2993 && XEXP (XEXP (src2, 0), 0) == src1)
2994 split = &XEXP (XEXP (src2, 0), 0);
2996 if (split)
2998 SUBST (XEXP (x, 0), src2);
2999 return split;
3003 /* If that didn't work, perhaps the first operand is complex and
3004 needs to be computed separately, so make a split point there.
3005 This will occur on machines that just support REG + CONST
3006 and have a constant moved through some previous computation. */
3008 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
3009 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
3010 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
3011 == 'o')))
3012 return &XEXP (XEXP (x, 0), 0);
3014 break;
3016 case SET:
3017 #ifdef HAVE_cc0
3018 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
3019 ZERO_EXTRACT, the most likely reason why this doesn't match is that
3020 we need to put the operand into a register. So split at that
3021 point. */
3023 if (SET_DEST (x) == cc0_rtx
3024 && GET_CODE (SET_SRC (x)) != COMPARE
3025 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
3026 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
3027 && ! (GET_CODE (SET_SRC (x)) == SUBREG
3028 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
3029 return &SET_SRC (x);
3030 #endif
3032 /* See if we can split SET_SRC as it stands. */
3033 split = find_split_point (&SET_SRC (x), insn);
3034 if (split && split != &SET_SRC (x))
3035 return split;
3037 /* See if we can split SET_DEST as it stands. */
3038 split = find_split_point (&SET_DEST (x), insn);
3039 if (split && split != &SET_DEST (x))
3040 return split;
3042 /* See if this is a bitfield assignment with everything constant. If
3043 so, this is an IOR of an AND, so split it into that. */
3044 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
3045 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
3046 <= HOST_BITS_PER_WIDE_INT)
3047 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
3048 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
3049 && GET_CODE (SET_SRC (x)) == CONST_INT
3050 && ((INTVAL (XEXP (SET_DEST (x), 1))
3051 + INTVAL (XEXP (SET_DEST (x), 2)))
3052 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
3053 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
3055 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3056 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3057 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3058 rtx dest = XEXP (SET_DEST (x), 0);
3059 enum machine_mode mode = GET_MODE (dest);
3060 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3062 if (BITS_BIG_ENDIAN)
3063 pos = GET_MODE_BITSIZE (mode) - len - pos;
3065 if (src == mask)
3066 SUBST (SET_SRC (x),
3067 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
3068 else
3069 SUBST (SET_SRC (x),
3070 gen_binary (IOR, mode,
3071 gen_binary (AND, mode, dest,
3072 gen_int_mode (~(mask << pos),
3073 mode)),
3074 GEN_INT (src << pos)));
3076 SUBST (SET_DEST (x), dest);
3078 split = find_split_point (&SET_SRC (x), insn);
3079 if (split && split != &SET_SRC (x))
3080 return split;
3083 /* Otherwise, see if this is an operation that we can split into two.
3084 If so, try to split that. */
3085 code = GET_CODE (SET_SRC (x));
3087 switch (code)
3089 case AND:
3090 /* If we are AND'ing with a large constant that is only a single
3091 bit and the result is only being used in a context where we
3092 need to know if it is zero or non-zero, replace it with a bit
3093 extraction. This will avoid the large constant, which might
3094 have taken more than one insn to make. If the constant were
3095 not a valid argument to the AND but took only one insn to make,
3096 this is no worse, but if it took more than one insn, it will
3097 be better. */
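/* Invented example: with
       (set (reg:SI 65) (and:SI (reg:SI 64) (const_int 4096)))
   whose only use is (ne (reg:SI 65) (const_int 0)), the AND is replaced
   by a one-bit extraction at position 12 (exact_log2 (4096)), avoiding
   the large constant.  */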
3099 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3100 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3101 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3102 && GET_CODE (SET_DEST (x)) == REG
3103 && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3104 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3105 && XEXP (*split, 0) == SET_DEST (x)
3106 && XEXP (*split, 1) == const0_rtx)
3108 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3109 XEXP (SET_SRC (x), 0),
3110 pos, NULL_RTX, 1, 1, 0, 0);
3111 if (extraction != 0)
3113 SUBST (SET_SRC (x), extraction);
3114 return find_split_point (loc, insn);
3117 break;
3119 case NE:
3120 /* if STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3121 is known to be on, this can be converted into a NEG of a shift. */
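/* E.g. (assuming STORE_FLAG_VALUE == -1 and that only bit 3 of
   (reg:SI 64) can be nonzero):
       (ne:SI (reg:SI 64) (const_int 0))
   becomes (neg:SI (lshiftrt:SI (reg:SI 64) (const_int 3))), which
   evaluates to 0 or -1 as required.  */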
3122 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3123 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3124 && 1 <= (pos = exact_log2
3125 (nonzero_bits (XEXP (SET_SRC (x), 0),
3126 GET_MODE (XEXP (SET_SRC (x), 0))))))
3128 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3130 SUBST (SET_SRC (x),
3131 gen_rtx_NEG (mode,
3132 gen_rtx_LSHIFTRT (mode,
3133 XEXP (SET_SRC (x), 0),
3134 GEN_INT (pos))));
3136 split = find_split_point (&SET_SRC (x), insn);
3137 if (split && split != &SET_SRC (x))
3138 return split;
3140 break;
3142 case SIGN_EXTEND:
3143 inner = XEXP (SET_SRC (x), 0);
3145 /* We can't optimize if either mode is a partial integer
3146 mode as we don't know how many bits are significant
3147 in those modes. */
3148 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3149 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3150 break;
3152 pos = 0;
3153 len = GET_MODE_BITSIZE (GET_MODE (inner));
3154 unsignedp = 0;
3155 break;
3157 case SIGN_EXTRACT:
3158 case ZERO_EXTRACT:
3159 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3160 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3162 inner = XEXP (SET_SRC (x), 0);
3163 len = INTVAL (XEXP (SET_SRC (x), 1));
3164 pos = INTVAL (XEXP (SET_SRC (x), 2));
3166 if (BITS_BIG_ENDIAN)
3167 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3168 unsignedp = (code == ZERO_EXTRACT);
3170 break;
3172 default:
3173 break;
3176 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3178 enum machine_mode mode = GET_MODE (SET_SRC (x));
3180 /* For unsigned, we have a choice of a shift followed by an
3181 AND or two shifts. Use two shifts for field sizes where the
3182 constant might be too large. We assume here that we can
3183 always at least get 8-bit constants in an AND insn, which is
3184 true for every current RISC. */
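/* A concrete (invented) instance with len == 4, pos == 8 in SImode:
   unsigned:  (and:SI (lshiftrt:SI INNER (const_int 8)) (const_int 15))
   signed:    (ashiftrt:SI (ashift:SI INNER (const_int 20))
                           (const_int 28))  */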
3186 if (unsignedp && len <= 8)
3188 SUBST (SET_SRC (x),
3189 gen_rtx_AND (mode,
3190 gen_rtx_LSHIFTRT
3191 (mode, gen_lowpart_for_combine (mode, inner),
3192 GEN_INT (pos)),
3193 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3195 split = find_split_point (&SET_SRC (x), insn);
3196 if (split && split != &SET_SRC (x))
3197 return split;
3199 else
3201 SUBST (SET_SRC (x),
3202 gen_rtx_fmt_ee
3203 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3204 gen_rtx_ASHIFT (mode,
3205 gen_lowpart_for_combine (mode, inner),
3206 GEN_INT (GET_MODE_BITSIZE (mode)
3207 - len - pos)),
3208 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3210 split = find_split_point (&SET_SRC (x), insn);
3211 if (split && split != &SET_SRC (x))
3212 return split;
3216 /* See if this is a simple operation with a constant as the second
3217 operand. It might be that this constant is out of range and hence
3218 could be used as a split point. */
3219 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3220 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3221 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3222 && CONSTANT_P (XEXP (SET_SRC (x), 1))
3223 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3224 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3225 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3226 == 'o'))))
3227 return &XEXP (SET_SRC (x), 1);
3229 /* Finally, see if this is a simple operation with its first operand
3230 not in a register. The operation might require this operand in a
3231 register, so return it as a split point. We can always do this
3232 because if the first operand were another operation, we would have
3233 already found it as a split point. */
3234 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3235 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3236 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3237 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3238 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3239 return &XEXP (SET_SRC (x), 0);
3241 return 0;
3243 case AND:
3244 case IOR:
3245 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3246 it is better to write this as (not (ior A B)) so we can split it.
3247 Similarly for IOR. */
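/* De Morgan in action (operands invented):
       (and (not (reg:SI 60)) (not (reg:SI 61)))
   ->  (not (ior (reg:SI 60) (reg:SI 61)))
   and likewise an IOR of NOTs becomes a NOT of an AND.  */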
3248 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3250 SUBST (*loc,
3251 gen_rtx_NOT (GET_MODE (x),
3252 gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3253 GET_MODE (x),
3254 XEXP (XEXP (x, 0), 0),
3255 XEXP (XEXP (x, 1), 0))));
3256 return find_split_point (loc, insn);
3259 /* Many RISC machines have a large set of logical insns. If the
3260 second operand is a NOT, put it first so we will try to split the
3261 other operand first. */
3262 if (GET_CODE (XEXP (x, 1)) == NOT)
3264 rtx tem = XEXP (x, 0);
3265 SUBST (XEXP (x, 0), XEXP (x, 1));
3266 SUBST (XEXP (x, 1), tem);
3268 break;
3270 default:
3271 break;
3274 /* Otherwise, select our actions depending on our rtx class. */
3275 switch (GET_RTX_CLASS (code))
3277 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3278 case '3':
3279 split = find_split_point (&XEXP (x, 2), insn);
3280 if (split)
3281 return split;
3282 /* ... fall through ... */
3283 case '2':
3284 case 'c':
3285 case '<':
3286 split = find_split_point (&XEXP (x, 1), insn);
3287 if (split)
3288 return split;
3289 /* ... fall through ... */
3290 case '1':
3291 /* Some machines have (and (shift ...) ...) insns. If X is not
3292 an AND, but XEXP (X, 0) is, use it as our split point. */
3293 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3294 return &XEXP (x, 0);
3296 split = find_split_point (&XEXP (x, 0), insn);
3297 if (split)
3298 return split;
3299 return loc;
3302 /* Otherwise, we don't have a split point. */
3303 return 0;
3306 /* Throughout X, replace FROM with TO, and return the result.
3307 The result is TO if X is FROM;
3308 otherwise the result is X, but its contents may have been modified.
3309 If they were modified, a record was made in undobuf so that
3310 undo_all will (among other things) return X to its original state.
3312 If the number of changes necessary is too great to record for undoing,
3313 the excess changes are not made, so the result is invalid.
3314 The changes already made can still be undone.
3315 undobuf.num_undo is incremented for such changes, so by testing that
3316 count the caller can tell whether the result is valid.
3318 `n_occurrences' is incremented each time FROM is replaced.
3320 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3322 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
3323 by copying if `n_occurrences' is non-zero. */
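/* An illustrative sketch of the UNIQUE_COPY case (example only): when
   substituting TO = (plus (reg 66) (const_int 4)) for FROM = (reg 65) in
   (set (reg 67) (mult (reg 65) (reg 65))),
   a nonzero UNIQUE_COPY makes the second occurrence receive a fresh
   copy_rtx of TO, so the two PLUS expressions are not shared.  */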
3325 static rtx
3326 subst (x, from, to, in_dest, unique_copy)
3327 rtx x, from, to;
3328 int in_dest;
3329 int unique_copy;
3331 enum rtx_code code = GET_CODE (x);
3332 enum machine_mode op0_mode = VOIDmode;
3333 const char *fmt;
3334 int len, i;
3335 rtx new;
3337 /* Two expressions are equal if they are identical copies of a shared
3338 RTX or if they are both registers with the same register number
3339 and mode. */
3341 #define COMBINE_RTX_EQUAL_P(X,Y) \
3342 ((X) == (Y) \
3343 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3344 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3346 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3348 n_occurrences++;
3349 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3352 /* If X and FROM are the same register but different modes, they will
3353 not have been seen as equal above. However, flow.c will make a
3354 LOG_LINKS entry for that case. If we do nothing, we will try to
3355 rerecognize our original insn and, when it succeeds, we will
3356 delete the feeding insn, which is incorrect.
3358 So force this insn not to match in this (rare) case. */
3359 if (! in_dest && code == REG && GET_CODE (from) == REG
3360 && REGNO (x) == REGNO (from))
3361 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3363 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3364 of which may contain things that can be combined. */
3365 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3366 return x;
3368 /* It is possible to have a subexpression appear twice in the insn.
3369 Suppose that FROM is a register that appears within TO.
3370 Then, after that subexpression has been scanned once by `subst',
3371 the second time it is scanned, TO may be found. If we were
3372 to scan TO here, we would find FROM within it and create a
3373 self-referent rtl structure which is completely wrong. */
3374 if (COMBINE_RTX_EQUAL_P (x, to))
3375 return to;
3377 /* Parallel asm_operands need special attention because all of the
3378 inputs are shared across the arms. Furthermore, unsharing the
3379 rtl results in recognition failures. Failure to handle this case
3380 specially can result in circular rtl.
3382 Solve this by doing a normal pass across the first entry of the
3383 parallel, and only processing the SET_DESTs of the subsequent
3384 entries. Ug. */
3386 if (code == PARALLEL
3387 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3388 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3390 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3392 /* If this substitution failed, this whole thing fails. */
3393 if (GET_CODE (new) == CLOBBER
3394 && XEXP (new, 0) == const0_rtx)
3395 return new;
3397 SUBST (XVECEXP (x, 0, 0), new);
3399 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3401 rtx dest = SET_DEST (XVECEXP (x, 0, i));
3403 if (GET_CODE (dest) != REG
3404 && GET_CODE (dest) != CC0
3405 && GET_CODE (dest) != PC)
3407 new = subst (dest, from, to, 0, unique_copy);
3409 /* If this substitution failed, this whole thing fails. */
3410 if (GET_CODE (new) == CLOBBER
3411 && XEXP (new, 0) == const0_rtx)
3412 return new;
3414 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3418 else
3420 len = GET_RTX_LENGTH (code);
3421 fmt = GET_RTX_FORMAT (code);
3423 /* We don't need to process a SET_DEST that is a register, CC0,
3424 or PC, so set up to skip this common case. All other cases
3425 where we want to suppress replacing something inside a
3426 SET_SRC are handled via the IN_DEST operand. */
3427 if (code == SET
3428 && (GET_CODE (SET_DEST (x)) == REG
3429 || GET_CODE (SET_DEST (x)) == CC0
3430 || GET_CODE (SET_DEST (x)) == PC))
3431 fmt = "ie";
3433 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3434 constant. */
3435 if (fmt[0] == 'e')
3436 op0_mode = GET_MODE (XEXP (x, 0));
3438 for (i = 0; i < len; i++)
3440 if (fmt[i] == 'E')
3442 int j;
3443 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3445 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3447 new = (unique_copy && n_occurrences
3448 ? copy_rtx (to) : to);
3449 n_occurrences++;
3451 else
3453 new = subst (XVECEXP (x, i, j), from, to, 0,
3454 unique_copy);
3456 /* If this substitution failed, this whole thing
3457 fails. */
3458 if (GET_CODE (new) == CLOBBER
3459 && XEXP (new, 0) == const0_rtx)
3460 return new;
3463 SUBST (XVECEXP (x, i, j), new);
3466 else if (fmt[i] == 'e')
3468 /* If this is a register being set, ignore it. */
3469 new = XEXP (x, i);
3470 if (in_dest
3471 && (code == SUBREG || code == STRICT_LOW_PART
3472 || code == ZERO_EXTRACT)
3473 && i == 0
3474 && GET_CODE (new) == REG)
3475 ;
3477 else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3479 /* In general, don't install a subreg involving two
3480 modes not tieable. It can worsen register
3481 allocation, and can even make invalid reload
3482 insns, since the reg inside may need to be copied
3483 from in the outside mode, and that may be invalid
3484 if it is an fp reg copied in integer mode.
3486 We allow two exceptions to this: it is valid if
3487 it is inside another SUBREG and the mode of that
3488 SUBREG and the mode of the inside of TO are
3489 tieable, and it is valid if X is a SET that copies
3490 FROM to CC0. */
3492 if (GET_CODE (to) == SUBREG
3493 && ! MODES_TIEABLE_P (GET_MODE (to),
3494 GET_MODE (SUBREG_REG (to)))
3495 && ! (code == SUBREG
3496 && MODES_TIEABLE_P (GET_MODE (x),
3497 GET_MODE (SUBREG_REG (to))))
3498 #ifdef HAVE_cc0
3499 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3500 #endif
3502 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3504 #ifdef CLASS_CANNOT_CHANGE_MODE
3505 if (code == SUBREG
3506 && GET_CODE (to) == REG
3507 && REGNO (to) < FIRST_PSEUDO_REGISTER
3508 && (TEST_HARD_REG_BIT
3509 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
3510 REGNO (to)))
3511 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
3512 GET_MODE (x)))
3513 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3514 #endif
3516 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3517 n_occurrences++;
3519 else
3520 /* If we are in a SET_DEST, suppress most cases unless we
3521 have gone inside a MEM, in which case we want to
3522 simplify the address. We assume here that things that
3523 are actually part of the destination have their inner
3524 parts in the first expression. This is true for SUBREG,
3525 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3526 things aside from REG and MEM that should appear in a
3527 SET_DEST. */
3528 new = subst (XEXP (x, i), from, to,
3529 (((in_dest
3530 && (code == SUBREG || code == STRICT_LOW_PART
3531 || code == ZERO_EXTRACT))
3532 || code == SET)
3533 && i == 0), unique_copy);
3535 /* If we found that we will have to reject this combination,
3536 indicate that by returning the CLOBBER ourselves, rather than
3537 an expression containing it. This will speed things up as
3538 well as prevent accidents where two CLOBBERs are considered
3539 to be equal, thus producing an incorrect simplification. */
3541 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3542 return new;
3544 if (GET_CODE (new) == CONST_INT && GET_CODE (x) == SUBREG)
3546 enum machine_mode mode = GET_MODE (x);
3548 x = simplify_subreg (GET_MODE (x), new,
3549 GET_MODE (SUBREG_REG (x)),
3550 SUBREG_BYTE (x));
3551 if (! x)
3552 x = gen_rtx_CLOBBER (mode, const0_rtx);
3554 else if (GET_CODE (new) == CONST_INT
3555 && GET_CODE (x) == ZERO_EXTEND)
3557 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3558 new, GET_MODE (XEXP (x, 0)));
3559 if (! x)
3560 abort ();
3562 else
3563 SUBST (XEXP (x, i), new);
3568 /* Try to simplify X. If the simplification changed the code, it is likely
3569 that further simplification will help, so loop, but limit the number
3570 of repetitions that will be performed. */
3572 for (i = 0; i < 4; i++)
3574 /* If X is sufficiently simple, don't bother trying to do anything
3575 with it. */
3576 if (code != CONST_INT && code != REG && code != CLOBBER)
3577 x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
3579 if (GET_CODE (x) == code)
3580 break;
3582 code = GET_CODE (x);
3584 /* We no longer know the original mode of operand 0 since we
3585 have changed the form of X. */
3586 op0_mode = VOIDmode;
3589 return x;
3592 /* Simplify X, a piece of RTL. We just operate on the expression at the
3593 outer level; call `subst' to simplify recursively. Return the new
3594 expression.
3596 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3597 will be the last iteration, even if an expression with a code different
3598 from X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
3600 static rtx
3601 combine_simplify_rtx (x, op0_mode, last, in_dest)
3602 rtx x;
3603 enum machine_mode op0_mode;
3604 int last;
3605 int in_dest;
3607 enum rtx_code code = GET_CODE (x);
3608 enum machine_mode mode = GET_MODE (x);
3609 rtx temp;
3610 rtx reversed;
3611 int i;
3613 /* If this is a commutative operation, put a constant last and a complex
3614 expression first. We don't need to do this for comparisons here. */
3615 if (GET_RTX_CLASS (code) == 'c'
3616 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
3618 temp = XEXP (x, 0);
3619 SUBST (XEXP (x, 0), XEXP (x, 1));
3620 SUBST (XEXP (x, 1), temp);
3623 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3624 sign extension of a PLUS with a constant, reverse the order of the sign
3625 extension and the addition. Note that this is not the same as the original
3626 code, but overflow is undefined for signed values. Also note that the
3627 PLUS will have been partially moved "inside" the sign-extension, so that
3628 the first operand of X will really look like:
3629 (ashiftrt (plus (ashift A C4) C5) C4).
3630 We convert this to
3631 (plus (ashiftrt (ashift A C4) C4) C6), where C6 is C5 shifted right arithmetically by C4,
3632 and replace the first operand of X with that expression. Later parts
3633 of this function may simplify the expression further.
3635 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3636 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3637 distributive law to produce (plus (mult (sign_extend A) C2) C3), where C3 = C1 * C2.
3639 We do this to simplify address expressions. */
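/* A concrete instance (values chosen for illustration): with C4 = 24
   and C5 = (const_int 33554432), i.e. 2 << 24, the operand
   (ashiftrt:SI (plus:SI (ashift:SI A (const_int 24))
   (const_int 33554432))
   (const_int 24))
   becomes
   (plus:SI (ashiftrt:SI (ashift:SI A (const_int 24)) (const_int 24))
   (const_int 2)).  */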
3641 if ((code == PLUS || code == MINUS || code == MULT)
3642 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3643 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3644 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3645 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3646 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3647 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3648 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3649 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3650 XEXP (XEXP (XEXP (x, 0), 0), 1),
3651 XEXP (XEXP (x, 0), 1))) != 0)
3653 rtx new
3654 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3655 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3656 INTVAL (XEXP (XEXP (x, 0), 1)));
3658 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3659 INTVAL (XEXP (XEXP (x, 0), 1)));
3661 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3664 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3665 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3666 things. Check for cases where both arms are testing the same
3667 condition.
3669 Don't do anything if all operands are very simple. */
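/* As an illustration (not from the original comment): given
   (plus:SI (if_then_else:SI (eq A B) (reg 65) (const_int 0))
   (const_int 1)),
   if_then_else_cond recovers the condition (eq A B) with arms
   (plus (reg 65) (const_int 1)) and (const_int 1), and the code below
   rebuilds the expression around those (possibly simpler) arms.  */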
3671 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3672 || GET_RTX_CLASS (code) == '<')
3673 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3674 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3675 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3676 == 'o')))
3677 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3678 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3679 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3680 == 'o')))))
3681 || (GET_RTX_CLASS (code) == '1'
3682 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3683 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3684 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3685 == 'o'))))))
3687 rtx cond, true_rtx, false_rtx;
3689 cond = if_then_else_cond (x, &true_rtx, &false_rtx);
3690 if (cond != 0
3691 /* If everything is a comparison, what we have is highly unlikely
3692 to be simpler, so don't use it. */
3693 && ! (GET_RTX_CLASS (code) == '<'
3694 && (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
3695 || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
3697 rtx cop1 = const0_rtx;
3698 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3700 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3701 return x;
3703 /* Simplify the alternative arms; this may collapse the true and
3704 false arms to store-flag values. */
3705 true_rtx = subst (true_rtx, pc_rtx, pc_rtx, 0, 0);
3706 false_rtx = subst (false_rtx, pc_rtx, pc_rtx, 0, 0);
3708 /* If true_rtx and false_rtx are not general_operands, an if_then_else
3709 is unlikely to be simpler. */
3710 if (general_operand (true_rtx, VOIDmode)
3711 && general_operand (false_rtx, VOIDmode))
3713 /* Restarting if we generate a store-flag expression will cause
3714 us to loop. Just drop through in this case. */
3716 /* If the result values are STORE_FLAG_VALUE and zero, we can
3717 just make the comparison operation. */
3718 if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
3719 x = gen_binary (cond_code, mode, cond, cop1);
3720 else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
3721 && reverse_condition (cond_code) != UNKNOWN)
3722 x = gen_binary (reverse_condition (cond_code),
3723 mode, cond, cop1);
3725 /* Likewise, we can make the negate of a comparison operation
3726 if the result values are - STORE_FLAG_VALUE and zero. */
3727 else if (GET_CODE (true_rtx) == CONST_INT
3728 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3729 && false_rtx == const0_rtx)
3730 x = simplify_gen_unary (NEG, mode,
3731 gen_binary (cond_code, mode, cond,
3732 cop1),
3733 mode);
3734 else if (GET_CODE (false_rtx) == CONST_INT
3735 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3736 && true_rtx == const0_rtx)
3737 x = simplify_gen_unary (NEG, mode,
3738 gen_binary (reverse_condition
3739 (cond_code),
3740 mode, cond, cop1),
3741 mode);
3742 else
3743 return gen_rtx_IF_THEN_ELSE (mode,
3744 gen_binary (cond_code, VOIDmode,
3745 cond, cop1),
3746 true_rtx, false_rtx);
3748 code = GET_CODE (x);
3749 op0_mode = VOIDmode;
3754 /* Try to fold this expression in case we have constants that weren't
3755 present before. */
3756 temp = 0;
3757 switch (GET_RTX_CLASS (code))
3759 case '1':
3760 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3761 break;
3762 case '<':
3764 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3765 if (cmp_mode == VOIDmode)
3767 cmp_mode = GET_MODE (XEXP (x, 1));
3768 if (cmp_mode == VOIDmode)
3769 cmp_mode = op0_mode;
3771 temp = simplify_relational_operation (code, cmp_mode,
3772 XEXP (x, 0), XEXP (x, 1));
3774 #ifdef FLOAT_STORE_FLAG_VALUE
3775 if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3777 if (temp == const0_rtx)
3778 temp = CONST0_RTX (mode);
3779 else
3780 temp = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE (mode),
3781 mode);
3783 #endif
3784 break;
3785 case 'c':
3786 case '2':
3787 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3788 break;
3789 case 'b':
3790 case '3':
3791 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3792 XEXP (x, 1), XEXP (x, 2));
3793 break;
3796 if (temp)
3798 x = temp;
3799 code = GET_CODE (temp);
3800 op0_mode = VOIDmode;
3801 mode = GET_MODE (temp);
3804 /* First see if we can apply the inverse distributive law. */
3805 if (code == PLUS || code == MINUS
3806 || code == AND || code == IOR || code == XOR)
3808 x = apply_distributive_law (x);
3809 code = GET_CODE (x);
3810 op0_mode = VOIDmode;
3813 /* If CODE is an associative operation not otherwise handled, see if we
3814 can associate some operands. This can win if they are constants or
3815 if they are logically related (i.e. (a & b) & a). */
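/* E.g. (illustrative): for (plus:SI (plus:SI X (const_int 3))
   (const_int 4)), the two inner constants fold to (const_int 7),
   giving (plus:SI X (const_int 7)).  */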
3816 if ((code == PLUS || code == MINUS || code == MULT || code == DIV
3817 || code == AND || code == IOR || code == XOR
3818 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3819 && ((INTEGRAL_MODE_P (mode) && code != DIV)
3820 || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
3822 if (GET_CODE (XEXP (x, 0)) == code)
3824 rtx other = XEXP (XEXP (x, 0), 0);
3825 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3826 rtx inner_op1 = XEXP (x, 1);
3827 rtx inner;
3829 /* Make sure we pass the constant operand, if any, as the second
3830 one when this is a commutative operation. */
3831 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3833 rtx tem = inner_op0;
3834 inner_op0 = inner_op1;
3835 inner_op1 = tem;
3837 inner = simplify_binary_operation (code == MINUS ? PLUS
3838 : code == DIV ? MULT
3839 : code,
3840 mode, inner_op0, inner_op1);
3842 /* For commutative operations, try the other pair if that one
3843 didn't simplify. */
3844 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3846 other = XEXP (XEXP (x, 0), 1);
3847 inner = simplify_binary_operation (code, mode,
3848 XEXP (XEXP (x, 0), 0),
3849 XEXP (x, 1));
3852 if (inner)
3853 return gen_binary (code, mode, other, inner);
3857 /* A little bit of algebraic simplification here. */
3858 switch (code)
3860 case MEM:
3861 /* Ensure that our address has any ASHIFTs converted to MULT in case
3862 address-recognizing predicates are called later. */
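/* E.g. (illustrative): an address such as
   (plus:SI (ashift:SI (reg 65) (const_int 2)) (reg 66))
   is rewritten here as
   (plus:SI (mult:SI (reg 65) (const_int 4)) (reg 66))
   so that address-recognizing predicates see the canonical MULT form.  */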
3863 temp = make_compound_operation (XEXP (x, 0), MEM);
3864 SUBST (XEXP (x, 0), temp);
3865 break;
3867 case SUBREG:
3868 if (op0_mode == VOIDmode)
3869 op0_mode = GET_MODE (SUBREG_REG (x));
3871 /* simplify_subreg can't use gen_lowpart_for_combine. */
3872 if (CONSTANT_P (SUBREG_REG (x))
3873 && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
3874 /* Don't call gen_lowpart_for_combine if the inner mode
3875 is VOIDmode and we cannot simplify it, as SUBREG without
3876 inner mode is invalid. */
3877 && (GET_MODE (SUBREG_REG (x)) != VOIDmode
3878 || gen_lowpart_common (mode, SUBREG_REG (x))))
3879 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3881 if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
3882 break;
3884 rtx temp;
3885 temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
3886 SUBREG_BYTE (x));
3887 if (temp)
3888 return temp;
3891 /* Don't change the mode of the MEM if that would change the meaning
3892 of the address. */
3893 if (GET_CODE (SUBREG_REG (x)) == MEM
3894 && (MEM_VOLATILE_P (SUBREG_REG (x))
3895 || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
3896 return gen_rtx_CLOBBER (mode, const0_rtx);
3898 /* Note that we cannot do any narrowing for non-constants since
3899 we might have been counting on using the fact that some bits were
3900 zero. We now do this in the SET. */
3902 break;
3904 case NOT:
3905 /* (not (plus X -1)) can become (neg X). */
3906 if (GET_CODE (XEXP (x, 0)) == PLUS
3907 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3908 return gen_rtx_NEG (mode, XEXP (XEXP (x, 0), 0));
3910 /* Similarly, (not (neg X)) is (plus X -1). */
3911 if (GET_CODE (XEXP (x, 0)) == NEG)
3912 return gen_rtx_PLUS (mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3914 /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */
3915 if (GET_CODE (XEXP (x, 0)) == XOR
3916 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3917 && (temp = simplify_unary_operation (NOT, mode,
3918 XEXP (XEXP (x, 0), 1),
3919 mode)) != 0)
3920 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3922 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3923 other than 1, but that is not valid. We could do a similar
3924 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3925 but this doesn't seem common enough to bother with. */
3926 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3927 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3928 return gen_rtx_ROTATE (mode, simplify_gen_unary (NOT, mode,
3929 const1_rtx, mode),
3930 XEXP (XEXP (x, 0), 1));
3932 if (GET_CODE (XEXP (x, 0)) == SUBREG
3933 && subreg_lowpart_p (XEXP (x, 0))
3934 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3935 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3936 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3937 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3939 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3941 x = gen_rtx_ROTATE (inner_mode,
3942 simplify_gen_unary (NOT, inner_mode, const1_rtx,
3943 inner_mode),
3944 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3945 return gen_lowpart_for_combine (mode, x);
3948 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3949 reversing the comparison code if valid. */
3950 if (STORE_FLAG_VALUE == -1
3951 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3952 && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0),
3953 XEXP (XEXP (x, 0), 1))))
3954 return reversed;
3956 /* (not (ashiftrt foo C)) where C is the number of bits in FOO minus 1
3957 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3958 perform the above simplification. */
3960 if (STORE_FLAG_VALUE == -1
3961 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3962 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3963 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3964 return gen_rtx_GE (mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3966 /* Apply De Morgan's laws to reduce the number of patterns for machines
3967 with negating logical insns (and-not, nand, etc.). If the result has
3968 only one NOT, put it first, since that is how the patterns are
3969 coded. */
3971 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3973 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3974 enum machine_mode op_mode;
3976 op_mode = GET_MODE (in1);
3977 in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
3979 op_mode = GET_MODE (in2);
3980 if (op_mode == VOIDmode)
3981 op_mode = mode;
3982 in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
3984 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
3986 rtx tem = in2;
3987 in2 = in1; in1 = tem;
3990 return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3991 mode, in1, in2);
3993 break;
3995 case NEG:
3996 /* (neg (plus X 1)) can become (not X). */
3997 if (GET_CODE (XEXP (x, 0)) == PLUS
3998 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3999 return gen_rtx_NOT (mode, XEXP (XEXP (x, 0), 0));
4001 /* Similarly, (neg (not X)) is (plus X 1). */
4002 if (GET_CODE (XEXP (x, 0)) == NOT)
4003 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
4005 /* (neg (minus X Y)) can become (minus Y X). This transformation
4006 isn't safe for modes with signed zeros, since if X and Y are
4007 both +0, (minus Y X) is the same as (minus X Y). If the rounding
4008 mode is towards +infinity (or -infinity) then the two expressions
4009 will be rounded differently. */
4010 if (GET_CODE (XEXP (x, 0)) == MINUS
4011 && !HONOR_SIGNED_ZEROS (mode)
4012 && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
4013 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
4014 XEXP (XEXP (x, 0), 0));
4016 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
4017 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
4018 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
4019 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
4021 /* NEG commutes with ASHIFT since it is multiplication. Only do this
4022 if we can then eliminate the NEG (e.g.,
4023 if the operand is a constant). */
4025 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4027 temp = simplify_unary_operation (NEG, mode,
4028 XEXP (XEXP (x, 0), 0), mode);
4029 if (temp)
4030 return gen_binary (ASHIFT, mode, temp, XEXP (XEXP (x, 0), 1));
4033 temp = expand_compound_operation (XEXP (x, 0));
4035 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
4036 replaced by (lshiftrt X C). This will convert
4037 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
4039 if (GET_CODE (temp) == ASHIFTRT
4040 && GET_CODE (XEXP (temp, 1)) == CONST_INT
4041 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
4042 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
4043 INTVAL (XEXP (temp, 1)));
4045 /* If X has only a single bit that might be nonzero, say, bit I, convert
4046 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4047 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
4048 (sign_extract X 1 Y). But only do this if TEMP isn't a register
4049 or a SUBREG of one since we'd be making the expression more
4050 complex if it was just a register. */
4052 if (GET_CODE (temp) != REG
4053 && ! (GET_CODE (temp) == SUBREG
4054 && GET_CODE (SUBREG_REG (temp)) == REG)
4055 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
4057 rtx temp1 = simplify_shift_const
4058 (NULL_RTX, ASHIFTRT, mode,
4059 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
4060 GET_MODE_BITSIZE (mode) - 1 - i),
4061 GET_MODE_BITSIZE (mode) - 1 - i);
4063 /* If all we did was surround TEMP with the two shifts, we
4064 haven't improved anything, so don't use it. Otherwise,
4065 we are better off with TEMP1. */
4066 if (GET_CODE (temp1) != ASHIFTRT
4067 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
4068 || XEXP (XEXP (temp1, 0), 0) != temp)
4069 return temp1;
4071 break;
4073 case TRUNCATE:
4074 /* We can't handle truncation to a partial integer mode here
4075 because we don't know the real bitsize of the partial
4076 integer mode. */
4077 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4078 break;
4080 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4081 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4082 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4083 SUBST (XEXP (x, 0),
4084 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
4085 GET_MODE_MASK (mode), NULL_RTX, 0));
4087 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
4088 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4089 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4090 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4091 return XEXP (XEXP (x, 0), 0);
4093 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
4094 (OP:SI foo:SI) if OP is NEG or ABS. */
4095 if ((GET_CODE (XEXP (x, 0)) == ABS
4096 || GET_CODE (XEXP (x, 0)) == NEG)
4097 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
4098 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
4099 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4100 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4101 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4103 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4104 (truncate:SI x). */
4105 if (GET_CODE (XEXP (x, 0)) == SUBREG
4106 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4107 && subreg_lowpart_p (XEXP (x, 0)))
4108 return SUBREG_REG (XEXP (x, 0));
4110 /* If we know that the value is already truncated, we can
4111 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4112 is nonzero for the corresponding modes. But don't do this
4113 for an (LSHIFTRT (MULT ...)) since this will cause problems
4114 with the umulXi3_highpart patterns. */
4115 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4116 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4117 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4118 >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
4119 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4120 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
4121 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4123 /* A truncate of a comparison can be replaced with a subreg if
4124 STORE_FLAG_VALUE permits. This is like the previous test,
4125 but it works even if the comparison is done in a mode larger
4126 than HOST_BITS_PER_WIDE_INT. */
4127 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4128 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4129 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4130 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4132 /* Similarly, a truncate of a register whose value is a
4133 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4134 permits. */
4135 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4136 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4137 && (temp = get_last_value (XEXP (x, 0)))
4138 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4139 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4141 break;
4143 case FLOAT_TRUNCATE:
4144 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4145 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4146 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4147 return XEXP (XEXP (x, 0), 0);
4149 /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
4150 (OP:SF foo:SF) if OP is NEG or ABS. */
4151 if ((GET_CODE (XEXP (x, 0)) == ABS
4152 || GET_CODE (XEXP (x, 0)) == NEG)
4153 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4154 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4155 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4156 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4158 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4159 is (float_truncate:SF x). */
4160 if (GET_CODE (XEXP (x, 0)) == SUBREG
4161 && subreg_lowpart_p (XEXP (x, 0))
4162 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4163 return SUBREG_REG (XEXP (x, 0));
4164 break;
4166 #ifdef HAVE_cc0
4167 case COMPARE:
4168 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4169 using cc0, in which case we want to leave it as a COMPARE
4170 so we can distinguish it from a register-register copy. */
4171 if (XEXP (x, 1) == const0_rtx)
4172 return XEXP (x, 0);
4174 /* x - 0 is the same as x unless x's mode has signed zeros and
4175 allows rounding towards -infinity. Under those conditions,
4176 0 - 0 is -0. */
4177 if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
4178 && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
4179 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4180 return XEXP (x, 0);
4181 break;
4182 #endif
4184 case CONST:
4185 /* (const (const X)) can become (const X). Do it this way rather than
4186 returning the inner CONST since CONST can be shared with a
4187 REG_EQUAL note. */
4188 if (GET_CODE (XEXP (x, 0)) == CONST)
4189 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4190 break;
4192 #ifdef HAVE_lo_sum
4193 case LO_SUM:
4194 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4195 can add in an offset. find_split_point will split this address up
4196 again if it doesn't match. */
4197 if (GET_CODE (XEXP (x, 0)) == HIGH
4198 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4199 return XEXP (x, 1);
4200 break;
4201 #endif
4203 case PLUS:
4204 /* If we have (plus (plus A const) B), associate it so that CONST is
4205 outermost. That's because that's the way indexed addresses are
4206 supposed to appear. This code used to check many more cases, but
4207 they are now checked elsewhere. */
4208 if (GET_CODE (XEXP (x, 0)) == PLUS
4209 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4210 return gen_binary (PLUS, mode,
4211 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4212 XEXP (x, 1)),
4213 XEXP (XEXP (x, 0), 1));
4215 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4216 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4217 bit-field and can be replaced by either a sign_extend or a
4218 sign_extract. The `and' may be a zero_extend and the two
4219 <c>, -<c> constants may be reversed. */
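/* A worked instance (illustrative): with pow2 = 256 and c = 128,
   (plus:SI (xor:SI (and:SI X (const_int 255)) (const_int 128))
   (const_int -128))
   sign-extends the low 8 bits of X and is rewritten below as
   (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24)).  */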
4220 if (GET_CODE (XEXP (x, 0)) == XOR
4221 && GET_CODE (XEXP (x, 1)) == CONST_INT
4222 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4223 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4224 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4225 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4226 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4227 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4228 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4229 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4230 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4231 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4232 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4233 == (unsigned int) i + 1))))
4234 return simplify_shift_const
4235 (NULL_RTX, ASHIFTRT, mode,
4236 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4237 XEXP (XEXP (XEXP (x, 0), 0), 0),
4238 GET_MODE_BITSIZE (mode) - (i + 1)),
4239 GET_MODE_BITSIZE (mode) - (i + 1));
4241 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4242 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4243 is 1. This produces better code than the alternative immediately
4244 below. */
4245 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4246 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4247 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4248 && (reversed = reversed_comparison (XEXP (x, 0), mode,
4249 XEXP (XEXP (x, 0), 0),
4250 XEXP (XEXP (x, 0), 1))))
4251 return
4252 simplify_gen_unary (NEG, mode, reversed, mode);
4254 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4255 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4256 the bitsize of the mode - 1. This allows simplification of
4257 "a = (b & 8) == 0;" */
4258 if (XEXP (x, 1) == constm1_rtx
4259 && GET_CODE (XEXP (x, 0)) != REG
4260 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4261 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
4262 && nonzero_bits (XEXP (x, 0), mode) == 1)
4263 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4264 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4265 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4266 GET_MODE_BITSIZE (mode) - 1),
4267 GET_MODE_BITSIZE (mode) - 1);
4269 /* If we are adding two things that have no bits in common, convert
4270 the addition into an IOR. This will often be further simplified,
4271 for example in cases like ((a & 1) + (a & 2)), which can
4272 become a & 3. */
4274 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4275 && (nonzero_bits (XEXP (x, 0), mode)
4276 & nonzero_bits (XEXP (x, 1), mode)) == 0)
4278 /* Try to simplify the expression further. */
4279 rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4280 temp = combine_simplify_rtx (tor, mode, last, in_dest);
4282 /* If we could, great. If not, do not go ahead with the IOR
4283 replacement, since PLUS appears in many special purpose
4284 address arithmetic instructions. */
4285 if (GET_CODE (temp) != CLOBBER && temp != tor)
4286 return temp;
4288 break;
4290 case MINUS:
4291 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4292 by reversing the comparison code if valid. */
4293 if (STORE_FLAG_VALUE == 1
4294 && XEXP (x, 0) == const1_rtx
4295 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4296 && (reversed = reversed_comparison (XEXP (x, 1), mode,
4297 XEXP (XEXP (x, 1), 0),
4298 XEXP (XEXP (x, 1), 1))))
4299 return reversed;
4301 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4302 (and <foo> (const_int pow2-1)) */
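/* E.g. (illustrative): with pow2 = 8,
   (minus:SI X (and:SI X (const_int -8)))
   becomes (and:SI X (const_int 7)).  */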
4303 if (GET_CODE (XEXP (x, 1)) == AND
4304 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4305 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4306 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4307 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4308 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4310 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4311 integers. */
4312 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4313 return gen_binary (MINUS, mode,
4314 gen_binary (MINUS, mode, XEXP (x, 0),
4315 XEXP (XEXP (x, 1), 0)),
4316 XEXP (XEXP (x, 1), 1));
4317 break;
4319 case MULT:
4320 /* If we have (mult (plus A B) C), apply the distributive law and then
4321 the inverse distributive law to see if things simplify. This
4322 occurs mostly in addresses, often when unrolling loops. */
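/* E.g. (illustrative):
   (mult:SI (plus:SI (reg 65) (const_int 4)) (const_int 8))
   is expanded to
   (plus:SI (mult:SI (reg 65) (const_int 8)) (const_int 32)),
   a form that matches indexed-address patterns more often.  */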
4324 if (GET_CODE (XEXP (x, 0)) == PLUS)
4326 x = apply_distributive_law
4327 (gen_binary (PLUS, mode,
4328 gen_binary (MULT, mode,
4329 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4330 gen_binary (MULT, mode,
4331 XEXP (XEXP (x, 0), 1),
4332 copy_rtx (XEXP (x, 1)))));
4334 if (GET_CODE (x) != MULT)
4335 return x;
4337 /* Try to simplify a*(b/c) as (a*b)/c. */
4338 if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4339 && GET_CODE (XEXP (x, 0)) == DIV)
4341 rtx tem = simplify_binary_operation (MULT, mode,
4342 XEXP (XEXP (x, 0), 0),
4343 XEXP (x, 1));
4344 if (tem)
4345 return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4347 break;
4349 case UDIV:
4350 /* If this is a divide by a power of two, treat it as a shift if
4351 its first operand is a shift. */
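/* E.g. (illustrative):
   (udiv:SI (lshiftrt:SI X (const_int 3)) (const_int 4))
   becomes (lshiftrt:SI X (const_int 5)) once simplify_shift_const
   merges the two shift counts.  */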
4352 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4353 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4354 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4355 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4356 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4357 || GET_CODE (XEXP (x, 0)) == ROTATE
4358 || GET_CODE (XEXP (x, 0)) == ROTATERT))
4359 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4360 break;
4362 case EQ: case NE:
4363 case GT: case GTU: case GE: case GEU:
4364 case LT: case LTU: case LE: case LEU:
4365 case UNEQ: case LTGT:
4366 case UNGT: case UNGE:
4367 case UNLT: case UNLE:
4368 case UNORDERED: case ORDERED:
4369 /* If the first operand is a condition code, we can't do anything
4370 with it. */
4371 if (GET_CODE (XEXP (x, 0)) == COMPARE
4372 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4373 #ifdef HAVE_cc0
4374 && XEXP (x, 0) != cc0_rtx
4375 #endif
4378 rtx op0 = XEXP (x, 0);
4379 rtx op1 = XEXP (x, 1);
4380 enum rtx_code new_code;
4382 if (GET_CODE (op0) == COMPARE)
4383 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4385 /* Simplify our comparison, if possible. */
4386 new_code = simplify_comparison (code, &op0, &op1);
4388 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4389 if only the low-order bit is possibly nonzero in X (such as when
4390 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4391 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4392 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4393 (plus X 1).
4395 Remove any ZERO_EXTRACT we made when thinking this was a
4396 comparison. It may now be simpler to use, e.g., an AND. If a
4397 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4398 the call to make_compound_operation in the SET case. */
4400 if (STORE_FLAG_VALUE == 1
4401 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4402 && op1 == const0_rtx
4403 && mode == GET_MODE (op0)
4404 && nonzero_bits (op0, mode) == 1)
4405 return gen_lowpart_for_combine (mode,
4406 expand_compound_operation (op0));
4408 else if (STORE_FLAG_VALUE == 1
4409 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4410 && op1 == const0_rtx
4411 && mode == GET_MODE (op0)
4412 && (num_sign_bit_copies (op0, mode)
4413 == GET_MODE_BITSIZE (mode)))
4415 op0 = expand_compound_operation (op0);
4416 return simplify_gen_unary (NEG, mode,
4417 gen_lowpart_for_combine (mode, op0),
4418 mode);
4421 else if (STORE_FLAG_VALUE == 1
4422 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4423 && op1 == const0_rtx
4424 && mode == GET_MODE (op0)
4425 && nonzero_bits (op0, mode) == 1)
4427 op0 = expand_compound_operation (op0);
4428 return gen_binary (XOR, mode,
4429 gen_lowpart_for_combine (mode, op0),
4430 const1_rtx);
4433 else if (STORE_FLAG_VALUE == 1
4434 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4435 && op1 == const0_rtx
4436 && mode == GET_MODE (op0)
4437 && (num_sign_bit_copies (op0, mode)
4438 == GET_MODE_BITSIZE (mode)))
4440 op0 = expand_compound_operation (op0);
4441 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4444 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4445 those above. */
4446 if (STORE_FLAG_VALUE == -1
4447 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4448 && op1 == const0_rtx
4449 && (num_sign_bit_copies (op0, mode)
4450 == GET_MODE_BITSIZE (mode)))
4451 return gen_lowpart_for_combine (mode,
4452 expand_compound_operation (op0));
4454 else if (STORE_FLAG_VALUE == -1
4455 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4456 && op1 == const0_rtx
4457 && mode == GET_MODE (op0)
4458 && nonzero_bits (op0, mode) == 1)
4460 op0 = expand_compound_operation (op0);
4461 return simplify_gen_unary (NEG, mode,
4462 gen_lowpart_for_combine (mode, op0),
4463 mode);
4466 else if (STORE_FLAG_VALUE == -1
4467 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4468 && op1 == const0_rtx
4469 && mode == GET_MODE (op0)
4470 && (num_sign_bit_copies (op0, mode)
4471 == GET_MODE_BITSIZE (mode)))
4473 op0 = expand_compound_operation (op0);
4474 return simplify_gen_unary (NOT, mode,
4475 gen_lowpart_for_combine (mode, op0),
4476 mode);
4479 /* If X is 0/1, (eq X 0) is X-1. */
4480 else if (STORE_FLAG_VALUE == -1
4481 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4482 && op1 == const0_rtx
4483 && mode == GET_MODE (op0)
4484 && nonzero_bits (op0, mode) == 1)
4486 op0 = expand_compound_operation (op0);
4487 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4490 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4491 one bit that might be nonzero, we can convert (ne x 0) to
4492 (ashift x c) where C puts the bit in the sign bit. Remove any
4493 AND with STORE_FLAG_VALUE when we are done, since we are only
4494 going to test the sign bit. */
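/* E.g. (illustrative): on a machine whose STORE_FLAG_VALUE is the
   SImode sign bit, if only bit 3 of X can be nonzero then (ne X 0)
   becomes (ashift:SI X (const_int 28)), leaving the flag value
   exactly in the sign bit.  */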
4495 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4496 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4497 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4498 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4499 && op1 == const0_rtx
4500 && mode == GET_MODE (op0)
4501 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4503 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4504 expand_compound_operation (op0),
4505 GET_MODE_BITSIZE (mode) - 1 - i);
4506 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4507 return XEXP (x, 0);
4508 else
4509 return x;
4512 /* If the code changed, return a whole new comparison. */
4513 if (new_code != code)
4514 return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4516 /* Otherwise, keep this operation, but maybe change its operands.
4517 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4518 SUBST (XEXP (x, 0), op0);
4519 SUBST (XEXP (x, 1), op1);
4521 break;
4523 case IF_THEN_ELSE:
4524 return simplify_if_then_else (x);
4526 case ZERO_EXTRACT:
4527 case SIGN_EXTRACT:
4528 case ZERO_EXTEND:
4529 case SIGN_EXTEND:
4530 /* If we are processing SET_DEST, we are done. */
4531 if (in_dest)
4532 return x;
4534 return expand_compound_operation (x);
4536 case SET:
4537 return simplify_set (x);
4539 case AND:
4540 case IOR:
4541 case XOR:
4542 return simplify_logical (x, last);
4544 case ABS:
4545 /* (abs (neg <foo>)) -> (abs <foo>) */
4546 if (GET_CODE (XEXP (x, 0)) == NEG)
4547 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4549 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4550 do nothing. */
4551 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4552 break;
4554 /* If operand is something known to be positive, ignore the ABS. */
4555 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4556 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4557 <= HOST_BITS_PER_WIDE_INT)
4558 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4559 & ((HOST_WIDE_INT) 1
4560 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4561 == 0)))
4562 return XEXP (x, 0);
4564 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4565 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4566 return gen_rtx_NEG (mode, XEXP (x, 0));
4568 break;
4570 case FFS:
4571 /* (ffs (*_extend <X>)) = (ffs <X>) */
4572 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4573 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4574 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4575 break;
4577 case FLOAT:
4578 /* (float (sign_extend <X>)) = (float <X>). */
4579 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4580 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4581 break;
4583 case ASHIFT:
4584 case LSHIFTRT:
4585 case ASHIFTRT:
4586 case ROTATE:
4587 case ROTATERT:
4588 /* If this is a shift by a constant amount, simplify it. */
4589 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4590 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4591 INTVAL (XEXP (x, 1)));
4593 #ifdef SHIFT_COUNT_TRUNCATED
4594 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4595 SUBST (XEXP (x, 1),
4596 force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4597 ((HOST_WIDE_INT) 1
4598 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4599 - 1,
4600 NULL_RTX, 0));
4601 #endif
4603 break;
4605 case VEC_SELECT:
4607 rtx op0 = XEXP (x, 0);
4608 rtx op1 = XEXP (x, 1);
4609 int len;
4611 if (GET_CODE (op1) != PARALLEL)
4612 abort ();
4613 len = XVECLEN (op1, 0);
4614 if (len == 1
4615 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4616 && GET_CODE (op0) == VEC_CONCAT)
4618 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4620 /* Try to find the element in the VEC_CONCAT. */
4621 for (;;)
4623 if (GET_MODE (op0) == GET_MODE (x))
4624 return op0;
4625 if (GET_CODE (op0) == VEC_CONCAT)
4627 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4628 if (op0_size < offset)
4629 op0 = XEXP (op0, 0);
4630 else
4632 offset -= op0_size;
4633 op0 = XEXP (op0, 1);
4636 else
4637 break;
4642 break;
4644 default:
4645 break;
4648 return x;
4651 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
4653 static rtx
4654 simplify_if_then_else (x)
4655 rtx x;
4657 enum machine_mode mode = GET_MODE (x);
4658 rtx cond = XEXP (x, 0);
4659 rtx true_rtx = XEXP (x, 1);
4660 rtx false_rtx = XEXP (x, 2);
4661 enum rtx_code true_code = GET_CODE (cond);
4662 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4663 rtx temp;
4664 int i;
4665 enum rtx_code false_code;
4666 rtx reversed;
4668 /* Simplify storing of the truth value. */
4669 if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4670 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4672 /* Also when the truth value has to be reversed. */
4673 if (comparison_p
4674 && true_rtx == const0_rtx && false_rtx == const_true_rtx
4675 && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4676 XEXP (cond, 1))))
4677 return reversed;
4679 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4680 in it is being compared against certain values. Get the true and false
4681 comparisons and see if that says anything about the value of each arm. */
4683 if (comparison_p
4684 && ((false_code = combine_reversed_comparison_code (cond))
4685 != UNKNOWN)
4686 && GET_CODE (XEXP (cond, 0)) == REG)
4688 HOST_WIDE_INT nzb;
4689 rtx from = XEXP (cond, 0);
4690 rtx true_val = XEXP (cond, 1);
4691 rtx false_val = true_val;
4692 int swapped = 0;
4694 /* If FALSE_CODE is EQ, swap the codes and arms. */
4696 if (false_code == EQ)
4698 swapped = 1, true_code = EQ, false_code = NE;
4699 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4702 /* If we are comparing against zero and the expression being tested has
4703 only a single bit that might be nonzero, that is its value when it is
4704 not equal to zero. Similarly if it is known to be -1 or 0. */
4706 if (true_code == EQ && true_val == const0_rtx
4707 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4708 false_code = EQ, false_val = GEN_INT (nzb);
4709 else if (true_code == EQ && true_val == const0_rtx
4710 && (num_sign_bit_copies (from, GET_MODE (from))
4711 == GET_MODE_BITSIZE (GET_MODE (from))))
4712 false_code = EQ, false_val = constm1_rtx;
4714 /* Now simplify an arm if we know the value of the register in the
4715 branch and it is used in the arm. Be careful due to the potential
4716 of locally-shared RTL. */
4718 if (reg_mentioned_p (from, true_rtx))
4719 true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4720 from, true_val),
4721 pc_rtx, pc_rtx, 0, 0);
4722 if (reg_mentioned_p (from, false_rtx))
4723 false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4724 from, false_val),
4725 pc_rtx, pc_rtx, 0, 0);
4727 SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4728 SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4730 true_rtx = XEXP (x, 1);
4731 false_rtx = XEXP (x, 2);
4732 true_code = GET_CODE (cond);
4735 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4736 reversed, do so to avoid needing two sets of patterns for
4737 subtract-and-branch insns. Similarly if we have a constant in the true
4738 arm, the false arm is the same as the first operand of the comparison, or
4739 the false arm is more complicated than the true arm. */
4741 if (comparison_p
4742 && combine_reversed_comparison_code (cond) != UNKNOWN
4743 && (true_rtx == pc_rtx
4744 || (CONSTANT_P (true_rtx)
4745 && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4746 || true_rtx == const0_rtx
4747 || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4748 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4749 || (GET_CODE (true_rtx) == SUBREG
4750 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4751 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4752 || reg_mentioned_p (true_rtx, false_rtx)
4753 || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4755 true_code = reversed_comparison_code (cond, NULL);
4756 SUBST (XEXP (x, 0),
4757 reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4758 XEXP (cond, 1)));
4760 SUBST (XEXP (x, 1), false_rtx);
4761 SUBST (XEXP (x, 2), true_rtx);
4763 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4764 cond = XEXP (x, 0);
4766 /* It is possible that the conditional has been simplified out. */
4767 true_code = GET_CODE (cond);
4768 comparison_p = GET_RTX_CLASS (true_code) == '<';
4771 /* If the two arms are identical, we don't need the comparison. */
4773 if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4774 return true_rtx;
4776 /* Convert a == b ? b : a to "a". */
4777 if (true_code == EQ && ! side_effects_p (cond)
4778 && !HONOR_NANS (mode)
4779 && rtx_equal_p (XEXP (cond, 0), false_rtx)
4780 && rtx_equal_p (XEXP (cond, 1), true_rtx))
4781 return false_rtx;
4782 else if (true_code == NE && ! side_effects_p (cond)
4783 && !HONOR_NANS (mode)
4784 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4785 && rtx_equal_p (XEXP (cond, 1), false_rtx))
4786 return true_rtx;
4788 /* Look for cases where we have (abs x) or (neg (abs X)). */
4790 if (GET_MODE_CLASS (mode) == MODE_INT
4791 && GET_CODE (false_rtx) == NEG
4792 && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4793 && comparison_p
4794 && rtx_equal_p (true_rtx, XEXP (cond, 0))
4795 && ! side_effects_p (true_rtx))
4796 switch (true_code)
4798 case GT:
4799 case GE:
4800 return simplify_gen_unary (ABS, mode, true_rtx, mode);
4801 case LT:
4802 case LE:
4803 return
4804 simplify_gen_unary (NEG, mode,
4805 simplify_gen_unary (ABS, mode, true_rtx, mode),
4806 mode);
4807 default:
4808 break;
4811 /* Look for MIN or MAX. */
4813 if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4814 && comparison_p
4815 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4816 && rtx_equal_p (XEXP (cond, 1), false_rtx)
4817 && ! side_effects_p (cond))
4818 switch (true_code)
4820 case GE:
4821 case GT:
4822 return gen_binary (SMAX, mode, true_rtx, false_rtx);
4823 case LE:
4824 case LT:
4825 return gen_binary (SMIN, mode, true_rtx, false_rtx);
4826 case GEU:
4827 case GTU:
4828 return gen_binary (UMAX, mode, true_rtx, false_rtx);
4829 case LEU:
4830 case LTU:
4831 return gen_binary (UMIN, mode, true_rtx, false_rtx);
4832 default:
4833 break;
4836 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4837 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4838 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4839 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4840 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4841 neither 1 nor -1, but it isn't worth checking for. */
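/* An illustrative case (assuming STORE_FLAG_VALUE == 1): for
   (if_then_else (ne A B) (plus:SI Z (const_int 4)) Z),
   OP is PLUS, C1 is (const_int 4), and the code below builds
   (plus:SI Z (mult:SI (ne:SI A B) (const_int 4))).  */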
4843 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4844 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4846 rtx t = make_compound_operation (true_rtx, SET);
4847 rtx f = make_compound_operation (false_rtx, SET);
4848 rtx cond_op0 = XEXP (cond, 0);
4849 rtx cond_op1 = XEXP (cond, 1);
4850 enum rtx_code op = NIL, extend_op = NIL;
4851 enum machine_mode m = mode;
4852 rtx z = 0, c1 = NULL_RTX;
4854 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4855 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4856 || GET_CODE (t) == ASHIFT
4857 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4858 && rtx_equal_p (XEXP (t, 0), f))
4859 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4861 /* If an identity-zero op is commutative, check whether there
4862 would be a match if we swapped the operands. */
4863 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4864 || GET_CODE (t) == XOR)
4865 && rtx_equal_p (XEXP (t, 1), f))
4866 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4867 else if (GET_CODE (t) == SIGN_EXTEND
4868 && (GET_CODE (XEXP (t, 0)) == PLUS
4869 || GET_CODE (XEXP (t, 0)) == MINUS
4870 || GET_CODE (XEXP (t, 0)) == IOR
4871 || GET_CODE (XEXP (t, 0)) == XOR
4872 || GET_CODE (XEXP (t, 0)) == ASHIFT
4873 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4874 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4875 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4876 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4877 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4878 && (num_sign_bit_copies (f, GET_MODE (f))
4879 > (unsigned int)
4880 (GET_MODE_BITSIZE (mode)
4881 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4883 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4884 extend_op = SIGN_EXTEND;
4885 m = GET_MODE (XEXP (t, 0));
4887 else if (GET_CODE (t) == SIGN_EXTEND
4888 && (GET_CODE (XEXP (t, 0)) == PLUS
4889 || GET_CODE (XEXP (t, 0)) == IOR
4890 || GET_CODE (XEXP (t, 0)) == XOR)
4891 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4892 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4893 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4894 && (num_sign_bit_copies (f, GET_MODE (f))
4895 > (unsigned int)
4896 (GET_MODE_BITSIZE (mode)
4897 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4899 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4900 extend_op = SIGN_EXTEND;
4901 m = GET_MODE (XEXP (t, 0));
4903 else if (GET_CODE (t) == ZERO_EXTEND
4904 && (GET_CODE (XEXP (t, 0)) == PLUS
4905 || GET_CODE (XEXP (t, 0)) == MINUS
4906 || GET_CODE (XEXP (t, 0)) == IOR
4907 || GET_CODE (XEXP (t, 0)) == XOR
4908 || GET_CODE (XEXP (t, 0)) == ASHIFT
4909 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4910 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4911 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4912 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4913 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4914 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4915 && ((nonzero_bits (f, GET_MODE (f))
4916 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4917 == 0))
4919 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4920 extend_op = ZERO_EXTEND;
4921 m = GET_MODE (XEXP (t, 0));
4923 else if (GET_CODE (t) == ZERO_EXTEND
4924 && (GET_CODE (XEXP (t, 0)) == PLUS
4925 || GET_CODE (XEXP (t, 0)) == IOR
4926 || GET_CODE (XEXP (t, 0)) == XOR)
4927 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4928 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4929 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4930 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4931 && ((nonzero_bits (f, GET_MODE (f))
4932 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4933 == 0))
4935 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4936 extend_op = ZERO_EXTEND;
4937 m = GET_MODE (XEXP (t, 0));
4940 if (z)
4942 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4943 pc_rtx, pc_rtx, 0, 0);
4944 temp = gen_binary (MULT, m, temp,
4945 gen_binary (MULT, m, c1, const_true_rtx));
4946 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4947 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4949 if (extend_op != NIL)
4950 temp = simplify_gen_unary (extend_op, mode, temp, m);
4952 return temp;
4956 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4957 1 and C1 is a single bit, or A is known to be 0 or -1 and C1 is the
4958 negation of a single bit, we can convert this operation to a shift. We
4959 can actually do this more generally, but it doesn't seem worth it. */
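/* As an illustration, if A is known to be 0 or 1,
   (if_then_else (ne:SI A (const_int 0)) (const_int 8) (const_int 0))
   can become (ashift:SI A (const_int 3)).  */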
4961 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4962 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
4963 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4964 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
4965 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4966 == GET_MODE_BITSIZE (mode))
4967 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
4968 return
4969 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4970 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4972 return x;
4975 /* Simplify X, a SET expression. Return the new expression. */
4977 static rtx
4978 simplify_set (x)
4979 rtx x;
4981 rtx src = SET_SRC (x);
4982 rtx dest = SET_DEST (x);
4983 enum machine_mode mode
4984 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4985 rtx other_insn;
4986 rtx *cc_use;
4988 /* (set (pc) (return)) gets written as (return). */
4989 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4990 return src;
4992 /* Now that we know for sure which bits of SRC we are using, see if we can
4993 simplify the expression for the object knowing that we only need the
4994 low-order bits. */
4996 if (GET_MODE_CLASS (mode) == MODE_INT)
4998 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
4999 SUBST (SET_SRC (x), src);
5002 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5003 the comparison result and try to simplify it unless we already have used
5004 undobuf.other_insn. */
5005 if ((GET_CODE (src) == COMPARE
5006 #ifdef HAVE_cc0
5007 || dest == cc0_rtx
5008 #endif
5009 )
5010 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
5011 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
5012 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
5013 && rtx_equal_p (XEXP (*cc_use, 0), dest))
5015 enum rtx_code old_code = GET_CODE (*cc_use);
5016 enum rtx_code new_code;
5017 rtx op0, op1;
5018 int other_changed = 0;
5019 enum machine_mode compare_mode = GET_MODE (dest);
5021 if (GET_CODE (src) == COMPARE)
5022 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5023 else
5024 op0 = src, op1 = const0_rtx;
5026 /* Simplify our comparison, if possible. */
5027 new_code = simplify_comparison (old_code, &op0, &op1);
5029 #ifdef EXTRA_CC_MODES
5030 /* If this machine has CC modes other than CCmode, check to see if we
5031 need to use a different CC mode here. */
5032 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5033 #endif /* EXTRA_CC_MODES */
5035 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
5036 /* If the mode changed, we have to change SET_DEST, the mode in the
5037 compare, and the mode in the place SET_DEST is used. If SET_DEST is
5038 a hard register, just build new versions with the proper mode. If it
5039 is a pseudo, we lose unless it is the only time we set the pseudo, in
5040 which case we can safely change its mode. */
5041 if (compare_mode != GET_MODE (dest))
5043 unsigned int regno = REGNO (dest);
5044 rtx new_dest = gen_rtx_REG (compare_mode, regno);
5046 if (regno < FIRST_PSEUDO_REGISTER
5047 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
5049 if (regno >= FIRST_PSEUDO_REGISTER)
5050 SUBST (regno_reg_rtx[regno], new_dest);
5052 SUBST (SET_DEST (x), new_dest);
5053 SUBST (XEXP (*cc_use, 0), new_dest);
5054 other_changed = 1;
5056 dest = new_dest;
5059 #endif
5061 /* If the code changed, we have to build a new comparison in
5062 undobuf.other_insn. */
5063 if (new_code != old_code)
5065 unsigned HOST_WIDE_INT mask;
5067 SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5068 dest, const0_rtx));
5070 /* If the only change we made was to change an EQ into an NE or
5071 vice versa, OP0 has only one bit that might be nonzero, and OP1
5072 is zero, check if changing the user of the condition code will
5073 produce a valid insn. If it won't, we can keep the original code
5074 in that insn by surrounding our operation with an XOR. */
5076 if (((old_code == NE && new_code == EQ)
5077 || (old_code == EQ && new_code == NE))
5078 && ! other_changed && op1 == const0_rtx
5079 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5080 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5082 rtx pat = PATTERN (other_insn), note = 0;
5084 if ((recog_for_combine (&pat, other_insn, &note) < 0
5085 && ! check_asm_operands (pat)))
5087 PUT_CODE (*cc_use, old_code);
5088 other_insn = 0;
5090 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
5094 other_changed = 1;
5097 if (other_changed)
5098 undobuf.other_insn = other_insn;
5100 #ifdef HAVE_cc0
5101 /* If we are now comparing against zero, change our source if
5102 needed. If we do not use cc0, we always have a COMPARE. */
5103 if (op1 == const0_rtx && dest == cc0_rtx)
5105 SUBST (SET_SRC (x), op0);
5106 src = op0;
5108 else
5109 #endif
5111 /* Otherwise, if we didn't previously have a COMPARE in the
5112 correct mode, we need one. */
5113 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5115 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5116 src = SET_SRC (x);
5118 else
5120 /* Otherwise, update the COMPARE if needed. */
5121 SUBST (XEXP (src, 0), op0);
5122 SUBST (XEXP (src, 1), op1);
5125 else
5127 /* Get SET_SRC in a form where we have placed back any
5128 compound expressions. Then do the checks below. */
5129 src = make_compound_operation (src, SET);
5130 SUBST (SET_SRC (x), src);
5133 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5134 and X being a REG or (subreg (reg)), we may be able to convert this to
5135 (set (subreg:m2 x) (op)).
5137 We can always do this if M1 is narrower than M2 because that means that
5138 we only care about the low bits of the result.
5140 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5141 perform a narrower operation than requested since the high-order bits will
5142 be undefined. On machines where it is defined, this transformation is safe
5143 as long as M1 and M2 have the same number of words. */
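/* As an illustration, with X a QImode register,
   (set X (subreg:QI (plus:SI Y Z) 0))
   can become (set (subreg:SI X 0) (plus:SI Y Z))
   since only the low byte of the PLUS result is needed.  */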
5145 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5146 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5147 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5148 / UNITS_PER_WORD)
5149 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5150 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5151 #ifndef WORD_REGISTER_OPERATIONS
5152 && (GET_MODE_SIZE (GET_MODE (src))
5153 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5154 #endif
5155 #ifdef CLASS_CANNOT_CHANGE_MODE
5156 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5157 && (TEST_HARD_REG_BIT
5158 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
5159 REGNO (dest)))
5160 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
5161 GET_MODE (SUBREG_REG (src))))
5162 #endif
5163 && (GET_CODE (dest) == REG
5164 || (GET_CODE (dest) == SUBREG
5165 && GET_CODE (SUBREG_REG (dest)) == REG)))
5167 SUBST (SET_DEST (x),
5168 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5169 dest));
5170 SUBST (SET_SRC (x), SUBREG_REG (src));
5172 src = SET_SRC (x), dest = SET_DEST (x);
5175 #ifdef HAVE_cc0
5176 /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5177 in SRC. */
5178 if (dest == cc0_rtx
5179 && GET_CODE (src) == SUBREG
5180 && subreg_lowpart_p (src)
5181 && (GET_MODE_BITSIZE (GET_MODE (src))
5182 < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
5184 rtx inner = SUBREG_REG (src);
5185 enum machine_mode inner_mode = GET_MODE (inner);
5187 /* Here we make sure that we don't have a sign bit on. */
5188 if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
5189 && (nonzero_bits (inner, inner_mode)
5190 < ((unsigned HOST_WIDE_INT) 1
5191 << (GET_MODE_BITSIZE (inner_mode) - 1))))
5193 SUBST (SET_SRC (x), inner);
5194 src = SET_SRC (x);
5197 #endif
5199 #ifdef LOAD_EXTEND_OP
5200 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5201 would require a paradoxical subreg. Replace the subreg with a
5202 zero_extend to avoid the reload that would otherwise be required. */
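/* As an illustration, on a target where LOAD_EXTEND_OP (QImode)
   is ZERO_EXTEND,
   (set R:SI (subreg:SI (mem:QI ADDR) 0))
   can become (set R:SI (zero_extend:SI (mem:QI ADDR))).  */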
5204 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5205 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5206 && SUBREG_BYTE (src) == 0
5207 && (GET_MODE_SIZE (GET_MODE (src))
5208 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5209 && GET_CODE (SUBREG_REG (src)) == MEM)
5211 SUBST (SET_SRC (x),
5212 gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5213 GET_MODE (src), SUBREG_REG (src)));
5215 src = SET_SRC (x);
5217 #endif
5219 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5220 are comparing an item known to be 0 or -1 against 0, use a logical
5221 operation instead. Check for one of the arms being an IOR of the other
5222 arm with some value. We compute three terms to be IOR'ed together. In
5223 practice, at most two will be nonzero. Then we do the IORs. */
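/* As an illustration, if M is known to be 0 or -1 and no conditional
   move is available,
   (if_then_else (ne M (const_int 0)) A B)
   can become (ior (and M A) (and (not M) B)).  */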
5225 if (GET_CODE (dest) != PC
5226 && GET_CODE (src) == IF_THEN_ELSE
5227 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5228 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5229 && XEXP (XEXP (src, 0), 1) == const0_rtx
5230 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5231 #ifdef HAVE_conditional_move
5232 && ! can_conditionally_move_p (GET_MODE (src))
5233 #endif
5234 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5235 GET_MODE (XEXP (XEXP (src, 0), 0)))
5236 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5237 && ! side_effects_p (src))
5239 rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5240 ? XEXP (src, 1) : XEXP (src, 2));
5241 rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5242 ? XEXP (src, 2) : XEXP (src, 1));
5243 rtx term1 = const0_rtx, term2, term3;
5245 if (GET_CODE (true_rtx) == IOR
5246 && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5247 term1 = false_rtx, true_rtx = XEXP(true_rtx, 1), false_rtx = const0_rtx;
5248 else if (GET_CODE (true_rtx) == IOR
5249 && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5250 term1 = false_rtx, true_rtx = XEXP(true_rtx, 0), false_rtx = const0_rtx;
5251 else if (GET_CODE (false_rtx) == IOR
5252 && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5253 term1 = true_rtx, false_rtx = XEXP(false_rtx, 1), true_rtx = const0_rtx;
5254 else if (GET_CODE (false_rtx) == IOR
5255 && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5256 term1 = true_rtx, false_rtx = XEXP(false_rtx, 0), true_rtx = const0_rtx;
5258 term2 = gen_binary (AND, GET_MODE (src),
5259 XEXP (XEXP (src, 0), 0), true_rtx);
5260 term3 = gen_binary (AND, GET_MODE (src),
5261 simplify_gen_unary (NOT, GET_MODE (src),
5262 XEXP (XEXP (src, 0), 0),
5263 GET_MODE (src)),
5264 false_rtx);
5266 SUBST (SET_SRC (x),
5267 gen_binary (IOR, GET_MODE (src),
5268 gen_binary (IOR, GET_MODE (src), term1, term2),
5269 term3));
5271 src = SET_SRC (x);
5274 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5275 whole thing fail. */
5276 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5277 return src;
5278 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5279 return dest;
5280 else
5281 /* Convert this into a field assignment operation, if possible. */
5282 return make_field_assignment (x);
5285 /* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5286 result. LAST is nonzero if this is the last retry. */
5288 static rtx
5289 simplify_logical (x, last)
5290 rtx x;
5291 int last;
5293 enum machine_mode mode = GET_MODE (x);
5294 rtx op0 = XEXP (x, 0);
5295 rtx op1 = XEXP (x, 1);
5296 rtx reversed;
5298 switch (GET_CODE (x))
5300 case AND:
5301 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5302 insn (and may simplify more). */
5303 if (GET_CODE (op0) == XOR
5304 && rtx_equal_p (XEXP (op0, 0), op1)
5305 && ! side_effects_p (op1))
5306 x = gen_binary (AND, mode,
5307 simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5308 op1);
5310 if (GET_CODE (op0) == XOR
5311 && rtx_equal_p (XEXP (op0, 1), op1)
5312 && ! side_effects_p (op1))
5313 x = gen_binary (AND, mode,
5314 simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5315 op1);
5317 /* Similarly for (~(A ^ B)) & A. */
5318 if (GET_CODE (op0) == NOT
5319 && GET_CODE (XEXP (op0, 0)) == XOR
5320 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5321 && ! side_effects_p (op1))
5322 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5324 if (GET_CODE (op0) == NOT
5325 && GET_CODE (XEXP (op0, 0)) == XOR
5326 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5327 && ! side_effects_p (op1))
5328 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5330 /* We can call simplify_and_const_int only if we don't lose
5331 any (sign) bits when converting INTVAL (op1) to
5332 "unsigned HOST_WIDE_INT". */
5333 if (GET_CODE (op1) == CONST_INT
5334 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5335 || INTVAL (op1) > 0))
5337 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5339 /* If we have (ior (and X C1) C2) and the next restart would be
5340 the last, simplify this by making C1 as small as possible
5341 and then exit. */
5342 if (last
5343 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5344 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5345 && GET_CODE (op1) == CONST_INT)
5346 return gen_binary (IOR, mode,
5347 gen_binary (AND, mode, XEXP (op0, 0),
5348 GEN_INT (INTVAL (XEXP (op0, 1))
5349 & ~INTVAL (op1))), op1);
5351 if (GET_CODE (x) != AND)
5352 return x;
5354 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5355 || GET_RTX_CLASS (GET_CODE (x)) == '2')
5356 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5359 /* Convert (A | B) & A to A. */
5360 if (GET_CODE (op0) == IOR
5361 && (rtx_equal_p (XEXP (op0, 0), op1)
5362 || rtx_equal_p (XEXP (op0, 1), op1))
5363 && ! side_effects_p (XEXP (op0, 0))
5364 && ! side_effects_p (XEXP (op0, 1)))
5365 return op1;
5367 /* In the following group of tests (and those in case IOR below),
5368 we start with some combination of logical operations and apply
5369 the distributive law followed by the inverse distributive law.
5370 Most of the time, this results in no change. However, if some of
5371 the operands are the same or inverses of each other, simplifications
5372 will result.
5374 For example, (and (ior A B) (not B)) can occur as the result of
5375 expanding a bit field assignment. When we apply the distributive
5376 law to this, we get (ior (and A (not B)) (and B (not B))),
5377 which then simplifies to (and A (not B)).
5379 If we have (and (ior A B) C), apply the distributive law and then
5380 the inverse distributive law to see if things simplify. */
5382 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5384 x = apply_distributive_law
5385 (gen_binary (GET_CODE (op0), mode,
5386 gen_binary (AND, mode, XEXP (op0, 0), op1),
5387 gen_binary (AND, mode, XEXP (op0, 1),
5388 copy_rtx (op1))));
5389 if (GET_CODE (x) != AND)
5390 return x;
5393 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5394 return apply_distributive_law
5395 (gen_binary (GET_CODE (op1), mode,
5396 gen_binary (AND, mode, XEXP (op1, 0), op0),
5397 gen_binary (AND, mode, XEXP (op1, 1),
5398 copy_rtx (op0))));
5400 /* Similarly, taking advantage of the fact that
5401 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
5403 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5404 return apply_distributive_law
5405 (gen_binary (XOR, mode,
5406 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5407 gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5408 XEXP (op1, 1))));
5410 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5411 return apply_distributive_law
5412 (gen_binary (XOR, mode,
5413 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5414 gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5415 break;
5417 case IOR:
5418 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
5419 if (GET_CODE (op1) == CONST_INT
5420 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5421 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5422 return op1;
5424 /* Convert (A & B) | A to A. */
5425 if (GET_CODE (op0) == AND
5426 && (rtx_equal_p (XEXP (op0, 0), op1)
5427 || rtx_equal_p (XEXP (op0, 1), op1))
5428 && ! side_effects_p (XEXP (op0, 0))
5429 && ! side_effects_p (XEXP (op0, 1)))
5430 return op1;
5432 /* If we have (ior (and A B) C), apply the distributive law and then
5433 the inverse distributive law to see if things simplify. */
5435 if (GET_CODE (op0) == AND)
5437 x = apply_distributive_law
5438 (gen_binary (AND, mode,
5439 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5440 gen_binary (IOR, mode, XEXP (op0, 1),
5441 copy_rtx (op1))));
5443 if (GET_CODE (x) != IOR)
5444 return x;
5447 if (GET_CODE (op1) == AND)
5449 x = apply_distributive_law
5450 (gen_binary (AND, mode,
5451 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5452 gen_binary (IOR, mode, XEXP (op1, 1),
5453 copy_rtx (op0))));
5455 if (GET_CODE (x) != IOR)
5456 return x;
5459 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5460 mode size to (rotate A CX). */
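/* As an illustration, in SImode
   (ior (ashift A (const_int 8)) (lshiftrt A (const_int 24)))
   becomes (rotate A (const_int 8)); this is how the C idiom
   (x << 8) | (x >> 24) for a 32-bit rotate is recognized.  */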
5462 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5463 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5464 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5465 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5466 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5467 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5468 == GET_MODE_BITSIZE (mode)))
5469 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5470 (GET_CODE (op0) == ASHIFT
5471 ? XEXP (op0, 1) : XEXP (op1, 1)));
5473 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5474 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT; if the PLUS
5475 does not affect any of the bits in OP1, it can really be done
5476 as a PLUS and we can associate. We do this by seeing if OP1
5477 can be safely shifted left C bits. */
5478 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5479 && GET_CODE (XEXP (op0, 0)) == PLUS
5480 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5481 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5482 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5484 int count = INTVAL (XEXP (op0, 1));
5485 HOST_WIDE_INT mask = INTVAL (op1) << count;
5487 if (mask >> count == INTVAL (op1)
5488 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5490 SUBST (XEXP (XEXP (op0, 0), 1),
5491 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5492 return op0;
5495 break;
5497 case XOR:
5498 /* If we are XORing two things that have no bits in common,
5499 convert them into an IOR. This helps to detect rotation encoded
5500 using those methods and possibly other simplifications. */
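/* As an illustration,
   (xor (and X (const_int 240)) (and Y (const_int 15)))
   has disjoint nonzero bits in its operands, so it becomes
   (ior (and X (const_int 240)) (and Y (const_int 15))).  */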
5502 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5503 && (nonzero_bits (op0, mode)
5504 & nonzero_bits (op1, mode)) == 0)
5505 return (gen_binary (IOR, mode, op0, op1));
5507 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5508 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5509 (NOT y). */
5511 int num_negated = 0;
5513 if (GET_CODE (op0) == NOT)
5514 num_negated++, op0 = XEXP (op0, 0);
5515 if (GET_CODE (op1) == NOT)
5516 num_negated++, op1 = XEXP (op1, 0);
5518 if (num_negated == 2)
5520 SUBST (XEXP (x, 0), op0);
5521 SUBST (XEXP (x, 1), op1);
5523 else if (num_negated == 1)
5524 return
5525 simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
5526 mode);
5529 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5530 correspond to a machine insn or result in further simplifications
5531 if B is a constant. */
5533 if (GET_CODE (op0) == AND
5534 && rtx_equal_p (XEXP (op0, 1), op1)
5535 && ! side_effects_p (op1))
5536 return gen_binary (AND, mode,
5537 simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5538 op1);
5540 else if (GET_CODE (op0) == AND
5541 && rtx_equal_p (XEXP (op0, 0), op1)
5542 && ! side_effects_p (op1))
5543 return gen_binary (AND, mode,
5544 simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5545 op1);
5547 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5548 comparison if STORE_FLAG_VALUE is 1. */
5549 if (STORE_FLAG_VALUE == 1
5550 && op1 == const1_rtx
5551 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5552 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5553 XEXP (op0, 1))))
5554 return reversed;
5556 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5557 is (lt foo (const_int 0)), so we can perform the above
5558 simplification if STORE_FLAG_VALUE is 1. */
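/* As an illustration, in SImode (lshiftrt X (const_int 31)) is
   (lt X (const_int 0)) as a 0/1 value, so
   (xor (lshiftrt X (const_int 31)) (const_int 1))
   can become (ge X (const_int 0)).  */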
5560 if (STORE_FLAG_VALUE == 1
5561 && op1 == const1_rtx
5562 && GET_CODE (op0) == LSHIFTRT
5563 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5564 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5565 return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5567 /* (xor (comparison foo bar) (const_int sign-bit))
5568 when STORE_FLAG_VALUE is the sign bit. */
5569 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5570 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5571 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5572 && op1 == const_true_rtx
5573 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5574 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5575 XEXP (op0, 1))))
5576 return reversed;
5578 break;
5580 default:
5581 abort ();
5584 return x;
5587 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5588 operations" because they can be replaced with two more basic operations.
5589 ZERO_EXTEND is also considered "compound" because it can be replaced with
5590 an AND operation, which is simpler, though only one operation.
5592 The function expand_compound_operation is called with an rtx expression
5593 and will convert it to the appropriate shifts and AND operations,
5594 simplifying at each stage.
5596 The function make_compound_operation is called to convert an expression
5597 consisting of shifts and ANDs into the equivalent compound expression.
5598 It is the inverse of this function, loosely speaking. */
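/* As an illustration, expand_compound_operation rewrites
   (zero_extend:SI (reg:QI R)) as a pair of shifts that typically
   simplifies to (and:SI (subreg:SI (reg:QI R) 0) (const_int 255)),
   and make_compound_operation performs the inverse rewrite.  */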
5600 static rtx
5601 expand_compound_operation (x)
5602 rtx x;
5604 unsigned HOST_WIDE_INT pos = 0, len;
5605 int unsignedp = 0;
5606 unsigned int modewidth;
5607 rtx tem;
5609 switch (GET_CODE (x))
5611 case ZERO_EXTEND:
5612 unsignedp = 1;
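/* ... fall through ... */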
5613 case SIGN_EXTEND:
5614 /* We can't necessarily use a const_int for a multiword mode;
5615 it depends on implicitly extending the value.
5616 Since we don't know the right way to extend it,
5617 we can't tell whether the implicit way is right.
5619 Even for a mode that is no wider than a const_int,
5620 we can't win, because we need to sign extend one of its bits through
5621 the rest of it, and we don't know which bit. */
5622 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5623 return x;
5625 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5626 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5627 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5628 reloaded. If not for that, MEM's would very rarely be safe.
5630 Reject MODEs bigger than a word, because we might not be able
5631 to reference a two-register group starting with an arbitrary register
5632 (and currently gen_lowpart might crash for a SUBREG). */
5634 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5635 return x;
5637 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5638 /* If the inner object has VOIDmode (the only way this can happen
5639 is if it is an ASM_OPERANDS), we can't do anything since we don't
5640 know how much masking to do. */
5641 if (len == 0)
5642 return x;
5644 break;
5646 case ZERO_EXTRACT:
5647 unsignedp = 1;
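/* ... fall through ... */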
5648 case SIGN_EXTRACT:
5649 /* If the operand is a CLOBBER, just return it. */
5650 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5651 return XEXP (x, 0);
5653 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5654 || GET_CODE (XEXP (x, 2)) != CONST_INT
5655 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5656 return x;
5658 len = INTVAL (XEXP (x, 1));
5659 pos = INTVAL (XEXP (x, 2));
5661 /* If this goes outside the object being extracted, replace the object
5662 with a (use (mem ...)) construct that only combine understands
5663 and is used only for this purpose. */
5664 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5665 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5667 if (BITS_BIG_ENDIAN)
5668 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5670 break;
5672 default:
5673 return x;
5675 /* Convert sign extension to zero extension, if we know that the high
5676 bit is not set, as this is easier to optimize. It will be converted
5677 back to a cheaper alternative in make_extraction. */
5678 if (GET_CODE (x) == SIGN_EXTEND
5679 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5680 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5681 & ~(((unsigned HOST_WIDE_INT)
5682 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5683 >> 1))
5684 == 0)))
5686 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5687 return expand_compound_operation (temp);
5690 /* We can optimize some special cases of ZERO_EXTEND. */
5691 if (GET_CODE (x) == ZERO_EXTEND)
5693 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5694 know that the last value didn't have any inappropriate bits
5695 set. */
5696 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5697 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5698 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5699 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5700 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5701 return XEXP (XEXP (x, 0), 0);
5703 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5704 if (GET_CODE (XEXP (x, 0)) == SUBREG
5705 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5706 && subreg_lowpart_p (XEXP (x, 0))
5707 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5708 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5709 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5710 return SUBREG_REG (XEXP (x, 0));
5712 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5713 is a comparison and STORE_FLAG_VALUE permits. This is like
5714 the first case, but it works even when GET_MODE (x) is larger
5715 than HOST_WIDE_INT. */
5716 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5717 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5718 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5719 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5720 <= HOST_BITS_PER_WIDE_INT)
5721 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5722 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5723 return XEXP (XEXP (x, 0), 0);
5725 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5726 if (GET_CODE (XEXP (x, 0)) == SUBREG
5727 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5728 && subreg_lowpart_p (XEXP (x, 0))
5729 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5730 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5731 <= HOST_BITS_PER_WIDE_INT)
5732 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5733 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5734 return SUBREG_REG (XEXP (x, 0));
5738 /* If we reach here, we want to return a pair of shifts. The inner
5739 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5740 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5741 logical depending on the value of UNSIGNEDP.
5743 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5744 converted into an AND of a shift.
5746 We must check for the case where the left shift would have a negative
5747 count. This can happen in a case like (x >> 31) & 255 on machines
5748 that can't shift by a constant. On those machines, we would first
5749 combine the shift with the AND to produce a variable-position
5750 extraction. Then the constant of 31 would be substituted in to produce
5751 such a position. */
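/* As an illustration, with BITS_BIG_ENDIAN clear,
   (sign_extract:SI X (const_int 8) (const_int 0))
   expands to
   (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24)).  */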
5753 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5754 if (modewidth + len >= pos)
5755 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5756 GET_MODE (x),
5757 simplify_shift_const (NULL_RTX, ASHIFT,
5758 GET_MODE (x),
5759 XEXP (x, 0),
5760 modewidth - pos - len),
5761 modewidth - len);
5763 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5764 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5765 simplify_shift_const (NULL_RTX, LSHIFTRT,
5766 GET_MODE (x),
5767 XEXP (x, 0), pos),
5768 ((HOST_WIDE_INT) 1 << len) - 1);
5769 else
5770 /* We can't handle any other cases. */
5771 return x;
5773 /* If we couldn't do this for some reason, return the original
5774 expression. */
5775 if (GET_CODE (tem) == CLOBBER)
5776 return x;
5778 return tem;
5781 /* X is a SET which contains an assignment of one object into
5782 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5783 or certain SUBREGS). If possible, convert it into a series of
5784 logical operations.
5786 We half-heartedly support variable positions, but do not at all
5787 support variable lengths. */
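/* As an illustration, with a constant position,
   (set (zero_extract:SI X (const_int 8) (const_int 4)) Y)
   becomes, roughly,
   (set X (ior (and (not (ashift (const_int 255) (const_int 4))) X)
               (ashift (and Y (const_int 255)) (const_int 4)))).  */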
5789 static rtx
5790 expand_field_assignment (x)
5791 rtx x;
5793 rtx inner;
5794 rtx pos; /* Always counts from low bit. */
5795 int len;
5796 rtx mask;
5797 enum machine_mode compute_mode;
5799 /* Loop until we find something we can't simplify. */
5800 while (1)
5802 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5803 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5805 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5806 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5807 pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
5809 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5810 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5812 inner = XEXP (SET_DEST (x), 0);
5813 len = INTVAL (XEXP (SET_DEST (x), 1));
5814 pos = XEXP (SET_DEST (x), 2);
5816 /* If the position is constant and spans the width of INNER,
5817 surround INNER with a USE to indicate this. */
5818 if (GET_CODE (pos) == CONST_INT
5819 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5820 inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5822 if (BITS_BIG_ENDIAN)
5824 if (GET_CODE (pos) == CONST_INT)
5825 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5826 - INTVAL (pos));
5827 else if (GET_CODE (pos) == MINUS
5828 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5829 && (INTVAL (XEXP (pos, 1))
5830 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5831 /* If position is ADJUST - X, new position is X. */
5832 pos = XEXP (pos, 0);
5833 else
5834 pos = gen_binary (MINUS, GET_MODE (pos),
5835 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5836 - len),
5837 pos);
5841 /* A SUBREG between two modes that occupy the same numbers of words
5842 can be done by moving the SUBREG to the source. */
5843 else if (GET_CODE (SET_DEST (x)) == SUBREG
5844 /* We need SUBREGs to compute nonzero_bits properly. */
5845 && nonzero_sign_valid
5846 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5847 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5848 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5849 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5851 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5852 gen_lowpart_for_combine
5853 (GET_MODE (SUBREG_REG (SET_DEST (x))),
5854 SET_SRC (x)));
5855 continue;
5857 else
5858 break;
5860 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5861 inner = SUBREG_REG (inner);
5863 compute_mode = GET_MODE (inner);
5865 /* Don't attempt bitwise arithmetic on non-integral modes. */
5866 if (! INTEGRAL_MODE_P (compute_mode))
5868 enum machine_mode imode;
5870 /* Something is probably seriously wrong if this matches. */
5871 if (! FLOAT_MODE_P (compute_mode))
5872 break;
5874 /* Try to find an integral mode to pun with. */
5875 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5876 if (imode == BLKmode)
5877 break;
5879 compute_mode = imode;
5880 inner = gen_lowpart_for_combine (imode, inner);
5883 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5884 if (len < HOST_BITS_PER_WIDE_INT)
5885 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5886 else
5887 break;
5889 /* Now compute the equivalent expression. Make a copy of INNER
5890 for the SET_DEST in case it is a MEM into which we will substitute;
5891 we don't want shared RTL in that case. */
5892 x = gen_rtx_SET
5893 (VOIDmode, copy_rtx (inner),
5894 gen_binary (IOR, compute_mode,
5895 gen_binary (AND, compute_mode,
5896 simplify_gen_unary (NOT, compute_mode,
5897 gen_binary (ASHIFT,
5898 compute_mode,
5899 mask, pos),
5900 compute_mode),
5901 inner),
5902 gen_binary (ASHIFT, compute_mode,
5903 gen_binary (AND, compute_mode,
5904 gen_lowpart_for_combine
5905 (compute_mode, SET_SRC (x)),
5906 mask),
5907 pos)));
5910 return x;
5913 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5914 it is an RTX that represents a variable starting position; otherwise,
5915 POS is the (constant) starting bit position (counted from the LSB).
5917 INNER may be a USE. This will occur when we started with a bitfield
5918 that went outside the boundary of the object in memory, which is
5919 allowed on most machines. To isolate this case, we produce a USE
5920 whose mode is wide enough and surround the MEM with it. The only
5921 code that understands the USE is this routine. If it is not removed,
5922 it will cause the resulting insn not to match.
5924 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5925 signed reference.
5927 IN_DEST is non-zero if this is a reference in the destination of a
5928 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5929 a STRICT_LOW_PART will be used; if zero, ZERO_EXTEND or SIGN_EXTEND will
5930 be used.
5932 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5933 ZERO_EXTRACT should be built even for bits starting at bit 0.
5935 MODE is the desired mode of the result (if IN_DEST == 0).
5937 The result is an RTX for the extraction or NULL_RTX if the target
5938 can't handle it. */
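/* As an illustration, asked for 8 bits at constant position 16 of a
   SImode register with UNSIGNEDP set, and with no machine extraction
   pattern dictating other modes, this produces roughly
   (zero_extract:SI (reg) (const_int 8) (const_int 16)).  */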
5940 static rtx
5941 make_extraction (mode, inner, pos, pos_rtx, len,
5942 unsignedp, in_dest, in_compare)
5943 enum machine_mode mode;
5944 rtx inner;
5945 HOST_WIDE_INT pos;
5946 rtx pos_rtx;
5947 unsigned HOST_WIDE_INT len;
5948 int unsignedp;
5949 int in_dest, in_compare;
5951 /* This mode describes the size of the storage area
5952 to fetch the overall value from. Within that, we
5953 ignore the POS lowest bits, etc. */
5954 enum machine_mode is_mode = GET_MODE (inner);
5955 enum machine_mode inner_mode;
5956 enum machine_mode wanted_inner_mode = byte_mode;
5957 enum machine_mode wanted_inner_reg_mode = word_mode;
5958 enum machine_mode pos_mode = word_mode;
5959 enum machine_mode extraction_mode = word_mode;
5960 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5961 int spans_byte = 0;
5962 rtx new = 0;
5963 rtx orig_pos_rtx = pos_rtx;
5964 HOST_WIDE_INT orig_pos;
5966 /* Get some information about INNER and get the innermost object. */
5967 if (GET_CODE (inner) == USE)
5968 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
5969 /* We don't need to adjust the position because we set up the USE
5970 to pretend that it was a full-word object. */
5971 spans_byte = 1, inner = XEXP (inner, 0);
5972 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5974 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5975 consider just the QI as the memory to extract from.
5976 The subreg adds or removes high bits; its mode is
5977 irrelevant to the meaning of this extraction,
5978 since POS and LEN count from the lsb. */
5979 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5980 is_mode = GET_MODE (SUBREG_REG (inner));
5981 inner = SUBREG_REG (inner);
5983 else if (GET_CODE (inner) == ASHIFT
5984 && GET_CODE (XEXP (inner, 1)) == CONST_INT
5985 && pos_rtx == 0 && pos == 0
5986 && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
5988 /* We're extracting the least significant bits of an rtx
5989 (ashift X (const_int C)), where LEN > C. Extract the
5990 least significant (LEN - C) bits of X, giving an rtx
5991 whose mode is MODE, then shift it left C times. */
5992 new = make_extraction (mode, XEXP (inner, 0),
5993 0, 0, len - INTVAL (XEXP (inner, 1)),
5994 unsignedp, in_dest, in_compare);
5995 if (new != 0)
5996 return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
5999 inner_mode = GET_MODE (inner);
6001 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
6002 pos = INTVAL (pos_rtx), pos_rtx = 0;
6004 /* See if this can be done without an extraction. We never can if the
6005 width of the field is not the same as that of some integer mode. For
6006 registers, we can only avoid the extraction if the position is at the
6007 low-order bit and this is either not in the destination or we have the
6008 appropriate STRICT_LOW_PART operation available.
6010 For MEM, we can avoid an extract if the field starts on an appropriate
6011 boundary and we can change the mode of the memory reference. However,
6012 we cannot directly access the MEM if we have a USE and the underlying
6013 MEM is not TMODE. This combination means that MEM was being used in a
6014 context where bits outside its mode were being referenced; that is only
6015 valid in bit-field insns. */
6017 if (tmode != BLKmode
6018 && ! (spans_byte && inner_mode != tmode)
6019 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6020 && GET_CODE (inner) != MEM
6021 && (! in_dest
6022 || (GET_CODE (inner) == REG
6023 && have_insn_for (STRICT_LOW_PART, tmode))))
6024 || (GET_CODE (inner) == MEM && pos_rtx == 0
6025 && (pos
6026 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6027 : BITS_PER_UNIT)) == 0
6028 /* We can't do this if we are widening INNER_MODE (it
6029 may not be aligned, for one thing). */
6030 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6031 && (inner_mode == tmode
6032 || (! mode_dependent_address_p (XEXP (inner, 0))
6033 && ! MEM_VOLATILE_P (inner))))))
6035 /* If INNER is a MEM, make a new MEM that encompasses just the desired
6036 field. If the original and current mode are the same, we need not
6037 adjust the offset. Otherwise, we do if bytes big endian.
6039 If INNER is not a MEM, get a piece consisting of just the field
6040 of interest (in this case POS % BITS_PER_WORD must be 0). */
6042 if (GET_CODE (inner) == MEM)
6044 HOST_WIDE_INT offset;
6046 /* POS counts from lsb, but make OFFSET count in memory order. */
6047 if (BYTES_BIG_ENDIAN)
6048 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6049 else
6050 offset = pos / BITS_PER_UNIT;
6052 new = adjust_address_nv (inner, tmode, offset);
6054 else if (GET_CODE (inner) == REG)
6056 /* We can't call gen_lowpart_for_combine here since we always want
6057 a SUBREG and it would sometimes return a new hard register. */
6058 if (tmode != inner_mode)
6060 HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6062 if (WORDS_BIG_ENDIAN
6063 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6064 final_word = ((GET_MODE_SIZE (inner_mode)
6065 - GET_MODE_SIZE (tmode))
6066 / UNITS_PER_WORD) - final_word;
6068 final_word *= UNITS_PER_WORD;
6069 if (BYTES_BIG_ENDIAN
6070 && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6071 final_word += (GET_MODE_SIZE (inner_mode)
6072 - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6074 new = gen_rtx_SUBREG (tmode, inner, final_word);
6076 else
6077 new = inner;
6079 else
6080 new = force_to_mode (inner, tmode,
6081 len >= HOST_BITS_PER_WIDE_INT
6082 ? ~(unsigned HOST_WIDE_INT) 0
6083 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6084 NULL_RTX, 0);
6086 /* If this extraction is going into the destination of a SET,
6087 make a STRICT_LOW_PART unless we made a MEM. */
6089 if (in_dest)
6090 return (GET_CODE (new) == MEM ? new
6091 : (GET_CODE (new) != SUBREG
6092 ? gen_rtx_CLOBBER (tmode, const0_rtx)
6093 : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
6095 if (mode == tmode)
6096 return new;
6098 if (GET_CODE (new) == CONST_INT)
6099 return gen_int_mode (INTVAL (new), mode);
6101 /* If we know that no extraneous bits are set, and that the high
6102 bit is not set, convert the extraction to the cheaper of
6103 sign and zero extension, which are equivalent in these cases. */
6104 if (flag_expensive_optimizations
6105 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6106 && ((nonzero_bits (new, tmode)
6107 & ~(((unsigned HOST_WIDE_INT)
6108 GET_MODE_MASK (tmode))
6109 >> 1))
6110 == 0)))
6112 rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6113 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6115 /* Prefer ZERO_EXTENSION, since it gives more information to
6116 backends. */
6117 if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6118 return temp;
6119 return temp1;
6122 /* Otherwise, sign- or zero-extend unless we already are in the
6123 proper mode. */
6125 return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6126 mode, new));
6129 /* Unless this is a COMPARE or we have a funny memory reference,
6130 don't do anything with zero-extending field extracts starting at
6131 the low-order bit since they are simple AND operations. */
6132 if (pos_rtx == 0 && pos == 0 && ! in_dest
6133 && ! in_compare && ! spans_byte && unsignedp)
6134 return 0;
6136 /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
6137 we would be spanning bytes or if the position is not a constant and the
6138 length is not 1. In all other cases, we would only be going outside
6139 our object in cases when an original shift would have been
6140 undefined. */
6141 if (! spans_byte && GET_CODE (inner) == MEM
6142 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6143 || (pos_rtx != 0 && len != 1)))
6144 return 0;
6146 /* Get the mode to use should INNER not be a MEM, the mode for the position,
6147 and the mode for the result. */
6148 if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6150 wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6151 pos_mode = mode_for_extraction (EP_insv, 2);
6152 extraction_mode = mode_for_extraction (EP_insv, 3);
6155 if (! in_dest && unsignedp
6156 && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6158 wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6159 pos_mode = mode_for_extraction (EP_extzv, 3);
6160 extraction_mode = mode_for_extraction (EP_extzv, 0);
6163 if (! in_dest && ! unsignedp
6164 && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6166 wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6167 pos_mode = mode_for_extraction (EP_extv, 3);
6168 extraction_mode = mode_for_extraction (EP_extv, 0);
6171 /* Never narrow an object, since that might not be safe. */
6173 if (mode != VOIDmode
6174 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6175 extraction_mode = mode;
6177 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6178 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6179 pos_mode = GET_MODE (pos_rtx);
6181 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6182 if we have to change the mode of memory and cannot, the desired mode is
6183 EXTRACTION_MODE. */
6184 if (GET_CODE (inner) != MEM)
6185 wanted_inner_mode = wanted_inner_reg_mode;
6186 else if (inner_mode != wanted_inner_mode
6187 && (mode_dependent_address_p (XEXP (inner, 0))
6188 || MEM_VOLATILE_P (inner)))
6189 wanted_inner_mode = extraction_mode;
6191 orig_pos = pos;
6193 if (BITS_BIG_ENDIAN)
6195 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6196 BITS_BIG_ENDIAN style. If position is constant, compute new
6197 position. Otherwise, build subtraction.
6198 Note that POS is relative to the mode of the original argument.
6199 If it's a MEM we need to recompute POS relative to that.
6200 However, if we're extracting from (or inserting into) a register,
6201 we want to recompute POS relative to wanted_inner_mode. */
6202 int width = (GET_CODE (inner) == MEM
6203 ? GET_MODE_BITSIZE (is_mode)
6204 : GET_MODE_BITSIZE (wanted_inner_mode));
6206 if (pos_rtx == 0)
6207 pos = width - len - pos;
6208 else
6209 pos_rtx
6210 = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6211 /* POS may be less than 0 now, but we check for that below.
6212 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
6215 /* If INNER has a wider mode, make it smaller. If this is a constant
6216 extract, try to adjust the byte to point to the byte containing
6217 the value. */
6218 if (wanted_inner_mode != VOIDmode
6219 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6220 && ((GET_CODE (inner) == MEM
6221 && (inner_mode == wanted_inner_mode
6222 || (! mode_dependent_address_p (XEXP (inner, 0))
6223 && ! MEM_VOLATILE_P (inner))))))
6225 int offset = 0;
6227 /* The computations below will be correct if the machine is big
6228 endian in both bits and bytes or little endian in bits and bytes.
6229 If it is mixed, we must adjust. */
6231 /* If bytes are big endian and we had a paradoxical SUBREG, we must
6232 adjust OFFSET to compensate. */
6233 if (BYTES_BIG_ENDIAN
6234 && ! spans_byte
6235 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6236 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6238 /* If this is a constant position, we can move to the desired byte. */
6239 if (pos_rtx == 0)
6241 offset += pos / BITS_PER_UNIT;
6242 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6245 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6246 && ! spans_byte
6247 && is_mode != wanted_inner_mode)
6248 offset = (GET_MODE_SIZE (is_mode)
6249 - GET_MODE_SIZE (wanted_inner_mode) - offset);
6251 if (offset != 0 || inner_mode != wanted_inner_mode)
6252 inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6255 /* If INNER is not memory, we can always get it into the proper mode. If we
6256 are changing its mode, POS must be a constant and smaller than the size
6257 of the new mode. */
6258 else if (GET_CODE (inner) != MEM)
6260 if (GET_MODE (inner) != wanted_inner_mode
6261 && (pos_rtx != 0
6262 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6263 return 0;
6265 inner = force_to_mode (inner, wanted_inner_mode,
6266 pos_rtx
6267 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6268 ? ~(unsigned HOST_WIDE_INT) 0
6269 : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6270 << orig_pos),
6271 NULL_RTX, 0);
6274 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
6275 have to zero extend. Otherwise, we can just use a SUBREG. */
6276 if (pos_rtx != 0
6277 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6279 rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6281 /* If we know that no extraneous bits are set, and that the high
6282 bit is not set, convert the extraction to the cheaper one - either
6283 SIGN_EXTENSION or ZERO_EXTENSION, which are equivalent in these
6284 cases. */
6285 if (flag_expensive_optimizations
6286 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6287 && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6288 & ~(((unsigned HOST_WIDE_INT)
6289 GET_MODE_MASK (GET_MODE (pos_rtx)))
6290 >> 1))
6291 == 0)))
6293 rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6295 /* Prefer ZERO_EXTENSION, since it gives more information to
6296 backends. */
6297 if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6298 temp = temp1;
6300 pos_rtx = temp;
6302 else if (pos_rtx != 0
6303 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6304 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
6306 /* Make POS_RTX unless we already have it and it is correct. If we don't
6307 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6308 be a CONST_INT. */
6309 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6310 pos_rtx = orig_pos_rtx;
6312 else if (pos_rtx == 0)
6313 pos_rtx = GEN_INT (pos);
6315 /* Make the required operation. See if we can use an existing rtx. */
6316 new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6317 extraction_mode, inner, GEN_INT (len), pos_rtx);
6318 if (! in_dest)
6319 new = gen_lowpart_for_combine (mode, new);
6321 return new;
6324 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6325 with any other operations in X. Return X without that shift if so. */
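/* As an illustration, with COUNT == 3,
   (plus:SI (ashift:SI Y (const_int 3)) (const_int 8))
   yields (plus:SI Y (const_int 1)), since the constant 8 can be
   safely represented as 1 << 3.  */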
6327 static rtx
6328 extract_left_shift (x, count)
6329 rtx x;
6330 int count;
6332 enum rtx_code code = GET_CODE (x);
6333 enum machine_mode mode = GET_MODE (x);
6334 rtx tem;
6336 switch (code)
6338 case ASHIFT:
6339 /* This is the shift itself. If it is wide enough, we will return
6340 either the value being shifted, if the shift count is equal to
6341 COUNT, or a shift for the difference. */
6342 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6343 && INTVAL (XEXP (x, 1)) >= count)
6344 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6345 INTVAL (XEXP (x, 1)) - count);
6346 break;
6348 case NEG: case NOT:
6349 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6350 return simplify_gen_unary (code, mode, tem, mode);
6352 break;
6354 case PLUS: case IOR: case XOR: case AND:
6355 /* If we can safely shift this constant and we find the inner shift,
6356 make a new operation. */
6357 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6358 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6359 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6360 return gen_binary (code, mode, tem,
6361 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6363 break;
6365 default:
6366 break;
6369 return 0;
6372 /* Look at the expression rooted at X. Look for expressions
6373 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6374 Form these expressions.
6376 Return the new rtx, usually just X.
6378 Also, for machines like the VAX that don't have logical shift insns,
6379 try to convert logical to arithmetic shift operations in cases where
6380 they are equivalent. This undoes the canonicalizations to logical
6381 shifts done elsewhere.
6383 We try, as much as possible, to re-use rtl expressions to save memory.
6385 IN_CODE says what kind of expression we are processing. Normally, it is
6386 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
6387 being kludges), it is MEM. When processing the arguments of a comparison
6388 or a COMPARE against zero, it is COMPARE. */
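/* As an illustration,
   (and:SI (lshiftrt:SI X (const_int 8)) (const_int 255))
   is typically recognized here as the extraction
   (zero_extract:SI X (const_int 8) (const_int 8)).  */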
6390 static rtx
6391 make_compound_operation (x, in_code)
6392 rtx x;
6393 enum rtx_code in_code;
6395 enum rtx_code code = GET_CODE (x);
6396 enum machine_mode mode = GET_MODE (x);
6397 int mode_width = GET_MODE_BITSIZE (mode);
6398 rtx rhs, lhs;
6399 enum rtx_code next_code;
6400 int i;
6401 rtx new = 0;
6402 rtx tem;
6403 const char *fmt;
6405 /* Select the code to be used in recursive calls. Once we are inside an
6406 address, we stay there. If we have a comparison, set to COMPARE,
6407 but once inside, go back to our default of SET. */
6409 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6410 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6411 && XEXP (x, 1) == const0_rtx) ? COMPARE
6412 : in_code == COMPARE ? SET : in_code);
6414 /* Process depending on the code of this operation. If NEW is set
6415 non-zero, it will be returned. */
6417 switch (code)
6419 case ASHIFT:
6420 /* Convert shifts by constants into multiplications if inside
6421 an address. */
6422 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6423 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6424 && INTVAL (XEXP (x, 1)) >= 0)
6426 new = make_compound_operation (XEXP (x, 0), next_code);
6427 new = gen_rtx_MULT (mode, new,
6428 GEN_INT ((HOST_WIDE_INT) 1
6429 << INTVAL (XEXP (x, 1))));
6431 break;
6433 case AND:
6434 /* If the second operand is not a constant, we can't do anything
6435 with it. */
6436 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6437 break;
6439 /* If the constant is a power of two minus one and the first operand
6440 is a logical right shift, make an extraction. */
6441 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6442 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6444 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6445 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6446 0, in_code == COMPARE);
6449 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
6450 else if (GET_CODE (XEXP (x, 0)) == SUBREG
6451 && subreg_lowpart_p (XEXP (x, 0))
6452 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6453 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6455 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6456 next_code);
6457 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6458 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6459 0, in_code == COMPARE);
6461 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
6462 else if ((GET_CODE (XEXP (x, 0)) == XOR
6463 || GET_CODE (XEXP (x, 0)) == IOR)
6464 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6465 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6466 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6468 /* Apply the distributive law, and then try to make extractions. */
6469 new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6470 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6471 XEXP (x, 1)),
6472 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6473 XEXP (x, 1)));
6474 new = make_compound_operation (new, in_code);
6477 /* If we have (and (rotate X C) M) and C is larger than the number
6478 of bits in M, this is an extraction. */
6480 else if (GET_CODE (XEXP (x, 0)) == ROTATE
6481 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6482 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6483 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6485 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6486 new = make_extraction (mode, new,
6487 (GET_MODE_BITSIZE (mode)
6488 - INTVAL (XEXP (XEXP (x, 0), 1))),
6489 NULL_RTX, i, 1, 0, in_code == COMPARE);
6492 /* On machines without logical shifts, if the operand of the AND is
6493 a logical shift and our mask turns off all the propagated sign
6494 bits, we can replace the logical shift with an arithmetic shift. */
6495 else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6496 && !have_insn_for (LSHIFTRT, mode)
6497 && have_insn_for (ASHIFTRT, mode)
6498 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6499 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6500 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6501 && mode_width <= HOST_BITS_PER_WIDE_INT)
6503 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6505 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6506 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6507 SUBST (XEXP (x, 0),
6508 gen_rtx_ASHIFTRT (mode,
6509 make_compound_operation
6510 (XEXP (XEXP (x, 0), 0), next_code),
6511 XEXP (XEXP (x, 0), 1)));
6514 /* If the constant is one less than a power of two, this might be
6515 representable by an extraction even if no shift is present.
6516 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6517 we are in a COMPARE. */
6518 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6519 new = make_extraction (mode,
6520 make_compound_operation (XEXP (x, 0),
6521 next_code),
6522 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6524 /* If we are in a comparison and this is an AND with a power of two,
6525 convert this into the appropriate bit extract. */
6526 else if (in_code == COMPARE
6527 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6528 new = make_extraction (mode,
6529 make_compound_operation (XEXP (x, 0),
6530 next_code),
6531 i, NULL_RTX, 1, 1, 0, 1);
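/* For example, comparing (and X (const_int 8)) against zero tests only
bit 3 of X, so it becomes (zero_extract X (const_int 1) (const_int 3)),
which many targets can match with a single bit-test instruction. */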
6533 break;
6535 case LSHIFTRT:
6536 /* If the sign bit is known to be zero, replace this with an
6537 arithmetic shift. */
6538 if (have_insn_for (ASHIFTRT, mode)
6539 && ! have_insn_for (LSHIFTRT, mode)
6540 && mode_width <= HOST_BITS_PER_WIDE_INT
6541 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6543 new = gen_rtx_ASHIFTRT (mode,
6544 make_compound_operation (XEXP (x, 0),
6545 next_code),
6546 XEXP (x, 1));
6547 break;
6550 /* ... fall through ... */
6552 case ASHIFTRT:
6553 lhs = XEXP (x, 0);
6554 rhs = XEXP (x, 1);
6556 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6557 this is a SIGN_EXTRACT. */
6558 if (GET_CODE (rhs) == CONST_INT
6559 && GET_CODE (lhs) == ASHIFT
6560 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6561 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6563 new = make_compound_operation (XEXP (lhs, 0), next_code);
6564 new = make_extraction (mode, new,
6565 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6566 NULL_RTX, mode_width - INTVAL (rhs),
6567 code == LSHIFTRT, 0, in_code == COMPARE);
6568 break;
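/* For example, in SImode (ashiftrt (ashift X (const_int 24))
(const_int 24)) becomes (sign_extract:SI X (const_int 8) (const_int 0)),
i.e. a sign extension of the low byte; when we fall through from
LSHIFTRT, (lshiftrt (ashift X 24) 24) yields a ZERO_EXTRACT instead. */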
6571 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6572 If so, try to merge the shifts into a SIGN_EXTEND. We could
6573 also do this for some cases of SIGN_EXTRACT, but it doesn't
6574 seem worth the effort; the case checked for occurs on Alpha. */
6576 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6577 && ! (GET_CODE (lhs) == SUBREG
6578 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6579 && GET_CODE (rhs) == CONST_INT
6580 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6581 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6582 new = make_extraction (mode, make_compound_operation (new, next_code),
6583 0, NULL_RTX, mode_width - INTVAL (rhs),
6584 code == LSHIFTRT, 0, in_code == COMPARE);
6586 break;
6588 case SUBREG:
6589 /* Call ourselves recursively on the inner expression. If we are
6590 narrowing the object and it has a different RTL code from
6591 what it originally did, do this SUBREG as a force_to_mode. */
6593 tem = make_compound_operation (SUBREG_REG (x), in_code);
6594 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6595 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6596 && subreg_lowpart_p (x))
6598 rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6599 NULL_RTX, 0);
6601 /* If we have something other than a SUBREG, we might have
6602 done an expansion, so rerun ourselves. */
6603 if (GET_CODE (newer) != SUBREG)
6604 newer = make_compound_operation (newer, in_code);
6606 return newer;
6609 /* If this is a paradoxical subreg, and the new code is a sign or
6610 zero extension, omit the subreg and widen the extension. If it
6611 is a regular subreg, we can still get rid of the subreg by not
6612 widening so much, or in fact removing the extension entirely. */
6613 if ((GET_CODE (tem) == SIGN_EXTEND
6614 || GET_CODE (tem) == ZERO_EXTEND)
6615 && subreg_lowpart_p (x))
6617 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6618 || (GET_MODE_SIZE (mode) >
6619 GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6620 tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
6621 else
6622 tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6623 return tem;
6625 break;
6627 default:
6628 break;
6631 if (new)
6633 x = gen_lowpart_for_combine (mode, new);
6634 code = GET_CODE (x);
6637 /* Now recursively process each operand of this operation. */
6638 fmt = GET_RTX_FORMAT (code);
6639 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6640 if (fmt[i] == 'e')
6642 new = make_compound_operation (XEXP (x, i), next_code);
6643 SUBST (XEXP (x, i), new);
6646 return x;
6649 /* Given M, see if it is a value that would select a field of bits
6650 within an item, but not the entire word. Return -1 if not.
6651 Otherwise, return the starting position of the field, where 0 is the
6652 low-order bit.
6654 *PLEN is set to the length of the field. */
6656 static int
6657 get_pos_from_mask (m, plen)
6658 unsigned HOST_WIDE_INT m;
6659 unsigned HOST_WIDE_INT *plen;
6661 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6662 int pos = exact_log2 (m & -m);
6663 int len;
6665 if (pos < 0)
6666 return -1;
6668 /* Now shift off the low-order zero bits and see if we have a power of
6669 two minus 1. */
6670 len = exact_log2 ((m >> pos) + 1);
6672 if (len <= 0)
6673 return -1;
6675 *plen = len;
6676 return pos;
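/* For example, M == 0x70 (binary 0111 0000) gives *PLEN == 3 and returns
4: the lowest set bit is bit 4 and 0x70 >> 4 == 7 == 2**3 - 1. But
M == 0x50 returns -1, since 0x50 >> 4 == 5 is not one less than a
power of two. */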
6679 /* See if X can be simplified knowing that we will only refer to it in
6680 MODE and will only refer to those bits that are nonzero in MASK.
6681 If other bits are being computed or if masking operations are done
6682 that select a superset of the bits in MASK, they can sometimes be
6683 ignored.
6685 Return a possibly simplified expression, but always convert X to
6686 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
6688 Also, if REG is non-zero and X is a register equal in value to REG,
6689 replace X with REG.
6691 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6692 are all off in X. This is used when X will be complemented, by either
6693 NOT, NEG, or XOR. */
6695 static rtx
6696 force_to_mode (x, mode, mask, reg, just_select)
6697 rtx x;
6698 enum machine_mode mode;
6699 unsigned HOST_WIDE_INT mask;
6700 rtx reg;
6701 int just_select;
6703 enum rtx_code code = GET_CODE (x);
6704 int next_select = just_select || code == XOR || code == NOT || code == NEG;
6705 enum machine_mode op_mode;
6706 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6707 rtx op0, op1, temp;
6709 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6710 code below will do the wrong thing since the mode of such an
6711 expression is VOIDmode.
6713 Also do nothing if X is a CLOBBER; this can happen if X was
6714 the return value from a call to gen_lowpart_for_combine. */
6715 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6716 return x;
6718 /* We want to perform the operation in its present mode unless we know
6719 that the operation is valid in MODE, in which case we do the operation
6720 in MODE. */
6721 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6722 && have_insn_for (code, mode))
6723 ? mode : GET_MODE (x));
6725 /* It is not valid to do a right-shift in a narrower mode
6726 than the one it came in with. */
6727 if ((code == LSHIFTRT || code == ASHIFTRT)
6728 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6729 op_mode = GET_MODE (x);
6731 /* Truncate MASK to fit OP_MODE. */
6732 if (op_mode)
6733 mask &= GET_MODE_MASK (op_mode);
6735 /* When we have an arithmetic operation, or a shift whose count we
6736 do not know, we need to assume that all bits up to the highest-order
6737 bit in MASK will be needed. This is how we form such a mask. */
6738 if (op_mode)
6739 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6740 ? GET_MODE_MASK (op_mode)
6741 : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6742 - 1));
6743 else
6744 fuller_mask = ~(HOST_WIDE_INT) 0;
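/* For example, MASK == 0x14 (bits 2 and 4) gives FULLER_MASK == 0x1f:
a carry out of bit 0 or bit 1 can ripple into bit 2 or bit 4, so for
arithmetic every bit up to the highest one requested must be treated
as needed. */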
6746 /* Determine what bits of X are guaranteed to be (non)zero. */
6747 nonzero = nonzero_bits (x, mode);
6749 /* If none of the bits in X are needed, return a zero. */
6750 if (! just_select && (nonzero & mask) == 0)
6751 return const0_rtx;
6753 /* If X is a CONST_INT, return a new one. Do this here since the
6754 test below will fail. */
6755 if (GET_CODE (x) == CONST_INT)
6756 return gen_int_mode (INTVAL (x) & mask, mode);
6758 /* If X is narrower than MODE and we want all the bits in X's mode, just
6759 get X in the proper mode. */
6760 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6761 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6762 return gen_lowpart_for_combine (mode, x);
6764 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6765 MASK are already known to be zero in X, we need not do anything. */
6766 if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6767 return x;
6769 switch (code)
6771 case CLOBBER:
6772 /* If X is a (clobber (const_int)), return it since we know we are
6773 generating something that won't match. */
6774 return x;
6776 case USE:
6777 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6778 spanned the boundary of the MEM. If we are now masking so it is
6779 within that boundary, we don't need the USE any more. */
6780 if (! BITS_BIG_ENDIAN
6781 && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6782 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6783 break;
6785 case SIGN_EXTEND:
6786 case ZERO_EXTEND:
6787 case ZERO_EXTRACT:
6788 case SIGN_EXTRACT:
6789 x = expand_compound_operation (x);
6790 if (GET_CODE (x) != code)
6791 return force_to_mode (x, mode, mask, reg, next_select);
6792 break;
6794 case REG:
6795 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6796 || rtx_equal_p (reg, get_last_value (x))))
6797 x = reg;
6798 break;
6800 case SUBREG:
6801 if (subreg_lowpart_p (x)
6802 /* We can ignore the effect of this SUBREG if it narrows the mode or
6803 if the constant masks to zero all the bits the mode doesn't
6804 have. */
6805 && ((GET_MODE_SIZE (GET_MODE (x))
6806 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6807 || (0 == (mask
6808 & GET_MODE_MASK (GET_MODE (x))
6809 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6810 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6811 break;
6813 case AND:
6814 /* If this is an AND with a constant, convert it into an AND
6815 whose constant is the AND of that constant with MASK. If it
6816 remains an AND of MASK, delete it since it is redundant. */
6818 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6820 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6821 mask & INTVAL (XEXP (x, 1)));
6823 /* If X is still an AND, see if it is an AND with a mask that
6824 is just some low-order bits. If so, and it is MASK, we don't
6825 need it. */
6827 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6828 && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
6829 == mask))
6830 x = XEXP (x, 0);
6832 /* If it remains an AND, try making another AND with the bits
6833 in the mode mask that aren't in MASK turned on. If the
6834 constant in the AND is wide enough, this might make a
6835 cheaper constant. */
6837 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6838 && GET_MODE_MASK (GET_MODE (x)) != mask
6839 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6841 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6842 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
6843 int width = GET_MODE_BITSIZE (GET_MODE (x));
6844 rtx y;
6846 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6847 number, sign extend it. */
6848 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6849 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6850 cval |= (HOST_WIDE_INT) -1 << width;
6852 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6853 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6854 x = y;
6857 break;
6860 goto binop;
6862 case PLUS:
6863 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6864 low-order bits (as in an alignment operation) and FOO is already
6865 aligned to that boundary, mask C1 to that boundary as well.
6866 This may eliminate that PLUS and, later, the AND. */
6869 unsigned int width = GET_MODE_BITSIZE (mode);
6870 unsigned HOST_WIDE_INT smask = mask;
6872 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6873 number, sign extend it. */
6875 if (width < HOST_BITS_PER_WIDE_INT
6876 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6877 smask |= (HOST_WIDE_INT) -1 << width;
6879 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6880 && exact_log2 (- smask) >= 0
6881 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
6882 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
6883 return force_to_mode (plus_constant (XEXP (x, 0),
6884 (INTVAL (XEXP (x, 1)) & smask)),
6885 mode, smask, reg, next_select);
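/* For example, if the low three bits of FOO are known to be zero,
(and (plus FOO (const_int 12)) (const_int -8)) becomes
(and (plus FOO (const_int 8)) (const_int -8)), since 12 & -8 == 8;
the recursive call may then simplify the result further. */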
6888 /* ... fall through ... */
6890 case MULT:
6891 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6892 most significant bit in MASK since carries from those bits will
6893 affect the bits we are interested in. */
6894 mask = fuller_mask;
6895 goto binop;
6897 case MINUS:
6898 /* If X is (minus C Y) where C's least set bit is larger than any bit
6899 in the mask, then we may replace with (neg Y). */
6900 if (GET_CODE (XEXP (x, 0)) == CONST_INT
6901 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
6902 & -INTVAL (XEXP (x, 0))))
6903 > mask))
6905 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
6906 GET_MODE (x));
6907 return force_to_mode (x, mode, mask, reg, next_select);
6910 /* Similarly, if C contains every bit in the mask, then we may
6911 replace with (not Y). */
6912 if (GET_CODE (XEXP (x, 0)) == CONST_INT
6913 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) mask)
6914 == INTVAL (XEXP (x, 0))))
6916 x = simplify_gen_unary (NOT, GET_MODE (x),
6917 XEXP (x, 1), GET_MODE (x));
6918 return force_to_mode (x, mode, mask, reg, next_select);
6921 mask = fuller_mask;
6922 goto binop;
6924 case IOR:
6925 case XOR:
6926 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6927 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6928 operation which may be a bitfield extraction. Ensure that the
6929 constant we form is not wider than the mode of X. */
6931 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6932 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6933 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6934 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6935 && GET_CODE (XEXP (x, 1)) == CONST_INT
6936 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6937 + floor_log2 (INTVAL (XEXP (x, 1))))
6938 < GET_MODE_BITSIZE (GET_MODE (x)))
6939 && (INTVAL (XEXP (x, 1))
6940 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6942 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6943 << INTVAL (XEXP (XEXP (x, 0), 1)));
6944 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6945 XEXP (XEXP (x, 0), 0), temp);
6946 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6947 XEXP (XEXP (x, 0), 1));
6948 return force_to_mode (x, mode, mask, reg, next_select);
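/* The commutation relies on the identity (ior (lshiftrt FOO C1) C2)
== (lshiftrt (ior FOO (C2 << C1)) C1), which is exact for a logical
shift; e.g. (ior (lshiftrt FOO 2) 3) == (lshiftrt (ior FOO 12) 2).
Note that C2 is first masked with MASK, and the conditions above keep
C2 << C1 within the mode of X. */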
6951 binop:
6952 /* For most binary operations, just propagate into the operation and
6953 change the mode if we have an operation of that mode. */
6955 op0 = gen_lowpart_for_combine (op_mode,
6956 force_to_mode (XEXP (x, 0), mode, mask,
6957 reg, next_select));
6958 op1 = gen_lowpart_for_combine (op_mode,
6959 force_to_mode (XEXP (x, 1), mode, mask,
6960 reg, next_select));
6962 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6963 x = gen_binary (code, op_mode, op0, op1);
6964 break;
6966 case ASHIFT:
6967 /* For left shifts, do the same, but just for the first operand.
6968 However, we cannot do anything with shifts where we cannot
6969 guarantee that the counts are smaller than the size of the mode
6970 because such a count will have a different meaning in a
6971 wider mode. */
6973 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6974 && INTVAL (XEXP (x, 1)) >= 0
6975 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6976 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6977 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
6978 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
6979 break;
6981 /* If the shift count is a constant and we can do arithmetic in
6982 the mode of the shift, refine which bits we need. Otherwise, use the
6983 conservative form of the mask. */
6984 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6985 && INTVAL (XEXP (x, 1)) >= 0
6986 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6987 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6988 mask >>= INTVAL (XEXP (x, 1));
6989 else
6990 mask = fuller_mask;
6992 op0 = gen_lowpart_for_combine (op_mode,
6993 force_to_mode (XEXP (x, 0), op_mode,
6994 mask, reg, next_select));
6996 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6997 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
6998 break;
7000 case LSHIFTRT:
7001 /* Here we can only do something if the shift count is a constant,
7002 this shift constant is valid for the host, and we can do arithmetic
7003 in OP_MODE. */
7005 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7006 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7007 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7009 rtx inner = XEXP (x, 0);
7010 unsigned HOST_WIDE_INT inner_mask;
7012 /* Select the mask of the bits we need for the shift operand. */
7013 inner_mask = mask << INTVAL (XEXP (x, 1));
7015 /* We can only change the mode of the shift if we can do arithmetic
7016 in the mode of the shift and INNER_MASK is no wider than the
7017 width of OP_MODE. */
7018 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
7019 || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
7020 op_mode = GET_MODE (x);
7022 inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
7024 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7025 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7028 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7029 shift and AND produces only copies of the sign bit (C2 is one less
7030 than a power of two), we can do this with just a shift. */
7032 if (GET_CODE (x) == LSHIFTRT
7033 && GET_CODE (XEXP (x, 1)) == CONST_INT
7034 /* The shift puts one of the sign bit copies in the least significant
7035 bit. */
7036 && ((INTVAL (XEXP (x, 1))
7037 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7038 >= GET_MODE_BITSIZE (GET_MODE (x)))
7039 && exact_log2 (mask + 1) >= 0
7040 /* Number of bits left after the shift must be more than the mask
7041 needs. */
7042 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7043 <= GET_MODE_BITSIZE (GET_MODE (x)))
7044 /* Must be more sign bit copies than the mask needs. */
7045 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7046 >= exact_log2 (mask + 1)))
7047 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7048 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7049 - exact_log2 (mask + 1)));
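/* For example, if FOO has 31 sign-bit copies in SImode, then
(and (lshiftrt FOO (const_int 29)) (const_int 3)) under MASK == 3
becomes (lshiftrt FOO (const_int 30)): shifting by
32 - exact_log2 (3 + 1) == 30 leaves nothing but sign-bit copies in
the two low bits, so the AND is redundant. */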
7051 goto shiftrt;
7053 case ASHIFTRT:
7054 /* If we are just looking for the sign bit, we don't need this shift at
7055 all, even if it has a variable count. */
7056 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7057 && (mask == ((unsigned HOST_WIDE_INT) 1
7058 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7059 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7061 /* If this is a shift by a constant, get a mask that contains those bits
7062 that are not copies of the sign bit. We then have two cases: If
7063 MASK only includes those bits, this can be a logical shift, which may
7064 allow simplifications. If MASK is a single-bit field not within
7065 those bits, we are requesting a copy of the sign bit and hence can
7066 shift the sign bit to the appropriate location. */
7068 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7069 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7071 int i = -1;
7073 /* If the considered data is wider than HOST_WIDE_INT, we can't
7074 represent a mask for all its bits in a single scalar.
7075 But we only care about the lower bits, so calculate these. */
7077 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7079 nonzero = ~(HOST_WIDE_INT) 0;
7081 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7082 is the number of bits a full-width mask would have set.
7083 We need only shift if these are fewer than nonzero can
7084 hold. If not, we must keep all bits set in nonzero. */
7086 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7087 < HOST_BITS_PER_WIDE_INT)
7088 nonzero >>= INTVAL (XEXP (x, 1))
7089 + HOST_BITS_PER_WIDE_INT
7090 - GET_MODE_BITSIZE (GET_MODE (x));
7092 else
7094 nonzero = GET_MODE_MASK (GET_MODE (x));
7095 nonzero >>= INTVAL (XEXP (x, 1));
7098 if ((mask & ~nonzero) == 0
7099 || (i = exact_log2 (mask)) >= 0)
7101 x = simplify_shift_const
7102 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7103 i < 0 ? INTVAL (XEXP (x, 1))
7104 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7106 if (GET_CODE (x) != ASHIFTRT)
7107 return force_to_mode (x, mode, mask, reg, next_select);
7111 /* If MASK is 1, convert this to an LSHIFTRT. This can be done
7112 even if the shift count isn't a constant. */
7113 if (mask == 1)
7114 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7116 shiftrt:
7118 /* If this is a zero- or sign-extension operation that just affects bits
7119 we don't care about, remove it. Be sure the call above returned
7120 something that is still a shift. */
7122 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7123 && GET_CODE (XEXP (x, 1)) == CONST_INT
7124 && INTVAL (XEXP (x, 1)) >= 0
7125 && (INTVAL (XEXP (x, 1))
7126 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7127 && GET_CODE (XEXP (x, 0)) == ASHIFT
7128 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7129 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
7130 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7131 reg, next_select);
7133 break;
7135 case ROTATE:
7136 case ROTATERT:
7137 /* If the shift count is constant and we can do computations
7138 in the mode of X, compute where the bits we care about are.
7139 Otherwise, we can't do anything. Don't change the mode of
7140 the shift or propagate MODE into the shift, though. */
7141 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7142 && INTVAL (XEXP (x, 1)) >= 0)
7144 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7145 GET_MODE (x), GEN_INT (mask),
7146 XEXP (x, 1));
7147 if (temp && GET_CODE (temp) == CONST_INT)
7148 SUBST (XEXP (x, 0),
7149 force_to_mode (XEXP (x, 0), GET_MODE (x),
7150 INTVAL (temp), reg, next_select));
7152 break;
7154 case NEG:
7155 /* If we just want the low-order bit, the NEG isn't needed since it
7156 won't change the low-order bit. */
7157 if (mask == 1)
7158 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7160 /* We need any bits less significant than the most significant bit in
7161 MASK since carries from those bits will affect the bits we are
7162 interested in. */
7163 mask = fuller_mask;
7164 goto unop;
7166 case NOT:
7167 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7168 same as the XOR case above. Ensure that the constant we form is not
7169 wider than the mode of X. */
7171 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7172 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7173 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7174 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7175 < GET_MODE_BITSIZE (GET_MODE (x)))
7176 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7178 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
7179 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7180 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7182 return force_to_mode (x, mode, mask, reg, next_select);
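/* Under MASK this relies on the identity (not (lshiftrt FOO C))
== (lshiftrt (xor FOO (MASK << C)) C): the XOR flips exactly those
bits that survive both the shift and the mask, and the conditions
above keep MASK << C within the mode of X. */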
7185 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7186 use the full mask inside the NOT. */
7187 mask = fuller_mask;
7189 unop:
7190 op0 = gen_lowpart_for_combine (op_mode,
7191 force_to_mode (XEXP (x, 0), mode, mask,
7192 reg, next_select));
7193 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7194 x = simplify_gen_unary (code, op_mode, op0, op_mode);
7195 break;
7197 case NE:
7198 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7199 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7200 which is equal to STORE_FLAG_VALUE. */
7201 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7202 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7203 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
7204 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7206 break;
7208 case IF_THEN_ELSE:
7209 /* We have no way of knowing if the IF_THEN_ELSE can itself be
7210 written in a narrower mode. We play it safe and do not do so. */
7212 SUBST (XEXP (x, 1),
7213 gen_lowpart_for_combine (GET_MODE (x),
7214 force_to_mode (XEXP (x, 1), mode,
7215 mask, reg, next_select)));
7216 SUBST (XEXP (x, 2),
7217 gen_lowpart_for_combine (GET_MODE (x),
7218 force_to_mode (XEXP (x, 2), mode,
7219 mask, reg, next_select)));
7220 break;
7222 default:
7223 break;
7226 /* Ensure we return a value of the proper mode. */
7227 return gen_lowpart_for_combine (mode, x);
7230 /* Return nonzero if X is an expression that has one of two values depending on
7231 whether some other value is zero or nonzero. In that case, we return the
7232 value that is being tested, *PTRUE is set to the value if the rtx being
7233 returned has a nonzero value, and *PFALSE is set to the other alternative.
7235 If we return zero, we set *PTRUE and *PFALSE to X. */
7237 static rtx
7238 if_then_else_cond (x, ptrue, pfalse)
7239 rtx x;
7240 rtx *ptrue, *pfalse;
7242 enum machine_mode mode = GET_MODE (x);
7243 enum rtx_code code = GET_CODE (x);
7244 rtx cond0, cond1, true0, true1, false0, false1;
7245 unsigned HOST_WIDE_INT nz;
7247 /* If we are comparing a value against zero, we are done. */
7248 if ((code == NE || code == EQ)
7249 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
7251 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7252 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7253 return XEXP (x, 0);
7256 /* If this is a unary operation whose operand has one of two values, apply
7257 our opcode to compute those values. */
7258 else if (GET_RTX_CLASS (code) == '1'
7259 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7261 *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7262 *pfalse = simplify_gen_unary (code, mode, false0,
7263 GET_MODE (XEXP (x, 0)));
7264 return cond0;
7267 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7268 make can't possibly match and would suppress other optimizations. */
7269 else if (code == COMPARE)
7272 /* If this is a binary operation, see if either side has only one of two
7273 values. If either one does or if both do and they are conditional on
7274 the same value, compute the new true and false values. */
7275 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7276 || GET_RTX_CLASS (code) == '<')
7278 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7279 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7281 if ((cond0 != 0 || cond1 != 0)
7282 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7284 /* If if_then_else_cond returned zero, then true/false are the
7285 same rtl. We must copy one of them to prevent invalid rtl
7286 sharing. */
7287 if (cond0 == 0)
7288 true0 = copy_rtx (true0);
7289 else if (cond1 == 0)
7290 true1 = copy_rtx (true1);
7292 *ptrue = gen_binary (code, mode, true0, true1);
7293 *pfalse = gen_binary (code, mode, false0, false1);
7294 return cond0 ? cond0 : cond1;
7297 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7298 operands is zero when the other is non-zero, and vice-versa,
7299 and STORE_FLAG_VALUE is 1 or -1. */
7301 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7302 && (code == PLUS || code == IOR || code == XOR || code == MINUS
7303 || code == UMAX)
7304 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7306 rtx op0 = XEXP (XEXP (x, 0), 1);
7307 rtx op1 = XEXP (XEXP (x, 1), 1);
7309 cond0 = XEXP (XEXP (x, 0), 0);
7310 cond1 = XEXP (XEXP (x, 1), 0);
7312 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7313 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7314 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7315 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7316 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7317 || ((swap_condition (GET_CODE (cond0))
7318 == combine_reversed_comparison_code (cond1))
7319 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7320 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7321 && ! side_effects_p (x))
7323 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
7324 *pfalse = gen_binary (MULT, mode,
7325 (code == MINUS
7326 ? simplify_gen_unary (NEG, mode, op1,
7327 mode)
7328 : op1),
7329 const_true_rtx);
7330 return cond0;
7334 /* Similarly for MULT, AND and UMIN, except that for these the result
7335 is always zero. */
7336 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7337 && (code == MULT || code == AND || code == UMIN)
7338 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7340 cond0 = XEXP (XEXP (x, 0), 0);
7341 cond1 = XEXP (XEXP (x, 1), 0);
7343 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7344 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7345 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7346 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7347 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7348 || ((swap_condition (GET_CODE (cond0))
7349 == combine_reversed_comparison_code (cond1))
7350 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7351 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7352 && ! side_effects_p (x))
7354 *ptrue = *pfalse = const0_rtx;
7355 return cond0;
7360 else if (code == IF_THEN_ELSE)
7362 /* If we have IF_THEN_ELSE already, extract the condition and
7363 canonicalize it if it is NE or EQ. */
7364 cond0 = XEXP (x, 0);
7365 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7366 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7367 return XEXP (cond0, 0);
7368 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7370 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7371 return XEXP (cond0, 0);
7373 else
7374 return cond0;
7377 /* If X is a SUBREG, we can narrow both the true and false values
7378 of the inner expression, if there is a condition.
7379 else if (code == SUBREG
7380 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7381 &true0, &false0)))
7383 *ptrue = simplify_gen_subreg (mode, true0,
7384 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7385 *pfalse = simplify_gen_subreg (mode, false0,
7386 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7388 return cond0;
7391 /* If X is a constant, this isn't special and will cause confusion
7392 if we treat it as such. Likewise if it is equivalent to a constant. */
7393 else if (CONSTANT_P (x)
7394 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7397 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7398 will be least confusing to the rest of the compiler. */
7399 else if (mode == BImode)
7401 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7402 return x;
7405 /* If X is known to be either 0 or -1, those are the true and
7406 false values when testing X. */
7407 else if (x == constm1_rtx || x == const0_rtx
7408 || (mode != VOIDmode
7409 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7411 *ptrue = constm1_rtx, *pfalse = const0_rtx;
7412 return x;
7415 /* Likewise for 0 or a single bit. */
7416 else if (mode != VOIDmode
7417 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7418 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7420 *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7421 return x;
7424 /* Otherwise fail; show no condition with true and false values the same. */
7425 *ptrue = *pfalse = x;
7426 return 0;
7429 /* Return the value of expression X given the fact that condition COND
7430 is known to be true when applied to REG as its first operand and VAL
7431 as its second. X is known to not be shared and so can be modified in
7432 place.
7434 We only handle the simplest cases, and specifically those cases that
7435 arise with IF_THEN_ELSE expressions. */
7437 static rtx
7438 known_cond (x, cond, reg, val)
7439 rtx x;
7440 enum rtx_code cond;
7441 rtx reg, val;
7443 enum rtx_code code = GET_CODE (x);
7444 rtx temp;
7445 const char *fmt;
7446 int i, j;
7448 if (side_effects_p (x))
7449 return x;
7451 /* If either operand of the condition is a floating point value,
7452 then we have to avoid collapsing an EQ comparison. */
7453 if (cond == EQ
7454 && rtx_equal_p (x, reg)
7455 && ! FLOAT_MODE_P (GET_MODE (x))
7456 && ! FLOAT_MODE_P (GET_MODE (val)))
7457 return val;
7459 if (cond == UNEQ && rtx_equal_p (x, reg))
7460 return val;
7462 /* If X is (abs REG) and we know something about REG's relationship
7463 with zero, we may be able to simplify this. */
7465 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7466 switch (cond)
7468 case GE: case GT: case EQ:
7469 return XEXP (x, 0);
7470 case LT: case LE:
7471 return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7472 XEXP (x, 0),
7473 GET_MODE (XEXP (x, 0)));
7474 default:
7475 break;
7478 /* The only other cases we handle are MIN, MAX, and comparisons if the
7479 operands are the same as REG and VAL. */
7481 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7483 if (rtx_equal_p (XEXP (x, 0), val))
7484 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7486 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7488 if (GET_RTX_CLASS (code) == '<')
7490 if (comparison_dominates_p (cond, code))
7491 return const_true_rtx;
7493 code = combine_reversed_comparison_code (x);
7494 if (code != UNKNOWN
7495 && comparison_dominates_p (cond, code))
7496 return const0_rtx;
7497 else
7498 return x;
7500 else if (code == SMAX || code == SMIN
7501 || code == UMIN || code == UMAX)
7503 int unsignedp = (code == UMIN || code == UMAX);
7505 /* Do not reverse the condition when it is NE or EQ.
7506 This is because we cannot conclude anything about
7507 the value of 'SMAX (x, y)' when x is not equal to y,
7508 but we can when x equals y. */
7509 if ((code == SMAX || code == UMAX)
7510 && ! (cond == EQ || cond == NE))
7511 cond = reverse_condition (cond);
7513 switch (cond)
7515 case GE: case GT:
7516 return unsignedp ? x : XEXP (x, 1);
7517 case LE: case LT:
7518 return unsignedp ? x : XEXP (x, 0);
7519 case GEU: case GTU:
7520 return unsignedp ? XEXP (x, 1) : x;
7521 case LEU: case LTU:
7522 return unsignedp ? XEXP (x, 0) : x;
7523 default:
7524 break;
7529 else if (code == SUBREG)
7531 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7532 rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7534 if (SUBREG_REG (x) != r)
7536 /* We must simplify subreg here, before we lose track of the
7537 original inner_mode. */
7538 new = simplify_subreg (GET_MODE (x), r,
7539 inner_mode, SUBREG_BYTE (x));
7540 if (new)
7541 return new;
7542 else
7543 SUBST (SUBREG_REG (x), r);
7546 return x;
7548 /* We don't have to handle SIGN_EXTEND here, because even in the
7549 case of replacing something with a modeless CONST_INT, a
7550 CONST_INT is already (supposed to be) a valid sign extension for
7551 its narrower mode, which implies it's already properly
7552 sign-extended for the wider mode. Now, for ZERO_EXTEND, the
7553 story is different. */
7554 else if (code == ZERO_EXTEND)
7556 enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7557 rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7559 if (XEXP (x, 0) != r)
7561 /* We must simplify the zero_extend here, before we lose
7562 track of the original inner_mode. */
7563 new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7564 r, inner_mode);
7565 if (new)
7566 return new;
7567 else
7568 SUBST (XEXP (x, 0), r);
7571 return x;
7574 fmt = GET_RTX_FORMAT (code);
7575 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7577 if (fmt[i] == 'e')
7578 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7579 else if (fmt[i] == 'E')
7580 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7581 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7582 cond, reg, val));
7585 return x;
7588 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
7589 assignment as a field assignment. */
7591 static int
7592 rtx_equal_for_field_assignment_p (x, y)
7593 rtx x;
7594 rtx y;
7596 if (x == y || rtx_equal_p (x, y))
7597 return 1;
7599 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7600 return 0;
7602 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7603 Note that all SUBREGs of MEM are paradoxical; otherwise they
7604 would have been rewritten. */
7605 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7606 && GET_CODE (SUBREG_REG (y)) == MEM
7607 && rtx_equal_p (SUBREG_REG (y),
7608 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7609 return 1;
7611 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7612 && GET_CODE (SUBREG_REG (x)) == MEM
7613 && rtx_equal_p (SUBREG_REG (x),
7614 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7615 return 1;
7617 /* We used to see if get_last_value of X and Y were the same but that's
7618 not correct. In one direction, we'll cause the assignment to have
7619 the wrong destination and in the other case, we'll import a register
7620 into this insn that might already have been dead. So fail if none of the
7621 above cases are true. */
7622 return 0;
7625 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
7626 Return that assignment if so.
7628 We only handle the most common cases. */
7630 static rtx
7631 make_field_assignment (x)
7632 rtx x;
7634 rtx dest = SET_DEST (x);
7635 rtx src = SET_SRC (x);
7636 rtx assign;
7637 rtx rhs, lhs;
7638 HOST_WIDE_INT c1;
7639 HOST_WIDE_INT pos;
7640 unsigned HOST_WIDE_INT len;
7641 rtx other;
7642 enum machine_mode mode;
7644 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7645 a clear of a one-bit field. We will have changed it to
7646 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7647 for a SUBREG. */
7649 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7650 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7651 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7652 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7654 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7655 1, 1, 1, 0);
7656 if (assign != 0)
7657 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7658 return x;
7661 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7662 && subreg_lowpart_p (XEXP (src, 0))
7663 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7664 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7665 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7666 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7667 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7669 assign = make_extraction (VOIDmode, dest, 0,
7670 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7671 1, 1, 1, 0);
7672 if (assign != 0)
7673 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7674 return x;
7677 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7678 one-bit field. */
7679 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7680 && XEXP (XEXP (src, 0), 0) == const1_rtx
7681 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7683 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7684 1, 1, 1, 0);
7685 if (assign != 0)
7686 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7687 return x;
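/* For example, (set DEST (ior (ashift (const_int 1) POS) DEST)) becomes
(set (zero_extract DEST (const_int 1) POS) (const_int 1)), i.e. a
store of 1 into the single-bit field of DEST at position POS. */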
7690 /* The other case we handle is assignments into a constant-position
7691 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
7692 a mask that has all one bits except for a group of zero bits and
7693 OTHER is known to have zeros where C1 has ones, this is such an
7694 assignment. Compute the position and length from C1. Shift OTHER
7695 to the appropriate position, force it to the required mode, and
7696 make the extraction. Check for the AND in both operands. */
7698 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7699 return x;
7701 rhs = expand_compound_operation (XEXP (src, 0));
7702 lhs = expand_compound_operation (XEXP (src, 1));
7704 if (GET_CODE (rhs) == AND
7705 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7706 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7707 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7708 else if (GET_CODE (lhs) == AND
7709 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7710 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7711 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7712 else
7713 return x;
7715 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7716 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7717 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7718 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7719 return x;
7721 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7722 if (assign == 0)
7723 return x;
7725 /* The mode to use for the source is the mode of the assignment, or of
7726 what is inside a possible STRICT_LOW_PART. */
7727 mode = (GET_CODE (assign) == STRICT_LOW_PART
7728 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7730 /* Shift OTHER right POS places and make it the source, restricting it
7731 to the proper length and mode. */
7733 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7734 GET_MODE (src), other, pos),
7735 mode,
7736 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7737 ? ~(unsigned HOST_WIDE_INT) 0
7738 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7739 dest, 0);
7741 return gen_rtx_SET (VOIDmode, assign, src);
7744 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7745 if so. */
7747 static rtx
7748 apply_distributive_law (x)
7749 rtx x;
7751 enum rtx_code code = GET_CODE (x);
7752 rtx lhs, rhs, other;
7753 rtx tem;
7754 enum rtx_code inner_code;
7756 /* Distributivity is not true for floating point.
7757 It can change the value. So don't do it.
7758 -- rms and moshier@world.std.com. */
7759 if (FLOAT_MODE_P (GET_MODE (x)))
7760 return x;
7762 /* The outer operation can only be one of the following: */
7763 if (code != IOR && code != AND && code != XOR
7764 && code != PLUS && code != MINUS)
7765 return x;
7767 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7769 /* If either operand is a primitive we can't do anything, so get out
7770 fast. */
7771 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7772 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7773 return x;
7775 lhs = expand_compound_operation (lhs);
7776 rhs = expand_compound_operation (rhs);
7777 inner_code = GET_CODE (lhs);
7778 if (inner_code != GET_CODE (rhs))
7779 return x;
7781 /* See if the inner and outer operations distribute. */
7782 switch (inner_code)
7784 case LSHIFTRT:
7785 case ASHIFTRT:
7786 case AND:
7787 case IOR:
7788 /* These all distribute except over PLUS. */
7789 if (code == PLUS || code == MINUS)
7790 return x;
7791 break;
7793 case MULT:
7794 if (code != PLUS && code != MINUS)
7795 return x;
7796 break;
7798 case ASHIFT:
7799 /* This is also a multiply, so it distributes over everything. */
7800 break;
7802 case SUBREG:
7803 /* Non-paradoxical SUBREGs distribute over all operations, provided
7804 the inner modes and byte offsets are the same, this is an extraction
7805 of a low-order part, we don't convert an fp operation to int or
7806 vice versa, and we would not be converting a single-word
7807 operation into a multi-word operation. The latter test is not
7808 required, but it prevents generating unneeded multi-word operations.
7809 Some of the previous tests are redundant given the latter test, but
7810 are retained because they are required for correctness.
7812 We produce the result slightly differently in this case. */
7814 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7815 || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7816 || ! subreg_lowpart_p (lhs)
7817 || (GET_MODE_CLASS (GET_MODE (lhs))
7818 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7819 || (GET_MODE_SIZE (GET_MODE (lhs))
7820 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7821 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7822 return x;
7824 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7825 SUBREG_REG (lhs), SUBREG_REG (rhs));
7826 return gen_lowpart_for_combine (GET_MODE (x), tem);
7828 default:
7829 return x;
7832 /* Set LHS and RHS to the inner operands (A and B in the example
7833 above) and set OTHER to the common operand (C in the example).
7834 There is only one way to do this unless the inner operation is
7835 commutative. */
7836 if (GET_RTX_CLASS (inner_code) == 'c'
7837 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7838 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7839 else if (GET_RTX_CLASS (inner_code) == 'c'
7840 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7841 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7842 else if (GET_RTX_CLASS (inner_code) == 'c'
7843 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7844 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7845 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7846 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7847 else
7848 return x;
7850 /* Form the new inner operation, seeing if it simplifies first. */
7851 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7853 /* There is one exception to the general way of distributing:
7854 (a | b) ^ (a | c) -> (b ^ c) & (~a) */
7855 if (code == XOR && inner_code == IOR)
7857 inner_code = AND;
7858 other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
7861 /* We may be able to continue distributing the result, so call
7862 ourselves recursively on the inner operation before forming the
7863 outer operation, which we return. */
7864 return gen_binary (inner_code, GET_MODE (x),
7865 apply_distributive_law (tem), other);
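/* For example, (ior (and A C) (and B C)) becomes (and (ior A B) C),
while by the exception above (xor (ior A B) (ior A C)) becomes
(and (xor B C) (not A)). */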
7868 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7869 in MODE.
7871 Return an equivalent form, if different from X. Otherwise, return X. If
7872 X is zero, we are to always construct the equivalent form. */
7874 static rtx
7875 simplify_and_const_int (x, mode, varop, constop)
7876 rtx x;
7877 enum machine_mode mode;
7878 rtx varop;
7879 unsigned HOST_WIDE_INT constop;
7881 unsigned HOST_WIDE_INT nonzero;
7882 int i;
7884 /* Simplify VAROP knowing that we will be only looking at some of the
7885 bits in it.
7887 Note by passing in CONSTOP, we guarantee that the bits not set in
7888 CONSTOP are not significant and will never be examined. We must
7889 ensure that is the case by explicitly masking out those bits
7890 before returning. */
7891 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7893 /* If VAROP is a CLOBBER, we will fail so return it. */
7894 if (GET_CODE (varop) == CLOBBER)
7895 return varop;
7897 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
7898 to VAROP and return the new constant. */
7899 if (GET_CODE (varop) == CONST_INT)
7900 return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
7902 /* See what bits may be nonzero in VAROP. Unlike the general case of
7903 a call to nonzero_bits, here we don't care about bits outside
7904 MODE. */
7906 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7908 /* Turn off all bits in the constant that are known to already be zero.
7909 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7910 which is tested below. */
7912 constop &= nonzero;
7914 /* If we don't have any bits left, return zero. */
7915 if (constop == 0)
7916 return const0_rtx;
7918 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7919 a power of two, we can replace this with an ASHIFT. */
7920 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7921 && (i = exact_log2 (constop)) >= 0)
7922 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
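/* This is valid because (neg X) is 0 or all ones when X is 0 or 1, so
ANDing it with the single bit 2**i leaves exactly X shifted left by
i places; e.g. (and (neg X) (const_int 8)) == (ashift X (const_int 3)). */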
7924 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7925 or XOR, then try to apply the distributive law. This may eliminate
7926 operations if either branch can be simplified because of the AND.
7927 It may also make some cases more complex, but those cases probably
7928 won't match a pattern either with or without this. */
7930 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7931 return
7932 gen_lowpart_for_combine
7933 (mode,
7934 apply_distributive_law
7935 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7936 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7937 XEXP (varop, 0), constop),
7938 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7939 XEXP (varop, 1), constop))));
7941 /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
7942 the AND and see if one of the operands simplifies to zero. If so, we
7943 may eliminate it. */
7945 if (GET_CODE (varop) == PLUS
7946 && exact_log2 (constop + 1) >= 0)
7948 rtx o0, o1;
7950 o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
7951 o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
7952 if (o0 == const0_rtx)
7953 return o1;
7954 if (o1 == const0_rtx)
7955 return o0;
7958 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7959 if we already had one (just check for the simplest cases). */
7960 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7961 && GET_MODE (XEXP (x, 0)) == mode
7962 && SUBREG_REG (XEXP (x, 0)) == varop)
7963 varop = XEXP (x, 0);
7964 else
7965 varop = gen_lowpart_for_combine (mode, varop);
7967 /* If we can't make the SUBREG, try to return what we were given. */
7968 if (GET_CODE (varop) == CLOBBER)
7969 return x ? x : varop;
7971 /* If we are only masking insignificant bits, return VAROP. */
7972 if (constop == nonzero)
7973 x = varop;
7974 else
7976 /* Otherwise, return an AND. */
7977 constop = trunc_int_for_mode (constop, mode);
7978 /* See how much, if any, of X we can use. */
7979 if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7980 x = gen_binary (AND, mode, varop, GEN_INT (constop));
7982 else
7984 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7985 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
7986 SUBST (XEXP (x, 1), GEN_INT (constop));
7988 SUBST (XEXP (x, 0), varop);
7992 return x;
7995 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7996 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7997 is less useful. We can't allow both, because that results in exponential
7998 run time recursion. There is a nullstone testcase that triggered
7999 this. This macro avoids accidental uses of num_sign_bit_copies. */
8000 #define num_sign_bit_copies()
8002 /* Given an expression, X, compute which bits in X can be non-zero.
8003 We don't care about bits outside of those defined in MODE.
8005 For most X this is simply GET_MODE_MASK (MODE), but if X is
8006 a shift, AND, or zero_extract, we can do better. */
8008 static unsigned HOST_WIDE_INT
8009 nonzero_bits (x, mode)
8010 rtx x;
8011 enum machine_mode mode;
8013 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
8014 unsigned HOST_WIDE_INT inner_nz;
8015 enum rtx_code code;
8016 unsigned int mode_width = GET_MODE_BITSIZE (mode);
8017 rtx tem;
8019 /* For floating-point values, assume all bits are needed. */
8020 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
8021 return nonzero;
8023 /* If X is wider than MODE, use its mode instead. */
8024 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
8026 mode = GET_MODE (x);
8027 nonzero = GET_MODE_MASK (mode);
8028 mode_width = GET_MODE_BITSIZE (mode);
8031 if (mode_width > HOST_BITS_PER_WIDE_INT)
8032 /* Our only callers in this case look for single bit values. So
8033 just return the mode mask. Those tests will then be false. */
8034 return nonzero;
8036 #ifndef WORD_REGISTER_OPERATIONS
8037 /* If MODE is wider than X, but both are a single word for both the host
8038 and target machines, we can compute this from which bits of the
8039 object might be nonzero in its own mode, taking into account the fact
8040 that on many CISC machines, accessing an object in a wider mode
8041 causes the high-order bits to become undefined. So they are
8042 not known to be zero. */
8044 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
8045 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
8046 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
8047 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
8049 nonzero &= nonzero_bits (x, GET_MODE (x));
8050 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
8051 return nonzero;
8053 #endif
8055 code = GET_CODE (x);
8056 switch (code)
8058 case REG:
8059 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8060 /* If pointers extend unsigned and this is a pointer in Pmode, say that
8061 all the bits above ptr_mode are known to be zero. */
8062 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8063 && REG_POINTER (x))
8064 nonzero &= GET_MODE_MASK (ptr_mode);
8065 #endif
8067 /* Include declared information about alignment of pointers. */
8068 /* ??? We don't properly preserve REG_POINTER changes across
8069 pointer-to-integer casts, so we can't trust it except for
8070 things that we know must be pointers. See execute/960116-1.c. */
8071 if ((x == stack_pointer_rtx
8072 || x == frame_pointer_rtx
8073 || x == arg_pointer_rtx)
8074 && REGNO_POINTER_ALIGN (REGNO (x)))
8076 unsigned HOST_WIDE_INT alignment
8077 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
8079 #ifdef PUSH_ROUNDING
8080 /* If PUSH_ROUNDING is defined, it is possible for the
8081 stack to be momentarily aligned only to that amount,
8082 so we pick the least alignment. */
8083 if (x == stack_pointer_rtx && PUSH_ARGS)
8084 alignment = MIN (PUSH_ROUNDING (1), alignment);
8085 #endif
8087 nonzero &= ~(alignment - 1);
8090 /* If X is a register whose nonzero bits value is current, use it.
8091 Otherwise, if X is a register whose value we can find, use that
8092 value. Otherwise, use the previously-computed global nonzero bits
8093 for this register. */
8095 if (reg_last_set_value[REGNO (x)] != 0
8096 && (reg_last_set_mode[REGNO (x)] == mode
8097 || (GET_MODE_CLASS (reg_last_set_mode[REGNO (x)]) == MODE_INT
8098 && GET_MODE_CLASS (mode) == MODE_INT))
8099 && (reg_last_set_label[REGNO (x)] == label_tick
8100 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8101 && REG_N_SETS (REGNO (x)) == 1
8102 && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8103 REGNO (x))))
8104 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8105 return reg_last_set_nonzero_bits[REGNO (x)] & nonzero;
8107 tem = get_last_value (x);
8109 if (tem)
8111 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8112 /* If X is narrower than MODE and TEM is a non-negative
8113 constant that would appear negative in the mode of X,
8114 sign-extend it for use in reg_nonzero_bits because some
8115 machines (maybe most) will actually do the sign-extension
8116 and this is the conservative approach.
8118 ??? For 2.5, try to tighten up the MD files in this regard
8119 instead of this kludge. */
8121 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8122 && GET_CODE (tem) == CONST_INT
8123 && INTVAL (tem) > 0
8124 && 0 != (INTVAL (tem)
8125 & ((HOST_WIDE_INT) 1
8126 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8127 tem = GEN_INT (INTVAL (tem)
8128 | ((HOST_WIDE_INT) (-1)
8129 << GET_MODE_BITSIZE (GET_MODE (x))));
8130 #endif
8131 return nonzero_bits (tem, mode) & nonzero;
8133 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8135 unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)];
8137 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8138 /* We don't know anything about the upper bits. */
8139 mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8140 return nonzero & mask;
8142 else
8143 return nonzero;
8145 case CONST_INT:
8146 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8147 /* If X is negative in MODE, sign-extend the value. */
8148 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8149 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8150 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8151 #endif
8153 return INTVAL (x);
8155 case MEM:
8156 #ifdef LOAD_EXTEND_OP
8157 /* In many, if not most, RISC machines, reading a byte from memory
8158 zeros the rest of the register. Noticing that fact saves a lot
8159 of extra zero-extends. */
8160 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8161 nonzero &= GET_MODE_MASK (GET_MODE (x));
8162 #endif
8163 break;
8165 case EQ: case NE:
8166 case UNEQ: case LTGT:
8167 case GT: case GTU: case UNGT:
8168 case LT: case LTU: case UNLT:
8169 case GE: case GEU: case UNGE:
8170 case LE: case LEU: case UNLE:
8171 case UNORDERED: case ORDERED:
8173 /* If this produces an integer result, we know which bits are set.
8174 Code here used to clear bits outside the mode of X, but that is
8175 now done above. */
8177 if (GET_MODE_CLASS (mode) == MODE_INT
8178 && mode_width <= HOST_BITS_PER_WIDE_INT)
8179 nonzero = STORE_FLAG_VALUE;
8180 break;
8182 case NEG:
8183 #if 0
8184 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8185 and num_sign_bit_copies. */
8186 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8187 == GET_MODE_BITSIZE (GET_MODE (x)))
8188 nonzero = 1;
8189 #endif
8191 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
8192 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8193 break;
8195 case ABS:
8196 #if 0
8197 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8198 and num_sign_bit_copies. */
8199 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8200 == GET_MODE_BITSIZE (GET_MODE (x)))
8201 nonzero = 1;
8202 #endif
8203 break;
8205 case TRUNCATE:
8206 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
8207 break;
8209 case ZERO_EXTEND:
8210 nonzero &= nonzero_bits (XEXP (x, 0), mode);
8211 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8212 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8213 break;
8215 case SIGN_EXTEND:
8216 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8217 Otherwise, show all the bits in the outer mode but not the inner
8218 may be non-zero. */
8219 inner_nz = nonzero_bits (XEXP (x, 0), mode);
8220 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8222 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8223 if (inner_nz
8224 & (((HOST_WIDE_INT) 1
8225 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8226 inner_nz |= (GET_MODE_MASK (mode)
8227 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8230 nonzero &= inner_nz;
8231 break;
8233 case AND:
8234 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8235 & nonzero_bits (XEXP (x, 1), mode));
8236 break;
8238 case XOR: case IOR:
8239 case UMIN: case UMAX: case SMIN: case SMAX:
8241 unsigned HOST_WIDE_INT nonzero0 = nonzero_bits (XEXP (x, 0), mode);
8243 /* Don't call nonzero_bits a second time if it cannot change
8244 anything. */
8245 if ((nonzero & nonzero0) != nonzero)
8246 nonzero &= (nonzero0 | nonzero_bits (XEXP (x, 1), mode));
8248 break;
8250 case PLUS: case MINUS:
8251 case MULT:
8252 case DIV: case UDIV:
8253 case MOD: case UMOD:
8254 /* We can apply the rules of arithmetic to compute the number of
8255 high- and low-order zero bits of these operations. We start by
8256 computing the width (position of the highest-order non-zero bit)
8257 and the number of low-order zero bits for each value. */
8259 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
8260 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
8261 int width0 = floor_log2 (nz0) + 1;
8262 int width1 = floor_log2 (nz1) + 1;
8263 int low0 = floor_log2 (nz0 & -nz0);
8264 int low1 = floor_log2 (nz1 & -nz1);
8265 HOST_WIDE_INT op0_maybe_minusp
8266 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8267 HOST_WIDE_INT op1_maybe_minusp
8268 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8269 unsigned int result_width = mode_width;
8270 int result_low = 0;
8272 switch (code)
8274 case PLUS:
8275 result_width = MAX (width0, width1) + 1;
8276 result_low = MIN (low0, low1);
8277 break;
8278 case MINUS:
8279 result_low = MIN (low0, low1);
8280 break;
8281 case MULT:
8282 result_width = width0 + width1;
8283 result_low = low0 + low1;
8284 break;
8285 case DIV:
8286 if (width1 == 0)
8287 break;
8288 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8289 result_width = width0;
8290 break;
8291 case UDIV:
8292 if (width1 == 0)
8293 break;
8294 result_width = width0;
8295 break;
8296 case MOD:
8297 if (width1 == 0)
8298 break;
8299 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8300 result_width = MIN (width0, width1);
8301 result_low = MIN (low0, low1);
8302 break;
8303 case UMOD:
8304 if (width1 == 0)
8305 break;
8306 result_width = MIN (width0, width1);
8307 result_low = MIN (low0, low1);
8308 break;
8309 default:
8310 abort ();
8313 if (result_width < mode_width)
8314 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8316 if (result_low > 0)
8317 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
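/* For instance, for (mult A B) with nz0 == 0x0c (width 4, two low
   zeros) and nz1 == 0x06 (width 3, one low zero), RESULT_WIDTH is 7
   and RESULT_LOW is 3, so NONZERO is narrowed to 0x78.  */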
8319 #ifdef POINTERS_EXTEND_UNSIGNED
8320 /* If pointers extend unsigned and this is an addition or subtraction
8321 to a pointer in Pmode, all the bits above ptr_mode are known to be
8322 zero. */
8323 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
8324 && (code == PLUS || code == MINUS)
8325 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8326 nonzero &= GET_MODE_MASK (ptr_mode);
8327 #endif
8329 break;
8331 case ZERO_EXTRACT:
8332 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8333 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8334 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8335 break;
8337 case SUBREG:
8338 /* If this is a SUBREG formed for a promoted variable that has
8339 been zero-extended, we know that at least the high-order bits
8340 are zero, though others might be too. */
8342 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
8343 nonzero = (GET_MODE_MASK (GET_MODE (x))
8344 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
8346 /* If the inner mode is a single word for both the host and target
8347 machines, we can compute this from which bits of the inner
8348 object might be nonzero. */
8349 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8350 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8351 <= HOST_BITS_PER_WIDE_INT))
8353 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8355 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8356 /* If this is a typical RISC machine, we only have to worry
8357 about the way loads are extended. */
8358 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8359 ? (((nonzero
8360 & (((unsigned HOST_WIDE_INT) 1
8361 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8362 != 0))
8363 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8364 || GET_CODE (SUBREG_REG (x)) != MEM)
8365 #endif
8367 /* On many CISC machines, accessing an object in a wider mode
8368 causes the high-order bits to become undefined. So they are
8369 not known to be zero. */
8370 if (GET_MODE_SIZE (GET_MODE (x))
8371 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8372 nonzero |= (GET_MODE_MASK (GET_MODE (x))
8373 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8376 break;
8378 case ASHIFTRT:
8379 case LSHIFTRT:
8380 case ASHIFT:
8381 case ROTATE:
8382 /* The nonzero bits are in two classes: any bits within MODE
8383 that aren't in GET_MODE (x) are always significant. The rest of the
8384 nonzero bits are those that are significant in the operand of
8385 the shift when shifted the appropriate number of bits. This
8386 shows that high-order bits are cleared by the right shift and
8387 low-order bits by left shifts. */
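/* For example, an 8-bit (lshiftrt X 4) can have nonzero bits only in
   the low four (0x0f), however many bits X itself might have set.  */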
8388 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8389 && INTVAL (XEXP (x, 1)) >= 0
8390 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8392 enum machine_mode inner_mode = GET_MODE (x);
8393 unsigned int width = GET_MODE_BITSIZE (inner_mode);
8394 int count = INTVAL (XEXP (x, 1));
8395 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8396 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8397 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8398 unsigned HOST_WIDE_INT outer = 0;
8400 if (mode_width > width)
8401 outer = (op_nonzero & nonzero & ~mode_mask);
8403 if (code == LSHIFTRT)
8404 inner >>= count;
8405 else if (code == ASHIFTRT)
8407 inner >>= count;
8409 /* If the sign bit may have been nonzero before the shift, we
8410 need to mark all the places it could have been copied to
8411 by the shift as possibly nonzero. */
8412 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8413 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8415 else if (code == ASHIFT)
8416 inner <<= count;
8417 else
8418 inner = ((inner << (count % width)
8419 | (inner >> (width - (count % width)))) & mode_mask);
8421 nonzero &= (outer | inner);
8423 break;
8425 case FFS:
8426 /* This is at most the number of bits in the mode. */
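/* E.g. in a 32-bit mode FFS yields at most 32, which fits in the low
   six bits, so the mask below is 0x3f.  */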
8427 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
8428 break;
8430 case IF_THEN_ELSE:
8431 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8432 | nonzero_bits (XEXP (x, 2), mode));
8433 break;
8435 default:
8436 break;
8439 return nonzero;
8442 /* See the macro definition above. */
8443 #undef num_sign_bit_copies
8445 /* Return the number of bits at the high-order end of X that are known to
8446 be equal to the sign bit. X will be used in mode MODE; if MODE is
8447 VOIDmode, X will be used in its own mode. The returned value will always
8448 be between 1 and the number of bits in MODE. */
8450 static unsigned int
8451 num_sign_bit_copies (x, mode)
8452 rtx x;
8453 enum machine_mode mode;
8455 enum rtx_code code = GET_CODE (x);
8456 unsigned int bitwidth;
8457 int num0, num1, result;
8458 unsigned HOST_WIDE_INT nonzero;
8459 rtx tem;
8461 /* If we weren't given a mode, use the mode of X. If the mode is still
8462 VOIDmode, we don't know anything. Likewise if one of the modes is
8463 floating-point. */
8465 if (mode == VOIDmode)
8466 mode = GET_MODE (x);
8468 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8469 return 1;
8471 bitwidth = GET_MODE_BITSIZE (mode);
8473 /* For a smaller object, just ignore the high bits. */
8474 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8476 num0 = num_sign_bit_copies (x, GET_MODE (x));
8477 return MAX (1,
8478 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8481 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8483 #ifndef WORD_REGISTER_OPERATIONS
8484 /* If this machine does not do all register operations on the entire
8485 register and MODE is wider than the mode of X, we can say nothing
8486 at all about the high-order bits. */
8487 return 1;
8488 #else
8489 /* Likewise on machines that do, if the mode of the object is smaller
8490 than a word and loads of that size don't sign extend, we can say
8491 nothing about the high order bits. */
8492 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8493 #ifdef LOAD_EXTEND_OP
8494 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8495 #endif
8497 return 1;
8498 #endif
8501 switch (code)
8503 case REG:
8505 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8506 /* If pointers extend signed and this is a pointer in Pmode, say that
8507 all the bits above ptr_mode are known to be sign bit copies. */
8508 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8509 && REG_POINTER (x))
8510 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8511 #endif
8513 if (reg_last_set_value[REGNO (x)] != 0
8514 && reg_last_set_mode[REGNO (x)] == mode
8515 && (reg_last_set_label[REGNO (x)] == label_tick
8516 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8517 && REG_N_SETS (REGNO (x)) == 1
8518 && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8519 REGNO (x))))
8520 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8521 return reg_last_set_sign_bit_copies[REGNO (x)];
8523 tem = get_last_value (x);
8524 if (tem != 0)
8525 return num_sign_bit_copies (tem, mode);
8527 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0
8528 && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth)
8529 return reg_sign_bit_copies[REGNO (x)];
8530 break;
8532 case MEM:
8533 #ifdef LOAD_EXTEND_OP
8534 /* Some RISC machines sign-extend all loads of smaller than a word. */
8535 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8536 return MAX (1, ((int) bitwidth
8537 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8538 #endif
8539 break;
8541 case CONST_INT:
8542 /* If the constant is negative, take its 1's complement and remask.
8543 Then see how many zero bits we have. */
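/* For example, the 32-bit constant 0xffffff00 complements to 0xff,
   giving 32 - 7 - 1 == 24 sign-bit copies.  */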
8544 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8545 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8546 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8547 nonzero = (~nonzero) & GET_MODE_MASK (mode);
8549 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8551 case SUBREG:
8552 /* If this is a SUBREG for a promoted object that is sign-extended
8553 and we are looking at it in a wider mode, we know that at least the
8554 high-order bits are known to be sign bit copies. */
8556 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8558 num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
8559 return MAX ((int) bitwidth
8560 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8561 num0);
8564 /* For a smaller object, just ignore the high bits. */
8565 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8567 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8568 return MAX (1, (num0
8569 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8570 - bitwidth)));
8573 #ifdef WORD_REGISTER_OPERATIONS
8574 #ifdef LOAD_EXTEND_OP
8575 /* For paradoxical SUBREGs on machines where all register operations
8576 affect the entire register, just look inside. Note that we are
8577 passing MODE to the recursive call, so the number of sign bit copies
8578 will remain relative to that mode, not the inner mode. */
8580 /* This works only if loads sign extend. Otherwise, if we get a
8581 reload for the inner part, it may be loaded from the stack, and
8582 then we lose all sign bit copies that existed before the store
8583 to the stack. */
8585 if ((GET_MODE_SIZE (GET_MODE (x))
8586 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8587 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8588 && GET_CODE (SUBREG_REG (x)) == MEM)
8589 return num_sign_bit_copies (SUBREG_REG (x), mode);
8590 #endif
8591 #endif
8592 break;
8594 case SIGN_EXTRACT:
8595 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8596 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8597 break;
8599 case SIGN_EXTEND:
8600 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8601 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8603 case TRUNCATE:
8604 /* For a smaller object, just ignore the high bits. */
8605 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
8606 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8607 - bitwidth)));
8609 case NOT:
8610 return num_sign_bit_copies (XEXP (x, 0), mode);
8612 case ROTATE: case ROTATERT:
8613 /* If we are rotating left by a number of bits less than the number
8614 of sign bit copies, we can just subtract that amount from the
8615 number. */
8616 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8617 && INTVAL (XEXP (x, 1)) >= 0
8618 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
8620 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8621 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8622 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8624 break;
8626 case NEG:
8627 /* In general, this subtracts one sign bit copy. But if the value
8628 is known to be positive, the number of sign bit copies is the
8629 same as that of the input. Finally, if the input has just one bit
8630 that might be nonzero, all the bits are copies of the sign bit. */
8631 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8632 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8633 return num0 > 1 ? num0 - 1 : 1;
8635 nonzero = nonzero_bits (XEXP (x, 0), mode);
8636 if (nonzero == 1)
8637 return bitwidth;
8639 if (num0 > 1
8640 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8641 num0--;
8643 return num0;
8645 case IOR: case AND: case XOR:
8646 case SMIN: case SMAX: case UMIN: case UMAX:
8647 /* Logical operations will preserve the number of sign-bit copies.
8648 MIN and MAX operations always return one of the operands. */
8649 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8650 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8651 return MIN (num0, num1);
8653 case PLUS: case MINUS:
8654 /* For addition and subtraction, we can have a 1-bit carry. However,
8655 if we are subtracting 1 from a positive number, there will not
8656 be such a carry. Furthermore, if the positive number is known to
8657 be 0 or 1, we know the result is either -1 or 0. */
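/* E.g. adding two 32-bit values that each have at least 17 sign-bit
   copies (values fitting in 16 bits) leaves at least 16 copies, since
   the sum fits in 17 bits.  */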
8659 if (code == PLUS && XEXP (x, 1) == constm1_rtx
8660 && bitwidth <= HOST_BITS_PER_WIDE_INT)
8662 nonzero = nonzero_bits (XEXP (x, 0), mode);
8663 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8664 return (nonzero == 1 || nonzero == 0 ? bitwidth
8665 : bitwidth - floor_log2 (nonzero) - 1);
8668 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8669 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8670 result = MAX (1, MIN (num0, num1) - 1);
8672 #ifdef POINTERS_EXTEND_UNSIGNED
8673 /* If pointers extend signed and this is an addition or subtraction
8674 to a pointer in Pmode, all the bits above ptr_mode are known to be
8675 sign bit copies. */
8676 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8677 && (code == PLUS || code == MINUS)
8678 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8679 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
8680 - GET_MODE_BITSIZE (ptr_mode) + 1),
8681 result);
8682 #endif
8683 return result;
8685 case MULT:
8686 /* The number of bits of the product is the sum of the number of
8687 bits of both terms. However, unless one of the terms is known
8688 to be positive, we must allow for an additional bit since negating
8689 a negative number can remove one sign bit copy. */
8691 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8692 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8694 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8695 if (result > 0
8696 && (bitwidth > HOST_BITS_PER_WIDE_INT
8697 || (((nonzero_bits (XEXP (x, 0), mode)
8698 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8699 && ((nonzero_bits (XEXP (x, 1), mode)
8700 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8701 result--;
8703 return MAX (1, result);
8705 case UDIV:
8706 /* The result must be <= the first operand. If the first operand
8707 has the high bit set, we know nothing about the number of sign
8708 bit copies. */
8709 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8710 return 1;
8711 else if ((nonzero_bits (XEXP (x, 0), mode)
8712 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8713 return 1;
8714 else
8715 return num_sign_bit_copies (XEXP (x, 0), mode);
8717 case UMOD:
8718 /* The result must be <= the second operand. */
8719 return num_sign_bit_copies (XEXP (x, 1), mode);
8721 case DIV:
8722 /* Similar to unsigned division, except that we have to worry about
8723 the case where the divisor is negative, in which case we have
8724 to add 1. */
8725 result = num_sign_bit_copies (XEXP (x, 0), mode);
8726 if (result > 1
8727 && (bitwidth > HOST_BITS_PER_WIDE_INT
8728 || (nonzero_bits (XEXP (x, 1), mode)
8729 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8730 result--;
8732 return result;
8734 case MOD:
8735 result = num_sign_bit_copies (XEXP (x, 1), mode);
8736 if (result > 1
8737 && (bitwidth > HOST_BITS_PER_WIDE_INT
8738 || (nonzero_bits (XEXP (x, 1), mode)
8739 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8740 result--;
8742 return result;
8744 case ASHIFTRT:
8745 /* Shifts by a constant add to the number of bits equal to the
8746 sign bit. */
8747 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8748 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8749 && INTVAL (XEXP (x, 1)) > 0)
8750 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
8752 return num0;
8754 case ASHIFT:
8755 /* Left shifts destroy copies. */
8756 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8757 || INTVAL (XEXP (x, 1)) < 0
8758 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
8759 return 1;
8761 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8762 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8764 case IF_THEN_ELSE:
8765 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8766 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8767 return MIN (num0, num1);
8769 case EQ: case NE: case GE: case GT: case LE: case LT:
8770 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
8771 case GEU: case GTU: case LEU: case LTU:
8772 case UNORDERED: case ORDERED:
8773 /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
8774 Then see how many zero bits we have. */
8775 nonzero = STORE_FLAG_VALUE;
8776 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8777 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8778 nonzero = (~nonzero) & GET_MODE_MASK (mode);
8780 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8781 break;
8783 default:
8784 break;
8787 /* If we haven't been able to figure it out by one of the above rules,
8788 see if some of the high-order bits are known to be zero. If so,
8789 count those bits and return one less than that amount. If we can't
8790 safely compute the mask for this mode, always return BITWIDTH. */
8792 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8793 return 1;
8795 nonzero = nonzero_bits (x, mode);
8796 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8797 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8800 /* Return the number of "extended" bits there are in X, when interpreted
8801 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8802 unsigned quantities, this is the number of high-order zero bits.
8803 For signed quantities, this is the number of copies of the sign bit
8804 minus 1. In both cases, this function returns the number of "spare"
8805 bits. For example, if two quantities for which this function returns
8806 at least 1 are added, the addition is known not to overflow.
8808 This function will always return 0 unless called during combine, which
8809 implies that it must be called from a define_split. */
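/* For example, an unsigned SImode value whose nonzero bits are 0xff
   has 31 - 7 == 24 spare high-order bits.  */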
8811 unsigned int
8812 extended_count (x, mode, unsignedp)
8813 rtx x;
8814 enum machine_mode mode;
8815 int unsignedp;
8817 if (nonzero_sign_valid == 0)
8818 return 0;
8820 return (unsignedp
8821 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8822 ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
8823 - floor_log2 (nonzero_bits (x, mode)))
8824 : 0)
8825 : num_sign_bit_copies (x, mode) - 1);
8828 /* This function is called from `simplify_shift_const' to merge two
8829 outer operations. Specifically, we have already found that we need
8830 to perform operation *POP0 with constant *PCONST0 at the outermost
8831 position. We would now like to also perform OP1 with constant CONST1
8832 (with *POP0 being done last).
8834 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8835 the resulting operation. *PCOMP_P is set to 1 if we would need to
8836 complement the innermost operand, otherwise it is unchanged.
8838 MODE is the mode in which the operation will be done. No bits outside
8839 the width of this mode matter. It is assumed that the width of this mode
8840 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8842 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
8843 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8844 result is simply *PCONST0.
8846 If the resulting operation cannot be expressed as one operation, we
8847 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
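/* For example, merging *POP0 == IOR, *PCONST0 == 0x0f with OP1 == IOR,
   CONST1 == 0xf0 yields a single IOR with constant 0xff.  */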
8849 static int
8850 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8851 enum rtx_code *pop0;
8852 HOST_WIDE_INT *pconst0;
8853 enum rtx_code op1;
8854 HOST_WIDE_INT const1;
8855 enum machine_mode mode;
8856 int *pcomp_p;
8858 enum rtx_code op0 = *pop0;
8859 HOST_WIDE_INT const0 = *pconst0;
8861 const0 &= GET_MODE_MASK (mode);
8862 const1 &= GET_MODE_MASK (mode);
8864 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8865 if (op0 == AND)
8866 const1 &= const0;
8868 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8869 if OP0 is SET. */
8871 if (op1 == NIL || op0 == SET)
8872 return 1;
8874 else if (op0 == NIL)
8875 op0 = op1, const0 = const1;
8877 else if (op0 == op1)
8879 switch (op0)
8881 case AND:
8882 const0 &= const1;
8883 break;
8884 case IOR:
8885 const0 |= const1;
8886 break;
8887 case XOR:
8888 const0 ^= const1;
8889 break;
8890 case PLUS:
8891 const0 += const1;
8892 break;
8893 case NEG:
8894 op0 = NIL;
8895 break;
8896 default:
8897 break;
8901 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8902 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8903 return 0;
8905 /* If the two constants aren't the same, we can't do anything. The
8906 remaining six cases can all be done. */
8907 else if (const0 != const1)
8908 return 0;
8910 else
8911 switch (op0)
8913 case IOR:
8914 if (op1 == AND)
8915 /* (a & b) | b == b */
8916 op0 = SET;
8917 else /* op1 == XOR */
8918 /* (a ^ b) | b == a | b */
8920 break;
8922 case XOR:
8923 if (op1 == AND)
8924 /* (a & b) ^ b == (~a) & b */
8925 op0 = AND, *pcomp_p = 1;
8926 else /* op1 == IOR */
8927 /* (a | b) ^ b == a & ~b */
8928 op0 = AND, *pconst0 = ~const0;
8929 break;
8931 case AND:
8932 if (op1 == IOR)
8933 /* (a | b) & b == b */
8934 op0 = SET;
8935 else /* op1 == XOR */
8936 /* (a ^ b) & b == (~a) & b */
8937 *pcomp_p = 1;
8938 break;
8939 default:
8940 break;
8943 /* Check for NO-OP cases. */
8944 const0 &= GET_MODE_MASK (mode);
8945 if (const0 == 0
8946 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8947 op0 = NIL;
8948 else if (const0 == 0 && op0 == AND)
8949 op0 = SET;
8950 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8951 && op0 == AND)
8952 op0 = NIL;
8954 /* ??? Slightly redundant with the above mask, but not entirely.
8955 Moving this above means we'd have to sign-extend the mode mask
8956 for the final test. */
8957 const0 = trunc_int_for_mode (const0, mode);
8959 *pop0 = op0;
8960 *pconst0 = const0;
8962 return 1;
8965 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8966 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8967 that we started with.
8969 The shift is normally computed in the widest mode we find in VAROP, as
8970 long as it isn't a different number of words than RESULT_MODE. Exceptions
8971 are ASHIFTRT and ROTATE, which are always done in their original mode. */
8973 static rtx
8974 simplify_shift_const (x, code, result_mode, varop, orig_count)
8975 rtx x;
8976 enum rtx_code code;
8977 enum machine_mode result_mode;
8978 rtx varop;
8979 int orig_count;
8981 enum rtx_code orig_code = code;
8982 unsigned int count;
8983 int signed_count;
8984 enum machine_mode mode = result_mode;
8985 enum machine_mode shift_mode, tmode;
8986 unsigned int mode_words
8987 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8988 /* We form (outer_op (code varop count) (outer_const)). */
8989 enum rtx_code outer_op = NIL;
8990 HOST_WIDE_INT outer_const = 0;
8991 rtx const_rtx;
8992 int complement_p = 0;
8993 rtx new;
8995 /* Make sure to truncate the "natural" shift on the way in. We don't
8996 want to do this inside the loop as it makes it more difficult to
8997 combine shifts. */
8998 #ifdef SHIFT_COUNT_TRUNCATED
8999 if (SHIFT_COUNT_TRUNCATED)
9000 orig_count &= GET_MODE_BITSIZE (mode) - 1;
9001 #endif
9003 /* If we were given an invalid count, don't do anything except exactly
9004 what was requested. */
9006 if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
9008 if (x)
9009 return x;
9011 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
9014 count = orig_count;
9016 /* Unless one of the branches of the `if' in this loop does a `continue',
9017 we will `break' the loop after the `if'. */
9019 while (count != 0)
9021 /* If we have an operand of (clobber (const_int 0)), just return that
9022 value. */
9023 if (GET_CODE (varop) == CLOBBER)
9024 return varop;
9026 /* If we discovered we had to complement VAROP, leave. Making a NOT
9027 here would cause an infinite loop. */
9028 if (complement_p)
9029 break;
9031 /* Convert ROTATERT to ROTATE. */
9032 if (code == ROTATERT)
9033 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
9035 /* We need to determine what mode we will do the shift in. If the
9036 shift is a right shift or a ROTATE, we must always do it in the mode
9037 it was originally done in. Otherwise, we can do it in MODE, the
9038 widest mode encountered. */
9039 shift_mode
9040 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9041 ? result_mode : mode);
9043 /* Handle cases where the count is greater than the size of the mode
9044 minus 1. For ASHIFTRT, use the size minus one as the count (this can
9045 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
9046 take the count modulo the size. For other shifts, the result is
9047 zero.
9049 Since these shifts are being produced by the compiler by combining
9050 multiple operations, each of which is defined, we know what the
9051 result is supposed to be. */
9053 if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1))
9055 if (code == ASHIFTRT)
9056 count = GET_MODE_BITSIZE (shift_mode) - 1;
9057 else if (code == ROTATE || code == ROTATERT)
9058 count %= GET_MODE_BITSIZE (shift_mode);
9059 else
9061 /* We can't simply return zero because there may be an
9062 outer op. */
9063 varop = const0_rtx;
9064 count = 0;
9065 break;
9069 /* An arithmetic right shift of a quantity known to be -1 or 0
9070 is a no-op. */
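/* (Every bit of such a value equals the sign bit, so e.g. -1 >> 3 is
   still -1 and 0 >> 3 is still 0 under an arithmetic shift.)  */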
9071 if (code == ASHIFTRT
9072 && (num_sign_bit_copies (varop, shift_mode)
9073 == GET_MODE_BITSIZE (shift_mode)))
9075 count = 0;
9076 break;
9079 /* If we are doing an arithmetic right shift and discarding all but
9080 the sign bit copies, this is equivalent to doing a shift by the
9081 bitsize minus one. Convert it into that shift because it will often
9082 allow other simplifications. */
9084 if (code == ASHIFTRT
9085 && (count + num_sign_bit_copies (varop, shift_mode)
9086 >= GET_MODE_BITSIZE (shift_mode)))
9087 count = GET_MODE_BITSIZE (shift_mode) - 1;
9089 /* We simplify the tests below and elsewhere by converting
9090 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9091 `make_compound_operation' will convert it to an ASHIFTRT for
9092 those machines (such as VAX) that don't have an LSHIFTRT. */
9093 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9094 && code == ASHIFTRT
9095 && ((nonzero_bits (varop, shift_mode)
9096 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9097 == 0))
9098 code = LSHIFTRT;
9100 switch (GET_CODE (varop))
9102 case SIGN_EXTEND:
9103 case ZERO_EXTEND:
9104 case SIGN_EXTRACT:
9105 case ZERO_EXTRACT:
9106 new = expand_compound_operation (varop);
9107 if (new != varop)
9109 varop = new;
9110 continue;
9112 break;
9114 case MEM:
9115 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9116 minus the width of a smaller mode, we can do this with a
9117 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
9118 if ((code == ASHIFTRT || code == LSHIFTRT)
9119 && ! mode_dependent_address_p (XEXP (varop, 0))
9120 && ! MEM_VOLATILE_P (varop)
9121 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9122 MODE_INT, 1)) != BLKmode)
9124 new = adjust_address_nv (varop, tmode,
9125 BYTES_BIG_ENDIAN ? 0
9126 : count / BITS_PER_UNIT);
9128 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9129 : ZERO_EXTEND, mode, new);
9130 count = 0;
9131 continue;
9133 break;
9135 case USE:
9136 /* Similar to the case above, except that we can only do this if
9137 the resulting mode is the same as that of the underlying
9138 MEM and adjust the address depending on the *bits* endianness
9139 because of the way that bit-field extract insns are defined. */
9140 if ((code == ASHIFTRT || code == LSHIFTRT)
9141 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9142 MODE_INT, 1)) != BLKmode
9143 && tmode == GET_MODE (XEXP (varop, 0)))
9145 if (BITS_BIG_ENDIAN)
9146 new = XEXP (varop, 0);
9147 else
9149 new = copy_rtx (XEXP (varop, 0));
9150 SUBST (XEXP (new, 0),
9151 plus_constant (XEXP (new, 0),
9152 count / BITS_PER_UNIT));
9155 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9156 : ZERO_EXTEND, mode, new);
9157 count = 0;
9158 continue;
9160 break;
9162 case SUBREG:
9163 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9164 the same number of words as what we've seen so far. Then store
9165 the widest mode in MODE. */
9166 if (subreg_lowpart_p (varop)
9167 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9168 > GET_MODE_SIZE (GET_MODE (varop)))
9169 && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9170 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9171 == mode_words)
9173 varop = SUBREG_REG (varop);
9174 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9175 mode = GET_MODE (varop);
9176 continue;
9178 break;
9180 case MULT:
9181 /* Some machines use MULT instead of ASHIFT because MULT
9182 is cheaper. But it is still better on those machines to
9183 merge two shifts into one. */
9184 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9185 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9187 varop
9188 = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9189 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9190 continue;
9192 break;
9194 case UDIV:
9195 /* Similar, for when divides are cheaper. */
9196 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9197 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9199 varop
9200 = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9201 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9202 continue;
9204 break;
9206 case ASHIFTRT:
9207 /* If we are extracting just the sign bit of an arithmetic
9208 right shift, that shift is not needed. However, the sign
9209 bit of a wider mode may be different from what would be
9210 interpreted as the sign bit in a narrower mode, so, if
9211 the result is narrower, don't discard the shift. */
9212 if (code == LSHIFTRT
9213 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9214 && (GET_MODE_BITSIZE (result_mode)
9215 >= GET_MODE_BITSIZE (GET_MODE (varop))))
9217 varop = XEXP (varop, 0);
9218 continue;
9221 /* ... fall through ... */
9223 case LSHIFTRT:
9224 case ASHIFT:
9225 case ROTATE:
9226 /* Here we have two nested shifts. The result is usually the
9227 AND of a new shift with a mask. We compute the result below. */
9228 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9229 && INTVAL (XEXP (varop, 1)) >= 0
9230 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9231 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9232 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9234 enum rtx_code first_code = GET_CODE (varop);
9235 unsigned int first_count = INTVAL (XEXP (varop, 1));
9236 unsigned HOST_WIDE_INT mask;
9237 rtx mask_rtx;
9239 /* We have one common special case. We can't do any merging if
9240 the inner code is an ASHIFTRT of a smaller mode. However, if
9241 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9242 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9243 we can convert it to
9244 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9245 This simplifies certain SIGN_EXTEND operations. */
9246 if (code == ASHIFT && first_code == ASHIFTRT
9247 && count == (unsigned int)
9248 (GET_MODE_BITSIZE (result_mode)
9249 - GET_MODE_BITSIZE (GET_MODE (varop))))
9251 /* C3 has the low-order C1 bits zero. */
9253 mask = (GET_MODE_MASK (mode)
9254 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9256 varop = simplify_and_const_int (NULL_RTX, result_mode,
9257 XEXP (varop, 0), mask);
9258 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9259 varop, count);
9260 count = first_count;
9261 code = ASHIFTRT;
9262 continue;
9265 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9266 than C1 high-order bits equal to the sign bit, we can convert
9267 this to either an ASHIFT or an ASHIFTRT depending on the
9268 two counts.
9270 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9272 if (code == ASHIFTRT && first_code == ASHIFT
9273 && GET_MODE (varop) == shift_mode
9274 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9275 > first_count))
9277 varop = XEXP (varop, 0);
9279 signed_count = count - first_count;
9280 if (signed_count < 0)
9281 count = -signed_count, code = ASHIFT;
9282 else
9283 count = signed_count;
9285 continue;
9288 /* There are some cases we can't do. If CODE is ASHIFTRT,
9289 we can only do this if FIRST_CODE is also ASHIFTRT.
9291 We can't do the case when CODE is ROTATE and FIRST_CODE is
9292 ASHIFTRT.
9294 If the mode of this shift is not the mode of the outer shift,
9295 we can't do this if either shift is a right shift or ROTATE.
9297 Finally, we can't do any of these if the mode is too wide
9298 unless the codes are the same.
9300 Handle the case where the shift codes are the same
9301 first. */
9303 if (code == first_code)
9305 if (GET_MODE (varop) != result_mode
9306 && (code == ASHIFTRT || code == LSHIFTRT
9307 || code == ROTATE))
9308 break;
9310 count += first_count;
9311 varop = XEXP (varop, 0);
9312 continue;
9315 if (code == ASHIFTRT
9316 || (code == ROTATE && first_code == ASHIFTRT)
9317 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9318 || (GET_MODE (varop) != result_mode
9319 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9320 || first_code == ROTATE
9321 || code == ROTATE)))
9322 break;
9324 /* To compute the mask to apply after the shift, shift the
9325 nonzero bits of the inner shift the same way the
9326 outer shift will. */
9328 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9330 mask_rtx
9331 = simplify_binary_operation (code, result_mode, mask_rtx,
9332 GEN_INT (count));
9334 /* Give up if we can't compute an outer operation to use. */
9335 if (mask_rtx == 0
9336 || GET_CODE (mask_rtx) != CONST_INT
9337 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9338 INTVAL (mask_rtx),
9339 result_mode, &complement_p))
9340 break;
9342 /* If the shifts are in the same direction, we add the
9343 counts. Otherwise, we subtract them. */
9344 signed_count = count;
9345 if ((code == ASHIFTRT || code == LSHIFTRT)
9346 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9347 signed_count += first_count;
9348 else
9349 signed_count -= first_count;
9351 /* If COUNT is positive, the new shift is usually CODE,
9352 except for the two exceptions below, in which case it is
9353 FIRST_CODE. If the count is negative, FIRST_CODE should
9354 always be used. */
9355 if (signed_count > 0
9356 && ((first_code == ROTATE && code == ASHIFT)
9357 || (first_code == ASHIFTRT && code == LSHIFTRT)))
9358 code = first_code, count = signed_count;
9359 else if (signed_count < 0)
9360 code = first_code, count = -signed_count;
9361 else
9362 count = signed_count;
9364 varop = XEXP (varop, 0);
9365 continue;
9368 /* If we have (A << B << C) for any shift, we can convert this to
9369 (A << C << B). This wins if A is a constant. Only try this if
9370 B is not a constant. */
9372 else if (GET_CODE (varop) == code
9373 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9374 && 0 != (new
9375 = simplify_binary_operation (code, mode,
9376 XEXP (varop, 0),
9377 GEN_INT (count))))
9379 varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9380 count = 0;
9381 continue;
9383 break;
9385 case NOT:
9386 /* Make this fit the case below. */
9387 varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9388 GEN_INT (GET_MODE_MASK (mode)));
9389 continue;
9391 case IOR:
9392 case AND:
9393 case XOR:
9394 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9395 with C the size of VAROP - 1 and the shift is logical if
9396 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9397 we have an (le X 0) operation. If we have an arithmetic shift
9398 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9399 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9401 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9402 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9403 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9404 && (code == LSHIFTRT || code == ASHIFTRT)
9405 && count == (unsigned int)
9406 (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9407 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9409 count = 0;
9410 varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9411 const0_rtx);
9413 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9414 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9416 continue;
9419 /* If we have (shift (logical)), move the logical to the outside
9420 to allow it to possibly combine with another logical and the
9421 shift to combine with another shift. This also canonicalizes to
9422 what a ZERO_EXTRACT looks like. Also, some machines have
9423 (and (shift)) insns. */
9425 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9426 && (new = simplify_binary_operation (code, result_mode,
9427 XEXP (varop, 1),
9428 GEN_INT (count))) != 0
9429 && GET_CODE (new) == CONST_INT
9430 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9431 INTVAL (new), result_mode, &complement_p))
9433 varop = XEXP (varop, 0);
9434 continue;
9437 /* If we can't do that, try to simplify the shift in each arm of the
9438 logical expression, make a new logical expression, and apply
9439 the inverse distributive law. */
9441 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9442 XEXP (varop, 0), count);
9443 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9444 XEXP (varop, 1), count);
9446 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9447 varop = apply_distributive_law (varop);
9449 count = 0;
9451 break;
9453 case EQ:
9454 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9455 says that the sign bit can be tested, FOO has mode MODE, C is
9456 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9457 that may be nonzero. */
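/* E.g. when FOO is known to be 0 or 1, (eq FOO 0) is 1 - FOO, which
   is exactly (xor FOO 1).  */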
9458 if (code == LSHIFTRT
9459 && XEXP (varop, 1) == const0_rtx
9460 && GET_MODE (XEXP (varop, 0)) == result_mode
9461 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9462 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9463 && ((STORE_FLAG_VALUE
9464 & ((HOST_WIDE_INT) 1
9465 << (GET_MODE_BITSIZE (result_mode) - 1))))
9466 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9467 && merge_outer_ops (&outer_op, &outer_const, XOR,
9468 (HOST_WIDE_INT) 1, result_mode,
9469 &complement_p))
9471 varop = XEXP (varop, 0);
9472 count = 0;
9473 continue;
9475 break;
9477 case NEG:
9478 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9479 than the number of bits in the mode is equivalent to A. */
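/* With A == 1, (neg A) is all ones and the logical shift leaves 1;
   with A == 0 the result is 0, so the expression equals A.  */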
9480 if (code == LSHIFTRT
9481 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9482 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9484 varop = XEXP (varop, 0);
9485 count = 0;
9486 continue;
9489 /* NEG commutes with ASHIFT since it is multiplication. Move the
9490 NEG outside to allow shifts to combine. */
9491 if (code == ASHIFT
9492 && merge_outer_ops (&outer_op, &outer_const, NEG,
9493 (HOST_WIDE_INT) 0, result_mode,
9494 &complement_p))
9496 varop = XEXP (varop, 0);
9497 continue;
9499 break;
9501 case PLUS:
9502 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9503 is one less than the number of bits in the mode is
9504 equivalent to (xor A 1). */
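/* (plus A -1) is -1 when A == 0 and 0 when A == 1, so shifting the
   sign bit down gives 1 and 0 respectively, i.e. (xor A 1).  */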
9505 if (code == LSHIFTRT
9506 && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9507 && XEXP (varop, 1) == constm1_rtx
9508 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9509 && merge_outer_ops (&outer_op, &outer_const, XOR,
9510 (HOST_WIDE_INT) 1, result_mode,
9511 &complement_p))
9513 count = 0;
9514 varop = XEXP (varop, 0);
9515 continue;
9518 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9519 that might be nonzero in BAR are those being shifted out and those
9520 bits are known zero in FOO, we can replace the PLUS with FOO.
9521 Similarly in the other operand order. This code occurs when
9522 we are computing the size of a variable-size array. */
9524 if ((code == ASHIFTRT || code == LSHIFTRT)
9525 && count < HOST_BITS_PER_WIDE_INT
9526 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9527 && (nonzero_bits (XEXP (varop, 1), result_mode)
9528 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9530 varop = XEXP (varop, 0);
9531 continue;
9533 else if ((code == ASHIFTRT || code == LSHIFTRT)
9534 && count < HOST_BITS_PER_WIDE_INT
9535 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9536 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9537 >> count)
9538 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9539 & nonzero_bits (XEXP (varop, 1),
9540 result_mode)))
9542 varop = XEXP (varop, 1);
9543 continue;
9546 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9547 if (code == ASHIFT
9548 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9549 && (new = simplify_binary_operation (ASHIFT, result_mode,
9550 XEXP (varop, 1),
9551 GEN_INT (count))) != 0
9552 && GET_CODE (new) == CONST_INT
9553 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9554 INTVAL (new), result_mode, &complement_p))
9556 varop = XEXP (varop, 0);
9557 continue;
9559 break;
9561 case MINUS:
9562 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9563 with C the size of VAROP - 1 and the shift is logical if
9564 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9565 we have a (gt X 0) operation. If the shift is arithmetic with
9566 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9567 we have a (neg (gt X 0)) operation. */
9569 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9570 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9571 && count == (unsigned int)
9572 (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9573 && (code == LSHIFTRT || code == ASHIFTRT)
9574 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9575 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1))
9576 == count
9577 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9579 count = 0;
9580 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9581 const0_rtx);
9583 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9584 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9586 continue;
9588 break;
9590 case TRUNCATE:
9591 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9592 if the truncate does not affect the value. */
9593 if (code == LSHIFTRT
9594 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9595 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9596 && (INTVAL (XEXP (XEXP (varop, 0), 1))
9597 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9598 - GET_MODE_BITSIZE (GET_MODE (varop)))))
9600 rtx varop_inner = XEXP (varop, 0);
9602 varop_inner
9603 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9604 XEXP (varop_inner, 0),
9605 GEN_INT
9606 (count + INTVAL (XEXP (varop_inner, 1))));
9607 varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9608 count = 0;
9609 continue;
9611 break;
9613 default:
9614 break;
9617 break;
9620 /* We need to determine what mode to do the shift in. If the shift is
9621 a right shift or ROTATE, we must always do it in the mode it was
9622 originally done in. Otherwise, we can do it in MODE, the widest mode
9623 encountered. The code we care about is that of the shift that will
9624 actually be done, not the shift that was originally requested. */
9625 shift_mode
9626 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9627 ? result_mode : mode);
9629 /* We have now finished analyzing the shift. The result should be
9630 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9631 OUTER_OP is non-NIL, it is an operation that needs to be applied
9632 to the result of the shift. OUTER_CONST is the relevant constant,
9633 but we must turn off all bits turned off in the shift.
9635 If we were passed a value for X, see if we can use any pieces of
9636 it. If not, make new rtx. */
9638 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9639 && GET_CODE (XEXP (x, 1)) == CONST_INT
9640 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count)
9641 const_rtx = XEXP (x, 1);
9642 else
9643 const_rtx = GEN_INT (count);
9645 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9646 && GET_MODE (XEXP (x, 0)) == shift_mode
9647 && SUBREG_REG (XEXP (x, 0)) == varop)
9648 varop = XEXP (x, 0);
9649 else if (GET_MODE (varop) != shift_mode)
9650 varop = gen_lowpart_for_combine (shift_mode, varop);
9652 /* If we can't make the SUBREG, try to return what we were given. */
9653 if (GET_CODE (varop) == CLOBBER)
9654 return x ? x : varop;
9656 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9657 if (new != 0)
9658 x = new;
9659 else
9660 x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9662 /* If we have an outer operation and we just made a shift, it is
9663 possible that we could have simplified the shift were it not
9664 for the outer operation. So try to do the simplification
9665 recursively. */
9667 if (outer_op != NIL && GET_CODE (x) == code
9668 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9669 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9670 INTVAL (XEXP (x, 1)));
9672 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9673 turn off all the bits that the shift would have turned off. */
9674 if (orig_code == LSHIFTRT && result_mode != shift_mode)
9675 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9676 GET_MODE_MASK (result_mode) >> orig_count);
9678 /* Do the remainder of the processing in RESULT_MODE. */
9679 x = gen_lowpart_for_combine (result_mode, x);
9681 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9682 operation. */
9683 if (complement_p)
9684 x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9686 if (outer_op != NIL)
9688 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9689 outer_const = trunc_int_for_mode (outer_const, result_mode);
9691 if (outer_op == AND)
9692 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9693 else if (outer_op == SET)
9694 /* This means that we have determined that the result is
9695 equivalent to a constant. This should be rare. */
9696 x = GEN_INT (outer_const);
9697 else if (GET_RTX_CLASS (outer_op) == '1')
9698 x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9699 else
9700 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9703 return x;
9706 /* Like recog, but we receive the address of a pointer to a new pattern.
9707 We try to match the rtx that the pointer points to.
9708 If that fails, we may try to modify or replace the pattern,
9709 storing the replacement into the same pointer object.
9711 Modifications include deletion or addition of CLOBBERs.
9713 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9714 the CLOBBERs are placed.
9716 The value is the final insn code from the pattern ultimately matched,
9717 or -1. */
9719 static int
9720 recog_for_combine (pnewpat, insn, pnotes)
9721 rtx *pnewpat;
9722 rtx insn;
9723 rtx *pnotes;
9725 rtx pat = *pnewpat;
9726 int insn_code_number;
9727 int num_clobbers_to_add = 0;
9728 int i;
9729 rtx notes = 0;
9730 rtx dummy_insn;
9732 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9733 we use to indicate that something didn't match. If we find such a
9734 thing, force rejection. */
9735 if (GET_CODE (pat) == PARALLEL)
9736 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9737 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9738 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9739 return -1;
9741 /* *pnewpat does not have to be actual PATTERN (insn), so make a dummy
9742 instruction for pattern recognition. */
9743 dummy_insn = shallow_copy_rtx (insn);
9744 PATTERN (dummy_insn) = pat;
9745 REG_NOTES (dummy_insn) = 0;
9747 insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
9749 /* If the pattern isn't recognized, we may previously have had an insn
9750 that clobbered some register as a side effect, but the combined
9751 insn doesn't need to do that. So try once more without the clobbers
9752 unless this represents an ASM insn. */
9754 if (insn_code_number < 0 && ! check_asm_operands (pat)
9755 && GET_CODE (pat) == PARALLEL)
9757 int pos;
9759 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9760 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9762 if (i != pos)
9763 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9764 pos++;
9767 SUBST_INT (XVECLEN (pat, 0), pos);
9769 if (pos == 1)
9770 pat = XVECEXP (pat, 0, 0);
9772 PATTERN (dummy_insn) = pat;
9773 insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
9776 /* Recognize all noop sets; these will be killed by a followup pass. */
9777 if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9778 insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9780 /* If we had any clobbers to add, make a new pattern that contains
9781 them. Then check to make sure that all of them are dead. */
9782 if (num_clobbers_to_add)
9784 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9785 rtvec_alloc (GET_CODE (pat) == PARALLEL
9786 ? (XVECLEN (pat, 0)
9787 + num_clobbers_to_add)
9788 : num_clobbers_to_add + 1));
9790 if (GET_CODE (pat) == PARALLEL)
9791 for (i = 0; i < XVECLEN (pat, 0); i++)
9792 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9793 else
9794 XVECEXP (newpat, 0, 0) = pat;
9796 add_clobbers (newpat, insn_code_number);
9798 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9799 i < XVECLEN (newpat, 0); i++)
9801 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9802 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9803 return -1;
9804 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9805 XEXP (XVECEXP (newpat, 0, i), 0), notes);
9807 pat = newpat;
9810 *pnewpat = pat;
9811 *pnotes = notes;
9813 return insn_code_number;
9816 /* Like gen_lowpart but for use by combine. In combine it is not possible
9817 to create any new pseudoregs. However, it is safe to create
9818 invalid memory addresses, because combine will try to recognize
9819 them and all they will do is make the combine attempt fail.
9821 If for some reason this cannot do its job, an rtx
9822 (clobber (const_int 0)) is returned.
9823 An insn containing that will not be recognized. */
9825 #undef gen_lowpart
9827 static rtx
9828 gen_lowpart_for_combine (mode, x)
9829 enum machine_mode mode;
9830 rtx x;
9832 rtx result;
9834 if (GET_MODE (x) == mode)
9835 return x;
9837 /* We can only support MODE being wider than a word if X is a
9838 constant integer or has a mode the same size. */
9840 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9841 && ! ((GET_MODE (x) == VOIDmode
9842 && (GET_CODE (x) == CONST_INT
9843 || GET_CODE (x) == CONST_DOUBLE))
9844 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
9845 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9847 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9848 won't know what to do. So we will strip off the SUBREG here and
9849 process normally. */
9850 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9852 x = SUBREG_REG (x);
9853 if (GET_MODE (x) == mode)
9854 return x;
9857 result = gen_lowpart_common (mode, x);
9858 #ifdef CLASS_CANNOT_CHANGE_MODE
9859 if (result != 0
9860 && GET_CODE (result) == SUBREG
9861 && GET_CODE (SUBREG_REG (result)) == REG
9862 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9863 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
9864 GET_MODE (SUBREG_REG (result))))
9865 REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
9866 #endif
9868 if (result)
9869 return result;
9871 if (GET_CODE (x) == MEM)
9873 int offset = 0;
9875 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9876 address. */
9877 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9878 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9880 /* If we want to refer to something bigger than the original memref,
9881 generate a perverse subreg instead. That will force a reload
9882 of the original memref X. */
9883 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
9884 return gen_rtx_SUBREG (mode, x, 0);
9886 if (WORDS_BIG_ENDIAN)
9887 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9888 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9890 if (BYTES_BIG_ENDIAN)
9892 /* Adjust the address so that the address-after-the-data is
9893 unchanged. */
9894 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9895 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9898 return adjust_address_nv (x, mode, offset);
9901 /* If X is a comparison operator, rewrite it in a new mode. This
9902 probably won't match, but may allow further simplifications. */
9903 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9904 return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9906 /* If we couldn't simplify X any other way, just enclose it in a
9907 SUBREG. Normally, this SUBREG won't match, but some patterns may
9908 include an explicit SUBREG or we may simplify it further in combine. */
9909 else
9911 int offset = 0;
9912 rtx res;
9913 enum machine_mode sub_mode = GET_MODE (x);
9915 offset = subreg_lowpart_offset (mode, sub_mode);
9916 if (sub_mode == VOIDmode)
9918 sub_mode = int_mode_for_mode (mode);
9919 x = gen_lowpart_common (sub_mode, x);
9921 res = simplify_gen_subreg (mode, x, sub_mode, offset);
9922 if (res)
9923 return res;
9924 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9928 /* These routines make binary and unary operations by first seeing if they
9929 fold; if not, a new expression is allocated. */
9931 static rtx
9932 gen_binary (code, mode, op0, op1)
9933 enum rtx_code code;
9934 enum machine_mode mode;
9935 rtx op0, op1;
9937 rtx result;
9938 rtx tem;
9940 if (GET_RTX_CLASS (code) == 'c'
9941 && swap_commutative_operands_p (op0, op1))
9942 tem = op0, op0 = op1, op1 = tem;
9944 if (GET_RTX_CLASS (code) == '<')
9946 enum machine_mode op_mode = GET_MODE (op0);
9948 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
9949 just (REL_OP X Y). */
9950 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9952 op1 = XEXP (op0, 1);
9953 op0 = XEXP (op0, 0);
9954 op_mode = GET_MODE (op0);
9957 if (op_mode == VOIDmode)
9958 op_mode = GET_MODE (op1);
9959 result = simplify_relational_operation (code, op_mode, op0, op1);
9961 else
9962 result = simplify_binary_operation (code, mode, op0, op1);
9964 if (result)
9965 return result;
9967 /* Put complex operands first and constants second. */
9968 if (GET_RTX_CLASS (code) == 'c'
9969 && swap_commutative_operands_p (op0, op1))
9970 return gen_rtx_fmt_ee (code, mode, op1, op0);
9972 /* If we are turning off bits already known off in OP0, we need not do
9973 an AND. */
9974 else if (code == AND && GET_CODE (op1) == CONST_INT
9975 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9976 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
9977 return op0;
9979 return gen_rtx_fmt_ee (code, mode, op0, op1);
9982 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
9983 comparison code that will be tested.
9985 The result is a possibly different comparison code to use. *POP0 and
9986 *POP1 may be updated.
9988 It is possible that we might detect that a comparison is either always
9989 true or always false. However, we do not perform general constant
9990 folding in combine, so this knowledge isn't useful. Such tautologies
9991 should have been detected earlier. Hence we ignore all such cases. */
9993 static enum rtx_code
9994 simplify_comparison (code, pop0, pop1)
9995 enum rtx_code code;
9996 rtx *pop0;
9997 rtx *pop1;
9999 rtx op0 = *pop0;
10000 rtx op1 = *pop1;
10001 rtx tem, tem1;
10002 int i;
10003 enum machine_mode mode, tmode;
10005 /* Try a few ways of applying the same transformation to both operands. */
10006 while (1)
10008 #ifndef WORD_REGISTER_OPERATIONS
10009 /* The test below this one won't handle SIGN_EXTENDs on these machines,
10010 so check specially. */
10011 if (code != GTU && code != GEU && code != LTU && code != LEU
10012 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10013 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10014 && GET_CODE (XEXP (op1, 0)) == ASHIFT
10015 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10016 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10017 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10018 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10019 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10020 && GET_CODE (XEXP (op1, 1)) == CONST_INT
10021 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10022 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
10023 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
10024 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
10025 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
10026 && (INTVAL (XEXP (op0, 1))
10027 == (GET_MODE_BITSIZE (GET_MODE (op0))
10028 - (GET_MODE_BITSIZE
10029 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10031 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10032 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10034 #endif
10036 /* If both operands are the same constant shift, see if we can ignore the
10037 shift. We can if the shift is a rotate or if the bits shifted out of
10038 this shift are known to be zero for both inputs and if the type of
10039 comparison is compatible with the shift. */
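/* A worked sketch of the mask computation, assuming QImode (8 bits)
   and two LSHIFTRTs by 2 under an equality or unsigned test: mask
   starts as 0xff and becomes (0xff >> 2) << 2 == 0xfc.  If
   nonzero_bits shows the low two bits of both inner operands are
   already zero, the shifts discard nothing and we may compare the
   inner operands directly.  */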
10040 if (GET_CODE (op0) == GET_CODE (op1)
10041 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10042 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10043 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10044 && (code != GT && code != LT && code != GE && code != LE))
10045 || (GET_CODE (op0) == ASHIFTRT
10046 && (code != GTU && code != LTU
10047 && code != GEU && code != LEU)))
10048 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10049 && INTVAL (XEXP (op0, 1)) >= 0
10050 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10051 && XEXP (op0, 1) == XEXP (op1, 1))
10053 enum machine_mode mode = GET_MODE (op0);
10054 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10055 int shift_count = INTVAL (XEXP (op0, 1));
10057 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10058 mask &= (mask >> shift_count) << shift_count;
10059 else if (GET_CODE (op0) == ASHIFT)
10060 mask = (mask & (mask << shift_count)) >> shift_count;
10062 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10063 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10064 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10065 else
10066 break;
10069 /* If both operands are AND's of a paradoxical SUBREG by constant, the
10070 SUBREGs are of the same mode, and, in both cases, the AND would
10071 be redundant if the comparison was done in the narrower mode,
10072 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10073 and the operand's possibly nonzero bits are 0xffffff01; in that case
10074 if we only care about QImode, we don't need the AND). This case
10075 occurs if the output mode of an scc insn is not SImode and
10076 STORE_FLAG_VALUE == 1 (e.g., the 386).
10078 Similarly, check for a case where the AND's are ZERO_EXTEND
10079 operations from some narrower mode even though a SUBREG is not
10080 present. */
10082 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10083 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10084 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
10086 rtx inner_op0 = XEXP (op0, 0);
10087 rtx inner_op1 = XEXP (op1, 0);
10088 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10089 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10090 int changed = 0;
10092 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10093 && (GET_MODE_SIZE (GET_MODE (inner_op0))
10094 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10095 && (GET_MODE (SUBREG_REG (inner_op0))
10096 == GET_MODE (SUBREG_REG (inner_op1)))
10097 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10098 <= HOST_BITS_PER_WIDE_INT)
10099 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10100 GET_MODE (SUBREG_REG (inner_op0)))))
10101 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10102 GET_MODE (SUBREG_REG (inner_op1))))))
10104 op0 = SUBREG_REG (inner_op0);
10105 op1 = SUBREG_REG (inner_op1);
10107 /* The resulting comparison is always unsigned since we masked
10108 off the original sign bit. */
10109 code = unsigned_condition (code);
10111 changed = 1;
10114 else if (c0 == c1)
10115 for (tmode = GET_CLASS_NARROWEST_MODE
10116 (GET_MODE_CLASS (GET_MODE (op0)));
10117 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10118 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10120 op0 = gen_lowpart_for_combine (tmode, inner_op0);
10121 op1 = gen_lowpart_for_combine (tmode, inner_op1);
10122 code = unsigned_condition (code);
10123 changed = 1;
10124 break;
10127 if (! changed)
10128 break;
10131 /* If both operands are NOT, we can strip off the outer operation
10132 and adjust the comparison code for swapped operands; similarly for
10133 NEG, except that this must be an equality comparison. */
10134 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10135 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10136 && (code == EQ || code == NE)))
10137 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10139 else
10140 break;
10143 /* If the first operand is a constant, swap the operands and adjust the
10144 comparison code appropriately, but don't do this if the second operand
10145 is already a constant integer. */
10146 if (swap_commutative_operands_p (op0, op1))
10148 tem = op0, op0 = op1, op1 = tem;
10149 code = swap_condition (code);
10152 /* We now enter a loop during which we will try to simplify the comparison.
10153 For the most part, we only are concerned with comparisons with zero,
10154 but some things may really be comparisons with zero but not start
10155 out looking that way. */
10157 while (GET_CODE (op1) == CONST_INT)
10159 enum machine_mode mode = GET_MODE (op0);
10160 unsigned int mode_width = GET_MODE_BITSIZE (mode);
10161 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10162 int equality_comparison_p;
10163 int sign_bit_comparison_p;
10164 int unsigned_comparison_p;
10165 HOST_WIDE_INT const_op;
10167 /* We only want to handle integral modes. This catches VOIDmode,
10168 CCmode, and the floating-point modes. An exception is that we
10169 can handle VOIDmode if OP0 is a COMPARE or a comparison
10170 operation. */
10172 if (GET_MODE_CLASS (mode) != MODE_INT
10173 && ! (mode == VOIDmode
10174 && (GET_CODE (op0) == COMPARE
10175 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10176 break;
10178 /* Get the constant we are comparing against and turn off all bits
10179 not on in our mode. */
10180 const_op = trunc_int_for_mode (INTVAL (op1), mode);
10181 op1 = GEN_INT (const_op);
10183 /* If we are comparing against a constant power of two and the value
10184 being compared can only have that single bit nonzero (e.g., it was
10185 `and'ed with that bit), we can replace this with a comparison
10186 with zero. */
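/* For instance (a sketch): if OP0 is (and X (const_int 4)), its only
   possibly nonzero bit is bit 2, so (eq OP0 (const_int 4)) holds
   exactly when that bit is set and can become (ne OP0 (const_int 0)).  */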
10187 if (const_op
10188 && (code == EQ || code == NE || code == GE || code == GEU
10189 || code == LT || code == LTU)
10190 && mode_width <= HOST_BITS_PER_WIDE_INT
10191 && exact_log2 (const_op) >= 0
10192 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10194 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10195 op1 = const0_rtx, const_op = 0;
10198 /* Similarly, if we are comparing a value known to be either -1 or
10199 0 with -1, change it to the opposite comparison against zero. */
10201 if (const_op == -1
10202 && (code == EQ || code == NE || code == GT || code == LE
10203 || code == GEU || code == LTU)
10204 && num_sign_bit_copies (op0, mode) == mode_width)
10206 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10207 op1 = const0_rtx, const_op = 0;
10210 /* Do some canonicalizations based on the comparison code. We prefer
10211 comparisons against zero and then prefer equality comparisons.
10212 If we can reduce the size of a constant, we will do that too. */
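/* One small instance of the chain below, as a sketch:
   (lt X (const_int 1)) first becomes (le X (const_int 0)) in the LT
   case, and if the sign bit of X is known to be zero the LE case
   then turns it into (eq X (const_int 0)).  */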
10214 switch (code)
10216 case LT:
10217 /* < C is equivalent to <= (C - 1) */
10218 if (const_op > 0)
10220 const_op -= 1;
10221 op1 = GEN_INT (const_op);
10222 code = LE;
10223 /* ... fall through to LE case below. */
10225 else
10226 break;
10228 case LE:
10229 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
10230 if (const_op < 0)
10232 const_op += 1;
10233 op1 = GEN_INT (const_op);
10234 code = LT;
10237 /* If we are doing a <= 0 comparison on a value known to have
10238 a zero sign bit, we can replace this with == 0. */
10239 else if (const_op == 0
10240 && mode_width <= HOST_BITS_PER_WIDE_INT
10241 && (nonzero_bits (op0, mode)
10242 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10243 code = EQ;
10244 break;
10246 case GE:
10247 /* >= C is equivalent to > (C - 1). */
10248 if (const_op > 0)
10250 const_op -= 1;
10251 op1 = GEN_INT (const_op);
10252 code = GT;
10253 /* ... fall through to GT below. */
10255 else
10256 break;
10258 case GT:
10259 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
10260 if (const_op < 0)
10262 const_op += 1;
10263 op1 = GEN_INT (const_op);
10264 code = GE;
10267 /* If we are doing a > 0 comparison on a value known to have
10268 a zero sign bit, we can replace this with != 0. */
10269 else if (const_op == 0
10270 && mode_width <= HOST_BITS_PER_WIDE_INT
10271 && (nonzero_bits (op0, mode)
10272 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10273 code = NE;
10274 break;
10276 case LTU:
10277 /* < C is equivalent to <= (C - 1). */
10278 if (const_op > 0)
10280 const_op -= 1;
10281 op1 = GEN_INT (const_op);
10282 code = LEU;
10283 /* ... fall through ... */
10286 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
10287 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10288 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10290 const_op = 0, op1 = const0_rtx;
10291 code = GE;
10292 break;
10294 else
10295 break;
10297 case LEU:
10298 /* unsigned <= 0 is equivalent to == 0 */
10299 if (const_op == 0)
10300 code = EQ;
10302 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
10303 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10304 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10306 const_op = 0, op1 = const0_rtx;
10307 code = GE;
10309 break;
10311 case GEU:
10312 /* >= C is equivalent to < (C - 1). */
10313 if (const_op > 1)
10315 const_op -= 1;
10316 op1 = GEN_INT (const_op);
10317 code = GTU;
10318 /* ... fall through ... */
10321 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
10322 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10323 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10325 const_op = 0, op1 = const0_rtx;
10326 code = LT;
10327 break;
10329 else
10330 break;
10332 case GTU:
10333 /* unsigned > 0 is equivalent to != 0 */
10334 if (const_op == 0)
10335 code = NE;
10337 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
10338 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10339 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10341 const_op = 0, op1 = const0_rtx;
10342 code = LT;
10344 break;
10346 default:
10347 break;
10350 /* Compute some predicates to simplify code below. */
10352 equality_comparison_p = (code == EQ || code == NE);
10353 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10354 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10355 || code == GEU);
10357 /* If this is a sign bit comparison and we can do arithmetic in
10358 MODE, say that we will only be needing the sign bit of OP0. */
10359 if (sign_bit_comparison_p
10360 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10361 op0 = force_to_mode (op0, mode,
10362 ((HOST_WIDE_INT) 1
10363 << (GET_MODE_BITSIZE (mode) - 1)),
10364 NULL_RTX, 0);
10366 /* Now try cases based on the opcode of OP0. If none of the cases
10367 does a "continue", we exit this loop immediately after the
10368 switch. */
10370 switch (GET_CODE (op0))
10372 case ZERO_EXTRACT:
10373 /* If we are extracting a single bit from a variable position in
10374 a constant that has only a single bit set and are comparing it
10375 with zero, we can convert this into an equality comparison
10376 between the position and the location of the single bit. */
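/* Sketch, assuming !BITS_BIG_ENDIAN: (eq (zero_extract (const_int 4)
   (const_int 1) POS) 0) tests bit POS of the constant 0b100, which
   is set only when POS == 2, so the test becomes
   (ne POS (const_int 2)).  */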
10378 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10379 && XEXP (op0, 1) == const1_rtx
10380 && equality_comparison_p && const_op == 0
10381 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10383 if (BITS_BIG_ENDIAN)
10385 enum machine_mode new_mode
10386 = mode_for_extraction (EP_extzv, 1);
10387 if (new_mode == MAX_MACHINE_MODE)
10388 i = BITS_PER_WORD - 1 - i;
10389 else
10391 mode = new_mode;
10392 i = (GET_MODE_BITSIZE (mode) - 1 - i);
10396 op0 = XEXP (op0, 2);
10397 op1 = GEN_INT (i);
10398 const_op = i;
10400 /* Result is nonzero iff shift count is equal to I. */
10401 code = reverse_condition (code);
10402 continue;
10405 /* ... fall through ... */
10407 case SIGN_EXTRACT:
10408 tem = expand_compound_operation (op0);
10409 if (tem != op0)
10411 op0 = tem;
10412 continue;
10414 break;
10416 case NOT:
10417 /* If testing for equality, we can take the NOT of the constant. */
10418 if (equality_comparison_p
10419 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10421 op0 = XEXP (op0, 0);
10422 op1 = tem;
10423 continue;
10426 /* If just looking at the sign bit, reverse the sense of the
10427 comparison. */
10428 if (sign_bit_comparison_p)
10430 op0 = XEXP (op0, 0);
10431 code = (code == GE ? LT : GE);
10432 continue;
10434 break;
10436 case NEG:
10437 /* If testing for equality, we can take the NEG of the constant. */
10438 if (equality_comparison_p
10439 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10441 op0 = XEXP (op0, 0);
10442 op1 = tem;
10443 continue;
10446 /* The remaining cases only apply to comparisons with zero. */
10447 if (const_op != 0)
10448 break;
10450 /* When X is ABS or is known positive,
10451 (neg X) is < 0 if and only if X != 0. */
10453 if (sign_bit_comparison_p
10454 && (GET_CODE (XEXP (op0, 0)) == ABS
10455 || (mode_width <= HOST_BITS_PER_WIDE_INT
10456 && (nonzero_bits (XEXP (op0, 0), mode)
10457 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10459 op0 = XEXP (op0, 0);
10460 code = (code == LT ? NE : EQ);
10461 continue;
10464 /* If we have NEG of something whose two high-order bits are the
10465 same, we know that "(-a) < 0" is equivalent to "a > 0". */
10466 if (num_sign_bit_copies (op0, mode) >= 2)
10468 op0 = XEXP (op0, 0);
10469 code = swap_condition (code);
10470 continue;
10472 break;
10474 case ROTATE:
10475 /* If we are testing equality and our count is a constant, we
10476 can perform the inverse operation on our RHS. */
10477 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10478 && (tem = simplify_binary_operation (ROTATERT, mode,
10479 op1, XEXP (op0, 1))) != 0)
10481 op0 = XEXP (op0, 0);
10482 op1 = tem;
10483 continue;
10486 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10487 a particular bit. Convert it to an AND with a constant having only
10488 that bit set. This will be converted into a ZERO_EXTRACT. */
10489 if (const_op == 0 && sign_bit_comparison_p
10490 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10491 && mode_width <= HOST_BITS_PER_WIDE_INT)
10493 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10494 ((HOST_WIDE_INT) 1
10495 << (mode_width - 1
10496 - INTVAL (XEXP (op0, 1)))));
10497 code = (code == LT ? NE : EQ);
10498 continue;
10501 /* Fall through. */
10503 case ABS:
10504 /* ABS is ignorable inside an equality comparison with zero. */
10505 if (const_op == 0 && equality_comparison_p)
10507 op0 = XEXP (op0, 0);
10508 continue;
10510 break;
10512 case SIGN_EXTEND:
10513 /* Can simplify (compare (zero/sign_extend FOO) CONST)
10514 to (compare FOO CONST) if CONST fits in FOO's mode and we
10515 are either testing inequality or have an unsigned comparison
10516 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
10517 if (! unsigned_comparison_p
10518 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10519 <= HOST_BITS_PER_WIDE_INT)
10520 && ((unsigned HOST_WIDE_INT) const_op
10521 < (((unsigned HOST_WIDE_INT) 1
10522 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10524 op0 = XEXP (op0, 0);
10525 continue;
10527 break;
10529 case SUBREG:
10530 /* Check for the case where we are comparing A - C1 with C2,
10531 both constants are smaller than 1/2 the maximum positive
10532 value in MODE, and the comparison is equality or unsigned.
10533 In that case, if A is either zero-extended to MODE or has
10534 sufficient sign bits so that the high-order bit in MODE
10535 is a copy of the sign in the inner mode, we can prove that it is
10536 safe to do the operation in the wider mode. This simplifies
10537 many range checks. */
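/* The typical shape this matches, as a sketch: OP0 is
   (subreg:QI (plus:SI X (const_int -1)) 0) compared LEU with 25.
   Both constants are below 128 (half of QImode's mask) and X is
   known to fit in QImode, so the comparison can safely be done on
   the SImode PLUS itself:
   (leu (plus:SI X (const_int -1)) (const_int 25)).  */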
10539 if (mode_width <= HOST_BITS_PER_WIDE_INT
10540 && subreg_lowpart_p (op0)
10541 && GET_CODE (SUBREG_REG (op0)) == PLUS
10542 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10543 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10544 && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10545 < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
10546 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10547 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10548 GET_MODE (SUBREG_REG (op0)))
10549 & ~GET_MODE_MASK (mode))
10550 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10551 GET_MODE (SUBREG_REG (op0)))
10552 > (unsigned int)
10553 (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10554 - GET_MODE_BITSIZE (mode)))))
10556 op0 = SUBREG_REG (op0);
10557 continue;
10560 /* If the inner mode is narrower and we are extracting the low part,
10561 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10562 if (subreg_lowpart_p (op0)
10563 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10564 /* Fall through */ ;
10565 else
10566 break;
10568 /* ... fall through ... */
10570 case ZERO_EXTEND:
10571 if ((unsigned_comparison_p || equality_comparison_p)
10572 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10573 <= HOST_BITS_PER_WIDE_INT)
10574 && ((unsigned HOST_WIDE_INT) const_op
10575 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10577 op0 = XEXP (op0, 0);
10578 continue;
10580 break;
10582 case PLUS:
10583 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10584 this for equality comparisons due to pathological cases involving
10585 overflows. */
10586 if (equality_comparison_p
10587 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10588 op1, XEXP (op0, 1))))
10590 op0 = XEXP (op0, 0);
10591 op1 = tem;
10592 continue;
10595 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10596 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10597 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10599 op0 = XEXP (XEXP (op0, 0), 0);
10600 code = (code == LT ? EQ : NE);
10601 continue;
10603 break;
10605 case MINUS:
10606 /* We used to optimize signed comparisons against zero, but that
10607 was incorrect: unsigned comparisons against zero either arrive
10608 here as equality comparisons (GTU, LEU) or are optimized
10609 away (GEU, LTU). No need to special-case them. */
10611 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10612 (eq B (minus A C)), whichever simplifies. We can only do
10613 this for equality comparisons due to pathological cases involving
10614 overflows. */
10615 if (equality_comparison_p
10616 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10617 XEXP (op0, 1), op1)))
10619 op0 = XEXP (op0, 0);
10620 op1 = tem;
10621 continue;
10624 if (equality_comparison_p
10625 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10626 XEXP (op0, 0), op1)))
10628 op0 = XEXP (op0, 1);
10629 op1 = tem;
10630 continue;
10633 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10634 of bits in X minus 1, is one iff X > 0. */
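/* To see why (a sketch in QImode, so C == 7): (ashiftrt X 7) is -1
   for X < 0 and 0 otherwise, so the MINUS is -X when X >= 0 and
   -1 - X (which is >= 0) when X < 0.  Its sign bit is therefore set
   exactly when X > 0, and (lt (minus ...) 0) becomes (gt X 0).  */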
10635 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10636 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10637 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10638 == mode_width - 1
10639 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10641 op0 = XEXP (op0, 1);
10642 code = (code == GE ? LE : GT);
10643 continue;
10645 break;
10647 case XOR:
10648 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10649 if C is zero or B is a constant. */
10650 if (equality_comparison_p
10651 && 0 != (tem = simplify_binary_operation (XOR, mode,
10652 XEXP (op0, 1), op1)))
10654 op0 = XEXP (op0, 0);
10655 op1 = tem;
10656 continue;
10658 break;
10660 case EQ: case NE:
10661 case UNEQ: case LTGT:
10662 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
10663 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
10664 case UNORDERED: case ORDERED:
10665 /* We can't do anything if OP0 is a condition code value, rather
10666 than an actual data value. */
10667 if (const_op != 0
10668 #ifdef HAVE_cc0
10669 || XEXP (op0, 0) == cc0_rtx
10670 #endif
10671 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10672 break;
10674 /* Get the two operands being compared. */
10675 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10676 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10677 else
10678 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10680 /* Check for the cases where we simply want the result of the
10681 earlier test or the opposite of that result. */
10682 if (code == NE || code == EQ
10683 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10684 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10685 && (STORE_FLAG_VALUE
10686 & (((HOST_WIDE_INT) 1
10687 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10688 && (code == LT || code == GE)))
10690 enum rtx_code new_code;
10691 if (code == LT || code == NE)
10692 new_code = GET_CODE (op0);
10693 else
10694 new_code = combine_reversed_comparison_code (op0);
10696 if (new_code != UNKNOWN)
10698 code = new_code;
10699 op0 = tem;
10700 op1 = tem1;
10701 continue;
10704 break;
10706 case IOR:
10707 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10708 iff X <= 0. */
10709 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10710 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10711 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10713 op0 = XEXP (op0, 1);
10714 code = (code == GE ? GT : LE);
10715 continue;
10717 break;
10719 case AND:
10720 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10721 will be converted to a ZERO_EXTRACT later. */
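/* Both forms test bit X of Y; as a sketch, with Y == 0b1010 and
   X == 3, (and (ashift 1 3) Y) == 0b1000 and
   (and (lshiftrt Y 3) 1) == 1, and each is nonzero exactly when
   bit 3 of Y is set.  */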
10722 if (const_op == 0 && equality_comparison_p
10723 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10724 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10726 op0 = simplify_and_const_int
10727 (op0, mode, gen_rtx_LSHIFTRT (mode,
10728 XEXP (op0, 1),
10729 XEXP (XEXP (op0, 0), 1)),
10730 (HOST_WIDE_INT) 1);
10731 continue;
10734 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10735 zero and X is a comparison and C1 and C2 describe only bits set
10736 in STORE_FLAG_VALUE, we can compare with X. */
10737 if (const_op == 0 && equality_comparison_p
10738 && mode_width <= HOST_BITS_PER_WIDE_INT
10739 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10740 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10741 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10742 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10743 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10745 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10746 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10747 if ((~STORE_FLAG_VALUE & mask) == 0
10748 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10749 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10750 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10752 op0 = XEXP (XEXP (op0, 0), 0);
10753 continue;
10757 /* If we are doing an equality comparison of an AND of a bit equal
10758 to the sign bit, replace this with a LT or GE comparison of
10759 the underlying value. */
10760 if (equality_comparison_p
10761 && const_op == 0
10762 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10763 && mode_width <= HOST_BITS_PER_WIDE_INT
10764 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10765 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10767 op0 = XEXP (op0, 0);
10768 code = (code == EQ ? GE : LT);
10769 continue;
10772 /* If this AND operation is really a ZERO_EXTEND from a narrower
10773 mode, the constant fits within that mode, and this is either an
10774 equality or unsigned comparison, try to do this comparison in
10775 the narrower mode. */
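/* E.g., as a sketch: (leu (and:SI X (const_int 255)) (const_int 5)).
   The AND acts as a zero-extension from QImode and 5 fits in QImode,
   so the comparison can be done on the QImode lowpart of X instead.  */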
10776 if ((equality_comparison_p || unsigned_comparison_p)
10777 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10778 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10779 & GET_MODE_MASK (mode))
10780 + 1)) >= 0
10781 && const_op >> i == 0
10782 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10784 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10785 continue;
10788 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10789 in both M1 and M2 and the SUBREG is either paradoxical or
10790 represents the low part, permute the SUBREG and the AND and
10791 try again. */
10792 if (GET_CODE (XEXP (op0, 0)) == SUBREG
10793 && (0
10794 #ifdef WORD_REGISTER_OPERATIONS
10795 || ((mode_width
10796 > (GET_MODE_BITSIZE
10797 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10798 && mode_width <= BITS_PER_WORD)
10799 #endif
10800 || ((mode_width
10801 <= (GET_MODE_BITSIZE
10802 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10803 && subreg_lowpart_p (XEXP (op0, 0))))
10804 #ifndef WORD_REGISTER_OPERATIONS
10805 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10806 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10807 As originally written the upper bits have a defined value
10808 due to the AND operation. However, if we commute the AND
10809 inside the SUBREG then they no longer have defined values
10810 and the meaning of the code has been changed. */
10811 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10812 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10813 #endif
10814 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10815 && mode_width <= HOST_BITS_PER_WIDE_INT
10816 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10817 <= HOST_BITS_PER_WIDE_INT)
10818 && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
10819 && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10820 & INTVAL (XEXP (op0, 1)))
10821 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10822 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10823 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10827 = gen_lowpart_for_combine
10828 (mode,
10829 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10830 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10831 continue;
10834 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10835 (eq (and (lshiftrt X) 1) 0). */
10836 if (const_op == 0 && equality_comparison_p
10837 && XEXP (op0, 1) == const1_rtx
10838 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10839 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
10841 op0 = simplify_and_const_int
10842 (op0, mode,
10843 gen_rtx_LSHIFTRT (mode, XEXP (XEXP (XEXP (op0, 0), 0), 0),
10844 XEXP (XEXP (op0, 0), 1)),
10845 (HOST_WIDE_INT) 1);
10846 code = (code == NE ? EQ : NE);
10847 continue;
10849 break;
10851 case ASHIFT:
10852 /* If we have (compare (ashift FOO N) (const_int C)) and
10853 the high order N bits of FOO (N+1 if an inequality comparison)
10854 are known to be zero, we can do this by comparing FOO with C
10855 shifted right N bits so long as the low-order N bits of C are
10856 zero. */
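/* Worked instance, as a sketch in QImode:
   (eq (ashift FOO 2) (const_int 20)).  If nonzero_bits shows the top
   two bits of FOO are clear, the shift loses nothing, and since the
   low two bits of 20 are zero this is equivalent to
   (eq FOO (const_int 5)).  */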
10857 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10858 && INTVAL (XEXP (op0, 1)) >= 0
10859 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10860 < HOST_BITS_PER_WIDE_INT)
10861 && ((const_op
10862 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10863 && mode_width <= HOST_BITS_PER_WIDE_INT
10864 && (nonzero_bits (XEXP (op0, 0), mode)
10865 & ~(mask >> (INTVAL (XEXP (op0, 1))
10866 + ! equality_comparison_p))) == 0)
10868 /* We must perform a logical shift, not an arithmetic one,
10869 as we want the top N bits of C to be zero. */
10870 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10872 temp >>= INTVAL (XEXP (op0, 1));
10873 op1 = gen_int_mode (temp, mode);
10874 op0 = XEXP (op0, 0);
10875 continue;
10878 /* If we are doing a sign bit comparison, it means we are testing
10879 a particular bit. Convert it to the appropriate AND. */
10880 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10881 && mode_width <= HOST_BITS_PER_WIDE_INT)
10883 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10884 ((HOST_WIDE_INT) 1
10885 << (mode_width - 1
10886 - INTVAL (XEXP (op0, 1)))));
10887 code = (code == LT ? NE : EQ);
10888 continue;
10891 /* If this is an equality comparison with zero and we are shifting
10892 the low bit to the sign bit, we can convert this to an AND of the
10893 low-order bit. */
10894 if (const_op == 0 && equality_comparison_p
10895 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10896 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10897 == mode_width - 1)
10899 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10900 (HOST_WIDE_INT) 1);
10901 continue;
10903 break;
10905 case ASHIFTRT:
10906 /* If this is an equality comparison with zero, we can do this
10907 as a logical shift, which might be much simpler. */
10908 if (equality_comparison_p && const_op == 0
10909 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10911 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10912 XEXP (op0, 0),
10913 INTVAL (XEXP (op0, 1)));
10914 continue;
10917 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10918 do the comparison in a narrower mode. */
10919 if (! unsigned_comparison_p
10920 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10921 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10922 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10923 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10924 MODE_INT, 1)) != BLKmode
10925 && (((unsigned HOST_WIDE_INT) const_op
10926 + (GET_MODE_MASK (tmode) >> 1) + 1)
10927 <= GET_MODE_MASK (tmode)))
10929 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10930 continue;
10933 /* Likewise if OP0 is a PLUS of a sign extension with a
10934 constant, which is usually represented with the PLUS
10935 between the shifts. */
10936 if (! unsigned_comparison_p
10937 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10938 && GET_CODE (XEXP (op0, 0)) == PLUS
10939 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10940 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10941 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10942 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10943 MODE_INT, 1)) != BLKmode
10944 && (((unsigned HOST_WIDE_INT) const_op
10945 + (GET_MODE_MASK (tmode) >> 1) + 1)
10946 <= GET_MODE_MASK (tmode)))
10948 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10949 rtx add_const = XEXP (XEXP (op0, 0), 1);
10950 rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10951 XEXP (op0, 1));
10953 op0 = gen_binary (PLUS, tmode,
10954 gen_lowpart_for_combine (tmode, inner),
10955 new_const);
10956 continue;
10959 /* ... fall through ... */
10960 case LSHIFTRT:
10961 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10962 the low order N bits of FOO are known to be zero, we can do this
10963 by comparing FOO with C shifted left N bits so long as no
10964 overflow occurs. */
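/* Worked instance, as a sketch in QImode:
   (eq (lshiftrt FOO 2) (const_int 5)) with the low two bits of FOO
   known zero.  Nothing is shifted out, and 5 << 2 == 20 still fits
   in QImode, so this becomes (eq FOO (const_int 20)).  */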
10965 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10966 && INTVAL (XEXP (op0, 1)) >= 0
10967 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10968 && mode_width <= HOST_BITS_PER_WIDE_INT
10969 && (nonzero_bits (XEXP (op0, 0), mode)
10970 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10971 && (((unsigned HOST_WIDE_INT) const_op
10972 + (GET_CODE (op0) != LSHIFTRT
10973 ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
10974 + 1)
10975 : 0))
10976 <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
10978 /* If the shift was logical, then we must make the condition
10979 unsigned. */
10980 if (GET_CODE (op0) == LSHIFTRT)
10981 code = unsigned_condition (code);
10983 const_op <<= INTVAL (XEXP (op0, 1));
10984 op1 = GEN_INT (const_op);
10985 op0 = XEXP (op0, 0);
10986 continue;
10989 /* If we are using this shift to extract just the sign bit, we
10990 can replace this with an LT or GE comparison. */
10991 if (const_op == 0
10992 && (equality_comparison_p || sign_bit_comparison_p)
10993 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10994 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10995 == mode_width - 1)
10997 op0 = XEXP (op0, 0);
10998 code = (code == NE || code == GT ? LT : GE);
10999 continue;
11001 break;
11003 default:
11004 break;
11007 break;
11010 /* Now make any compound operations involved in this comparison. Then,
11011 check for an outermost SUBREG on OP0 that is not doing anything or is
11012 paradoxical. The latter transformation must only be performed when
11013 it is known that the "extra" bits will be the same in op0 and op1 or
11014 that they don't matter. There are three cases to consider:
11016 1. SUBREG_REG (op0) is a register. In this case the bits are don't
11017 care bits and we can assume they have any convenient value. So
11018 making the transformation is safe.
11020 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11021 In this case the upper bits of op0 are undefined. We should not make
11022 the simplification in that case as we do not know the contents of
11023 those bits.
11025 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
11026 NIL. In that case we know those bits are zeros or ones. We must
11027 also be sure that they are the same as the upper bits of op1.
11029 We can never remove a SUBREG for a non-equality comparison because
11030 the sign bit is in a different place in the underlying object. */
11032 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11033 op1 = make_compound_operation (op1, SET);
11035 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11036 /* Case 3 above, to sometimes allow (subreg (mem x)), isn't
11037 implemented. */
11038 && GET_CODE (SUBREG_REG (op0)) == REG
11039 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11040 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
11041 && (code == NE || code == EQ))
11043 if (GET_MODE_SIZE (GET_MODE (op0))
11044 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11046 op0 = SUBREG_REG (op0);
11047 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
11049 else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11050 <= HOST_BITS_PER_WIDE_INT)
11051 && (nonzero_bits (SUBREG_REG (op0),
11052 GET_MODE (SUBREG_REG (op0)))
11053 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11055 tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1);
11057 if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11058 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11059 op0 = SUBREG_REG (op0), op1 = tem;
11063 /* We now do the opposite procedure: Some machines don't have compare
11064 insns in all modes. If OP0's mode is an integer mode smaller than a
11065 word and we can't do a compare in that mode, see if there is a larger
11066 mode for which we can do the compare. There are a number of cases in
11067 which we can use the wider mode. */
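/* Sketch: on a target with an SImode compare but no HImode one, an
   equality or unsigned test of two HImode values whose nonzero bits
   all lie in the low 16 bits can simply be done on their SImode
   lowparts; sign-extended values work for signed comparisons too.  */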
11069 mode = GET_MODE (op0);
11070 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11071 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11072 && ! have_insn_for (COMPARE, mode))
11073 for (tmode = GET_MODE_WIDER_MODE (mode);
11074 (tmode != VOIDmode
11075 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11076 tmode = GET_MODE_WIDER_MODE (tmode))
11077 if (have_insn_for (COMPARE, tmode))
11079 int zero_extended;
11081 /* If the only nonzero bits in OP0 and OP1 are those in the
11082 narrower mode and this is an equality or unsigned comparison,
11083 we can use the wider mode. Similarly for sign-extended
11084 values, in which case it is true for all comparisons. */
11085 zero_extended = ((code == EQ || code == NE
11086 || code == GEU || code == GTU
11087 || code == LEU || code == LTU)
11088 && (nonzero_bits (op0, tmode)
11089 & ~GET_MODE_MASK (mode)) == 0
11090 && ((GET_CODE (op1) == CONST_INT
11091 || (nonzero_bits (op1, tmode)
11092 & ~GET_MODE_MASK (mode)) == 0)));
11094 if (zero_extended
11095 || ((num_sign_bit_copies (op0, tmode)
11096 > (unsigned int) (GET_MODE_BITSIZE (tmode)
11097 - GET_MODE_BITSIZE (mode)))
11098 && (num_sign_bit_copies (op1, tmode)
11099 > (unsigned int) (GET_MODE_BITSIZE (tmode)
11100 - GET_MODE_BITSIZE (mode)))))
11102 /* If OP0 is an AND and we don't have an AND in MODE either,
11103 make a new AND in the proper mode. */
11104 if (GET_CODE (op0) == AND
11105 && !have_insn_for (AND, mode))
11106 op0 = gen_binary (AND, tmode,
11107 gen_lowpart_for_combine (tmode,
11108 XEXP (op0, 0)),
11109 gen_lowpart_for_combine (tmode,
11110 XEXP (op0, 1)));
11112 op0 = gen_lowpart_for_combine (tmode, op0);
11113 if (zero_extended && GET_CODE (op1) == CONST_INT)
11114 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
11115 op1 = gen_lowpart_for_combine (tmode, op1);
11116 break;
11119 /* If this is a test for negative, we can make an explicit
11120 test of the sign bit. */
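/* Sketch: an HImode (lt X (const_int 0)) with no HImode compare
   becomes (ne (and:SI X' (const_int 0x8000)) (const_int 0)), where
   X' is the SImode lowpart of X -- bit 15 is HImode's sign bit.  */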
11122 if (op1 == const0_rtx && (code == LT || code == GE)
11123 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11125 op0 = gen_binary (AND, tmode,
11126 gen_lowpart_for_combine (tmode, op0),
11127 GEN_INT ((HOST_WIDE_INT) 1
11128 << (GET_MODE_BITSIZE (mode) - 1)));
11129 code = (code == LT) ? NE : EQ;
11130 break;
11134 #ifdef CANONICALIZE_COMPARISON
11135 /* If this machine only supports a subset of valid comparisons, see if we
11136 can convert an unsupported one into a supported one. */
11137 CANONICALIZE_COMPARISON (code, op0, op1);
11138 #endif
11140 *pop0 = op0;
11141 *pop1 = op1;
11143 return code;
11146 /* Like jump.c's reversed_comparison_code, but use combine infrastructure for
11147 searching backward. */
11148 static enum rtx_code
11149 combine_reversed_comparison_code (exp)
11150 rtx exp;
11152 enum rtx_code code1 = reversed_comparison_code (exp, NULL);
11153 rtx x;
11155 if (code1 != UNKNOWN
11156 || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
11157 return code1;
11158 /* Otherwise try to find where the condition codes were last set and
11159 use that. */
11160 x = get_last_value (XEXP (exp, 0));
11161 if (!x || GET_CODE (x) != COMPARE)
11162 return UNKNOWN;
11163 return reversed_comparison_code_parts (GET_CODE (exp),
11164 XEXP (x, 0), XEXP (x, 1), NULL);
11166 /* Return comparison with reversed code of EXP and operands OP0 and OP1.
11167 Return NULL_RTX in case we fail to do the reversal. */
11168 static rtx
11169 reversed_comparison (exp, mode, op0, op1)
11170 rtx exp, op0, op1;
11171 enum machine_mode mode;
11173 enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
11174 if (reversed_code == UNKNOWN)
11175 return NULL_RTX;
11176 else
11177 return gen_binary (reversed_code, mode, op0, op1);
11180 /* Utility function for the following routine. Called when X is part of a value
11181 being stored into reg_last_set_value. Sets reg_last_set_table_tick
11182 for each register mentioned. Similar to mention_regs in cse.c */
11184 static void
11185 update_table_tick (x)
11186 rtx x;
11188 enum rtx_code code = GET_CODE (x);
11189 const char *fmt = GET_RTX_FORMAT (code);
11190 int i;
11192 if (code == REG)
11194 unsigned int regno = REGNO (x);
11195 unsigned int endregno
11196 = regno + (regno < FIRST_PSEUDO_REGISTER
11197 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11198 unsigned int r;
11200 for (r = regno; r < endregno; r++)
11201 reg_last_set_table_tick[r] = label_tick;
11203 return;
11206 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11207 /* Note that we can't have an "E" in values stored; see
11208 get_last_value_validate. */
11209 if (fmt[i] == 'e')
11210 update_table_tick (XEXP (x, i));
11213 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11214 are saying that the register is clobbered and we no longer know its
11215 value. If INSN is zero, don't update reg_last_set; this is only permitted
11216 with VALUE also zero and is used to invalidate the register. */
11218 static void
11219 record_value_for_reg (reg, insn, value)
11220 rtx reg;
11221 rtx insn;
11222 rtx value;
11224 unsigned int regno = REGNO (reg);
11225 unsigned int endregno
11226 = regno + (regno < FIRST_PSEUDO_REGISTER
11227 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11228 unsigned int i;
11230 /* If VALUE contains REG and we have a previous value for REG, substitute
11231 the previous value. */
11232 if (value && insn && reg_overlap_mentioned_p (reg, value))
11234 rtx tem;
11236 /* Set things up so get_last_value is allowed to see anything set up to
11237 our insn. */
11238 subst_low_cuid = INSN_CUID (insn);
11239 tem = get_last_value (reg);
11241 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11242 it isn't going to be useful and will take a lot of time to process,
11243 so just use the CLOBBER. */
11245 if (tem)
11247 if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11248 || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11249 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11250 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11251 tem = XEXP (tem, 0);
11253 value = replace_rtx (copy_rtx (value), reg, tem);
11257 /* For each register modified, show we don't know its value, that
11258 we don't know about its bitwise content, that its value has been
11259 updated, and that we don't know the location of the death of the
11260 register. */
11261 for (i = regno; i < endregno; i++)
11263 if (insn)
11264 reg_last_set[i] = insn;
11266 reg_last_set_value[i] = 0;
11267 reg_last_set_mode[i] = 0;
11268 reg_last_set_nonzero_bits[i] = 0;
11269 reg_last_set_sign_bit_copies[i] = 0;
11270 reg_last_death[i] = 0;
11273 /* Mark registers that are being referenced in this value. */
11274 if (value)
11275 update_table_tick (value);
11277 /* Now update the status of each register being set.
11278 If someone is using this register in this block, set this register
11279 to invalid since we will get confused between the two lives in this
11280 basic block. This makes using this register always invalid. In cse, we
11281 scan the table to invalidate all entries using this register, but this
11282 is too much work for us. */
11284 for (i = regno; i < endregno; i++)
11286 reg_last_set_label[i] = label_tick;
11287 if (value && reg_last_set_table_tick[i] == label_tick)
11288 reg_last_set_invalid[i] = 1;
11289 else
11290 reg_last_set_invalid[i] = 0;
11293 /* The value being assigned might refer to X (like in "x++;"). In that
11294 case, we must replace it with (clobber (const_int 0)) to prevent
11295 infinite loops. */
11296 if (value && ! get_last_value_validate (&value, insn,
11297 reg_last_set_label[regno], 0))
11299 value = copy_rtx (value);
11300 if (! get_last_value_validate (&value, insn,
11301 reg_last_set_label[regno], 1))
11302 value = 0;
11305 /* For the main register being modified, update the value, the mode, the
11306 nonzero bits, and the number of sign bit copies. */
11308 reg_last_set_value[regno] = value;
11310 if (value)
11312 enum machine_mode mode = GET_MODE (reg);
11313 subst_low_cuid = INSN_CUID (insn);
11314 reg_last_set_mode[regno] = mode;
11315 if (GET_MODE_CLASS (mode) == MODE_INT
11316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11317 mode = nonzero_bits_mode;
11318 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, mode);
11319 reg_last_set_sign_bit_copies[regno]
11320 = num_sign_bit_copies (value, GET_MODE (reg));
11324 /* Called via note_stores from record_dead_and_set_regs to handle one
11325 SET or CLOBBER in an insn. DATA is the instruction in which the
11326 set is occurring. */
11328 static void
11329 record_dead_and_set_regs_1 (dest, setter, data)
11330 rtx dest, setter;
11331 void *data;
11333 rtx record_dead_insn = (rtx) data;
11335 if (GET_CODE (dest) == SUBREG)
11336 dest = SUBREG_REG (dest);
11338 if (GET_CODE (dest) == REG)
11340 /* If we are setting the whole register, we know its value. Otherwise
11341 show that we don't know the value. We can handle SUBREG in
11342 some cases. */
11343 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11344 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11345 else if (GET_CODE (setter) == SET
11346 && GET_CODE (SET_DEST (setter)) == SUBREG
11347 && SUBREG_REG (SET_DEST (setter)) == dest
11348 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11349 && subreg_lowpart_p (SET_DEST (setter)))
11350 record_value_for_reg (dest, record_dead_insn,
11351 gen_lowpart_for_combine (GET_MODE (dest),
11352 SET_SRC (setter)));
11353 else
11354 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11356 else if (GET_CODE (dest) == MEM
11357 /* Ignore pushes, they clobber nothing. */
11358 && ! push_operand (dest, GET_MODE (dest)))
11359 mem_last_set = INSN_CUID (record_dead_insn);
11362 /* Update the records of when each REG was most recently set or killed
11363 for the things done by INSN. This is the last thing done in processing
11364 INSN in the combiner loop.
11366 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11367 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11368 and also the similar information mem_last_set (which insn most recently
11369 modified memory) and last_call_cuid (which insn was the most recent
11370 subroutine call). */
11372 static void
11373 record_dead_and_set_regs (insn)
11374 rtx insn;
11376 rtx link;
11377 unsigned int i;
11379 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11381 if (REG_NOTE_KIND (link) == REG_DEAD
11382 && GET_CODE (XEXP (link, 0)) == REG)
11384 unsigned int regno = REGNO (XEXP (link, 0));
11385 unsigned int endregno
11386 = regno + (regno < FIRST_PSEUDO_REGISTER
11387 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11388 : 1);
11390 for (i = regno; i < endregno; i++)
11391 reg_last_death[i] = insn;
11393 else if (REG_NOTE_KIND (link) == REG_INC)
11394 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11397 if (GET_CODE (insn) == CALL_INSN)
11399 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11400 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11402 reg_last_set_value[i] = 0;
11403 reg_last_set_mode[i] = 0;
11404 reg_last_set_nonzero_bits[i] = 0;
11405 reg_last_set_sign_bit_copies[i] = 0;
11406 reg_last_death[i] = 0;
11409 last_call_cuid = mem_last_set = INSN_CUID (insn);
11411 /* Don't bother recording what this insn does. It might set the
11412 return value register, but we can't combine into a call
11413 pattern anyway, so there's no point trying (and it may cause
11414 a crash, if e.g. we wind up asking for last_set_value of a
11415 SUBREG of the return value register). */
11416 return;
11419 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11422 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11423 register present in the SUBREG, so for each such SUBREG go back and
11424 adjust nonzero and sign bit information of the registers that are
11425 known to have some zero/sign bits set.
11427 This is needed because when combine blows the SUBREGs away, the
11428 information on zero/sign bits is lost and further combines can be
11429 missed because of that. */
11431 static void
11432 record_promoted_value (insn, subreg)
11433 rtx insn;
11434 rtx subreg;
11436 rtx links, set;
11437 unsigned int regno = REGNO (SUBREG_REG (subreg));
11438 enum machine_mode mode = GET_MODE (subreg);
11440 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11441 return;
11443 for (links = LOG_LINKS (insn); links;)
11445 insn = XEXP (links, 0);
11446 set = single_set (insn);
11448 if (! set || GET_CODE (SET_DEST (set)) != REG
11449 || REGNO (SET_DEST (set)) != regno
11450 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11452 links = XEXP (links, 1);
11453 continue;
11456 if (reg_last_set[regno] == insn)
11458 if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11459 reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11462 if (GET_CODE (SET_SRC (set)) == REG)
11464 regno = REGNO (SET_SRC (set));
11465 links = LOG_LINKS (insn);
11467 else
11468 break;
11472 /* Scan X for promoted SUBREGs. For each one found,
11473 note what it implies to the registers used in it. */
11475 static void
11476 check_promoted_subreg (insn, x)
11477 rtx insn;
11478 rtx x;
11480 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11481 && GET_CODE (SUBREG_REG (x)) == REG)
11482 record_promoted_value (insn, x);
11483 else
11485 const char *format = GET_RTX_FORMAT (GET_CODE (x));
11486 int i, j;
11488 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11489 switch (format[i])
11491 case 'e':
11492 check_promoted_subreg (insn, XEXP (x, i));
11493 break;
11494 case 'V':
11495 case 'E':
11496 if (XVEC (x, i) != 0)
11497 for (j = 0; j < XVECLEN (x, i); j++)
11498 check_promoted_subreg (insn, XVECEXP (x, i, j));
11499 break;
11504 /* Utility routine for the following function. Verify that all the registers
11505 mentioned in *LOC are valid when *LOC was part of a value set when
11506 label_tick == TICK. Return 0 if some are not.
11508 If REPLACE is non-zero, replace the invalid reference with
11509 (clobber (const_int 0)) and return 1. This replacement is useful because
11510 we often can get information about the form of a value (e.g., if
11511 it was produced by a shift that always produces -1 or 0) even though
11512 we don't know exactly what registers it was produced from. */
11514 static int
11515 get_last_value_validate (loc, insn, tick, replace)
11516 rtx *loc;
11517 rtx insn;
11518 int tick;
11519 int replace;
11521 rtx x = *loc;
11522 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11523 int len = GET_RTX_LENGTH (GET_CODE (x));
11524 int i;
11526 if (GET_CODE (x) == REG)
11528 unsigned int regno = REGNO (x);
11529 unsigned int endregno
11530 = regno + (regno < FIRST_PSEUDO_REGISTER
11531 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11532 unsigned int j;
11534 for (j = regno; j < endregno; j++)
11535 if (reg_last_set_invalid[j]
11536 /* If this is a pseudo-register that was only set once and not
11537 live at the beginning of the function, it is always valid. */
11538 || (! (regno >= FIRST_PSEUDO_REGISTER
11539 && REG_N_SETS (regno) == 1
11540 && (! REGNO_REG_SET_P
11541 (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))
11542 && reg_last_set_label[j] > tick))
11544 if (replace)
11545 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11546 return replace;
11549 return 1;
11551 /* If this is a memory reference, make sure that there were
11552 no stores after it that might have clobbered the value. We don't
11553 have alias info, so we assume any store invalidates it. */
11554 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11555 && INSN_CUID (insn) <= mem_last_set)
11557 if (replace)
11558 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11559 return replace;
11562 for (i = 0; i < len; i++)
11563 if ((fmt[i] == 'e'
11564 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
11565 /* Don't bother with these. They shouldn't occur anyway. */
11566 || fmt[i] == 'E')
11567 return 0;
11569 /* If we haven't found a reason for it to be invalid, it is valid. */
11570 return 1;
11573 /* Get the last value assigned to X, if known. Some registers
11574 in the value may be replaced with (clobber (const_int 0)) if their value
11575 is no longer known reliably. */
11577 static rtx
11578 get_last_value (x)
11579 rtx x;
11581 unsigned int regno;
11582 rtx value;
11584 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11585 then convert it to the desired mode. If this is a paradoxical SUBREG,
11586 we cannot predict what values the "extra" bits might have. */
11587 if (GET_CODE (x) == SUBREG
11588 && subreg_lowpart_p (x)
11589 && (GET_MODE_SIZE (GET_MODE (x))
11590 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11591 && (value = get_last_value (SUBREG_REG (x))) != 0)
11592 return gen_lowpart_for_combine (GET_MODE (x), value);
11594 if (GET_CODE (x) != REG)
11595 return 0;
11597 regno = REGNO (x);
11598 value = reg_last_set_value[regno];
11600 /* If we don't have a value, or if it isn't for this basic block and
11601 it's either a hard register, set more than once, or it's live
11602 at the beginning of the function, return 0.
11604 Because if it's not live at the beginning of the function then the reg
11605 is always set before being used (is never used without being set).
11606 And, if it's set only once, and it's always set before use, then all
11607 uses must have the same last value, even if it's not from this basic
11608 block. */
11610 if (value == 0
11611 || (reg_last_set_label[regno] != label_tick
11612 && (regno < FIRST_PSEUDO_REGISTER
11613 || REG_N_SETS (regno) != 1
11614 || (REGNO_REG_SET_P
11615 (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))))
11616 return 0;
11618 /* If the value was set in a later insn than the ones we are processing,
11619 we can't use it even if the register was only set once. */
11620 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
11621 return 0;
11623 /* If the value has all its registers valid, return it. */
11624 if (get_last_value_validate (&value, reg_last_set[regno],
11625 reg_last_set_label[regno], 0))
11626 return value;
11628 /* Otherwise, make a copy and replace any invalid register with
11629 (clobber (const_int 0)). If that fails for some reason, return 0. */
11631 value = copy_rtx (value);
11632 if (get_last_value_validate (&value, reg_last_set[regno],
11633 reg_last_set_label[regno], 1))
11634 return value;
11636 return 0;
11639 /* Return nonzero if expression X refers to a REG or to memory
11640 that is set in an instruction more recent than FROM_CUID. */
11642 static int
11643 use_crosses_set_p (x, from_cuid)
11644 rtx x;
11645 int from_cuid;
11647 const char *fmt;
11648 int i;
11649 enum rtx_code code = GET_CODE (x);
11651 if (code == REG)
11653 unsigned int regno = REGNO (x);
11654 unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11655 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11657 #ifdef PUSH_ROUNDING
11658 /* Don't allow uses of the stack pointer to be moved,
11659 because we don't know whether the move crosses a push insn. */
11660 if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11661 return 1;
11662 #endif
11663 for (; regno < endreg; regno++)
11664 if (reg_last_set[regno]
11665 && INSN_CUID (reg_last_set[regno]) > from_cuid)
11666 return 1;
11667 return 0;
11670 if (code == MEM && mem_last_set > from_cuid)
11671 return 1;
11673 fmt = GET_RTX_FORMAT (code);
11675 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11677 if (fmt[i] == 'E')
11679 int j;
11680 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11681 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11682 return 1;
11684 else if (fmt[i] == 'e'
11685 && use_crosses_set_p (XEXP (x, i), from_cuid))
11686 return 1;
11688 return 0;
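/* Editorial sketch (not part of combine.c): use_crosses_set_p is an
   instance of the standard recursive rtx walk -- test the interesting
   leaves (REG, MEM), then recurse into every 'e'/'E' operand, with an
   early exit as soon as anything matches.  A toy tagged tree makes the
   shape visible; all names here are hypothetical.  */

struct sketch_expr
{
  int is_reg;                   /* nonzero: register leaf with a number */
  int regno;
  int n_ops;                    /* interior node: operand subtrees */
  struct sketch_expr **ops;
};

static int sketch_last_set_cuid[256];  /* per-register set timestamps */

static int
sketch_crosses_set (const struct sketch_expr *x, int from_cuid)
{
  int i;
  if (x->is_reg)
    return sketch_last_set_cuid[x->regno] > from_cuid;
  for (i = 0; i < x->n_ops; i++)
    if (sketch_crosses_set (x->ops[i], from_cuid))
      return 1;                 /* early exit, like the real walk */
  return 0;
}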
11691 /* Define three variables used for communication between the following
11692 routines. */
11694 static unsigned int reg_dead_regno, reg_dead_endregno;
11695 static int reg_dead_flag;
11697 /* Function called via note_stores from reg_dead_at_p.
11699 If DEST is within [reg_dead_regno, reg_dead_endregno), set
11700 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
11702 static void
11703 reg_dead_at_p_1 (dest, x, data)
11704 rtx dest;
11705 rtx x;
11706 void *data ATTRIBUTE_UNUSED;
11708 unsigned int regno, endregno;
11710 if (GET_CODE (dest) != REG)
11711 return;
11713 regno = REGNO (dest);
11714 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11715 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
11717 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11718 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
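/* Editorial sketch (not part of combine.c): the test just above is the
   classic half-open interval overlap check.  A hard register in a wide
   mode occupies the range [regno, endregno), and two such ranges
   intersect exactly when each begins before the other ends.  The same
   test reappears below in reg_bitfield_target_p.  */

static int
sketch_ranges_overlap (unsigned a_lo, unsigned a_hi,
                       unsigned b_lo, unsigned b_hi)
{
  /* [a_lo, a_hi) and [b_lo, b_hi) share at least one element.  */
  return a_hi > b_lo && a_lo < b_hi;
}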
11721 /* Return non-zero if REG is known to be dead at INSN.
11723 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
11724 referencing REG, it is dead. If we hit a SET referencing REG, it is
11725 live. Otherwise, see if it is live or dead at the start of the basic
11726 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
11727 must be assumed to be always live. */
11729 static int
11730 reg_dead_at_p (reg, insn)
11731 rtx reg;
11732 rtx insn;
11734 basic_block block;
11735 unsigned int i;
11737 /* Set variables for reg_dead_at_p_1. */
11738 reg_dead_regno = REGNO (reg);
11739 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11740 ? HARD_REGNO_NREGS (reg_dead_regno,
11741 GET_MODE (reg))
11742 : 1);
11744 reg_dead_flag = 0;
11746 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
11747 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11749 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11750 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11751 return 0;
11754 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11755 beginning of function. */
11756 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
11757 insn = prev_nonnote_insn (insn))
11759 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11760 if (reg_dead_flag)
11761 return reg_dead_flag == 1 ? 1 : 0;
11763 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11764 return 1;
11767 /* Get the basic block that we were in. */
11768 if (insn == 0)
11769 block = ENTRY_BLOCK_PTR->next_bb;
11770 else
11772 FOR_EACH_BB (block)
11773 if (insn == block->head)
11774 break;
11776 if (block == EXIT_BLOCK_PTR)
11777 return 0;
11780 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11781 if (REGNO_REG_SET_P (block->global_live_at_start, i))
11782 return 0;
11784 return 1;
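/* Editorial sketch (not part of combine.c): reg_dead_at_p scans
   backwards, classifying each insn's effect on the register, and only
   falls back to the block's live-in set when the scan runs off the
   block head.  A flat array of effect tags stands in for the insn
   chain; all names are hypothetical.  */

enum sketch_effect { SK_NONE, SK_DEATH_NOTE, SK_SETS, SK_CLOBBERS };

static int
sketch_dead_at (const enum sketch_effect *insns, int idx,
                int live_at_block_start)
{
  for (; idx >= 0; idx--)
    {
      if (insns[idx] == SK_CLOBBERS || insns[idx] == SK_DEATH_NOTE)
        return 1;               /* proven dead */
      if (insns[idx] == SK_SETS)
        return 0;               /* proven live */
    }
  return ! live_at_block_start; /* ran off the head: consult life info */
}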
11787 /* Note hard registers in X that are used. This code is similar to
11788 that in flow.c, but much simpler since we don't care about pseudos. */
11790 static void
11791 mark_used_regs_combine (x)
11792 rtx x;
11794 RTX_CODE code = GET_CODE (x);
11795 unsigned int regno;
11796 int i;
11798 switch (code)
11800 case LABEL_REF:
11801 case SYMBOL_REF:
11802 case CONST_INT:
11803 case CONST:
11804 case CONST_DOUBLE:
11805 case CONST_VECTOR:
11806 case PC:
11807 case ADDR_VEC:
11808 case ADDR_DIFF_VEC:
11809 case ASM_INPUT:
11810 #ifdef HAVE_cc0
11811 /* CC0 must die in the insn after it is set, so we don't need to take
11812 special note of it here. */
11813 case CC0:
11814 #endif
11815 return;
11817 case CLOBBER:
11818 /* If we are clobbering a MEM, mark any hard registers inside the
11819 address as used. */
11820 if (GET_CODE (XEXP (x, 0)) == MEM)
11821 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11822 return;
11824 case REG:
11825 regno = REGNO (x);
11826 /* A hard reg in a wide mode may really be multiple registers.
11827 If so, mark all of them just like the first. */
11828 if (regno < FIRST_PSEUDO_REGISTER)
11830 unsigned int endregno, r;
11832 /* None of this applies to the stack, frame or arg pointers. */
11833 if (regno == STACK_POINTER_REGNUM
11834 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11835 || regno == HARD_FRAME_POINTER_REGNUM
11836 #endif
11837 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11838 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11839 #endif
11840 || regno == FRAME_POINTER_REGNUM)
11841 return;
11843 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11844 for (r = regno; r < endregno; r++)
11845 SET_HARD_REG_BIT (newpat_used_regs, r);
11847 return;
11849 case SET:
11851 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11852 the address. */
11853 rtx testreg = SET_DEST (x);
11855 while (GET_CODE (testreg) == SUBREG
11856 || GET_CODE (testreg) == ZERO_EXTRACT
11857 || GET_CODE (testreg) == SIGN_EXTRACT
11858 || GET_CODE (testreg) == STRICT_LOW_PART)
11859 testreg = XEXP (testreg, 0);
11861 if (GET_CODE (testreg) == MEM)
11862 mark_used_regs_combine (XEXP (testreg, 0));
11864 mark_used_regs_combine (SET_SRC (x));
11866 return;
11868 default:
11869 break;
11872 /* Recursively scan the operands of this expression. */
11875 const char *fmt = GET_RTX_FORMAT (code);
11877 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11879 if (fmt[i] == 'e')
11880 mark_used_regs_combine (XEXP (x, i));
11881 else if (fmt[i] == 'E')
11883 int j;
11885 for (j = 0; j < XVECLEN (x, i); j++)
11886 mark_used_regs_combine (XVECEXP (x, i, j));
11892 /* Remove register number REGNO from the dead registers list of INSN.
11894 Return the note used to record the death, if there was one. */
11896 rtx
11897 remove_death (regno, insn)
11898 unsigned int regno;
11899 rtx insn;
11901 rtx note = find_regno_note (insn, REG_DEAD, regno);
11903 if (note)
11905 REG_N_DEATHS (regno)--;
11906 remove_note (insn, note);
11909 return note;
11912 /* For each register (hardware or pseudo) used within expression X, if its
11913 death is in an instruction with cuid between FROM_CUID (inclusive) and
11914 TO_INSN (exclusive), put a REG_DEAD note for that register in the
11915 list headed by PNOTES.
11917 That said, don't move registers killed by maybe_kill_insn.
11919 This is done when X is being merged by combination into TO_INSN. These
11920 notes will then be distributed as needed. */
11922 static void
11923 move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
11924 rtx x;
11925 rtx maybe_kill_insn;
11926 int from_cuid;
11927 rtx to_insn;
11928 rtx *pnotes;
11930 const char *fmt;
11931 int len, i;
11932 enum rtx_code code = GET_CODE (x);
11934 if (code == REG)
11936 unsigned int regno = REGNO (x);
11937 rtx where_dead = reg_last_death[regno];
11938 rtx before_dead, after_dead;
11940 /* Don't move the register if it gets killed in between from and to. */
11941 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11942 && ! reg_referenced_p (x, maybe_kill_insn))
11943 return;
11945 /* WHERE_DEAD could be a USE insn made by combine, so first we
11946 make sure that we have insns with valid INSN_CUID values. */
11947 before_dead = where_dead;
11948 while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11949 before_dead = PREV_INSN (before_dead);
11951 after_dead = where_dead;
11952 while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11953 after_dead = NEXT_INSN (after_dead);
11955 if (before_dead && after_dead
11956 && INSN_CUID (before_dead) >= from_cuid
11957 && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11958 || (where_dead != after_dead
11959 && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11961 rtx note = remove_death (regno, where_dead);
11963 /* It is possible for the call above to return 0. This can occur
11964 when reg_last_death points to I2 or I1 that we combined with.
11965 In that case make a new note.
11967 We must also check for the case where X is a hard register
11968 and NOTE is a death note for a range of hard registers
11969 including X. In that case, we must put REG_DEAD notes for
11970 the remaining registers in place of NOTE. */
11972 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11973 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11974 > GET_MODE_SIZE (GET_MODE (x))))
11976 unsigned int deadregno = REGNO (XEXP (note, 0));
11977 unsigned int deadend
11978 = (deadregno + HARD_REGNO_NREGS (deadregno,
11979 GET_MODE (XEXP (note, 0))));
11980 unsigned int ourend
11981 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11982 unsigned int i;
11984 for (i = deadregno; i < deadend; i++)
11985 if (i < regno || i >= ourend)
11986 REG_NOTES (where_dead)
11987 = gen_rtx_EXPR_LIST (REG_DEAD,
11988 regno_reg_rtx[i],
11989 REG_NOTES (where_dead));
11992 /* If we didn't find any note, or if we found a REG_DEAD note that
11993 covers only part of the given reg, and we have a multi-reg hard
11994 register, then to be safe we must check for REG_DEAD notes
11995 for each register other than the first. They could have
11996 their own REG_DEAD notes lying around. */
11997 else if ((note == 0
11998 || (note != 0
11999 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12000 < GET_MODE_SIZE (GET_MODE (x)))))
12001 && regno < FIRST_PSEUDO_REGISTER
12002 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
12004 unsigned int ourend
12005 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12006 unsigned int i, offset;
12007 rtx oldnotes = 0;
12009 if (note)
12010 offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
12011 else
12012 offset = 1;
12014 for (i = regno + offset; i < ourend; i++)
12015 move_deaths (regno_reg_rtx[i],
12016 maybe_kill_insn, from_cuid, to_insn, &oldnotes);
12019 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
12021 XEXP (note, 1) = *pnotes;
12022 *pnotes = note;
12024 else
12025 *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
12027 REG_N_DEATHS (regno)++;
12030 return;
12033 else if (GET_CODE (x) == SET)
12035 rtx dest = SET_DEST (x);
12037 move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
12039 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
12040 that accesses one word of a multi-word item, some
12041 piece of every register in the expression is used by
12042 this insn, so remove any old death. */
12043 /* ??? So why do we test for equality of the sizes? */
12045 if (GET_CODE (dest) == ZERO_EXTRACT
12046 || GET_CODE (dest) == STRICT_LOW_PART
12047 || (GET_CODE (dest) == SUBREG
12048 && (((GET_MODE_SIZE (GET_MODE (dest))
12049 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
12050 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
12051 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
12053 move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
12054 return;
12057 /* If this is some other SUBREG, we know it replaces the entire
12058 value, so use that as the destination. */
12059 if (GET_CODE (dest) == SUBREG)
12060 dest = SUBREG_REG (dest);
12062 /* If this is a MEM, adjust deaths of anything used in the address.
12063 For a REG (the only other possibility), the entire value is
12064 being replaced so the old value is not used in this insn. */
12066 if (GET_CODE (dest) == MEM)
12067 move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
12068 to_insn, pnotes);
12069 return;
12072 else if (GET_CODE (x) == CLOBBER)
12073 return;
12075 len = GET_RTX_LENGTH (code);
12076 fmt = GET_RTX_FORMAT (code);
12078 for (i = 0; i < len; i++)
12080 if (fmt[i] == 'E')
12082 int j;
12083 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12084 move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
12085 to_insn, pnotes);
12087 else if (fmt[i] == 'e')
12088 move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
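/* Editorial sketch (not part of combine.c): the SUBREG test in
   move_deaths counts the words an object occupies using the standard
   round-up division idiom, ceil(size / word) written in integer
   arithmetic as (size + word - 1) / word.  */

static unsigned
sketch_words_for_size (unsigned size_in_bytes, unsigned word_size)
{
  return (size_in_bytes + word_size - 1) / word_size;
}

/* e.g. sketch_words_for_size (6, 4) == 2 and
   sketch_words_for_size (8, 4) == 2.  */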
12092 /* Return 1 if X is the target of a bit-field assignment in BODY, the
12093 pattern of an insn. X must be a REG. */
12095 static int
12096 reg_bitfield_target_p (x, body)
12097 rtx x;
12098 rtx body;
12100 int i;
12102 if (GET_CODE (body) == SET)
12104 rtx dest = SET_DEST (body);
12105 rtx target;
12106 unsigned int regno, tregno, endregno, endtregno;
12108 if (GET_CODE (dest) == ZERO_EXTRACT)
12109 target = XEXP (dest, 0);
12110 else if (GET_CODE (dest) == STRICT_LOW_PART)
12111 target = SUBREG_REG (XEXP (dest, 0));
12112 else
12113 return 0;
12115 if (GET_CODE (target) == SUBREG)
12116 target = SUBREG_REG (target);
12118 if (GET_CODE (target) != REG)
12119 return 0;
12121 tregno = REGNO (target), regno = REGNO (x);
12122 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12123 return target == x;
12125 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
12126 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12128 return endregno > tregno && regno < endtregno;
12131 else if (GET_CODE (body) == PARALLEL)
12132 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12133 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12134 return 1;
12136 return 0;
12139 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
12140 as appropriate. I3 and I2 are the insns resulting from the combination
12141 insns including FROM (I2 may be zero).
12143 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
12144 not need REG_DEAD notes because they are being substituted for. This
12145 saves searching in the most common cases.
12147 Each note in the list is either ignored or placed on some insns, depending
12148 on the type of note. */
12150 static void
12151 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
12152 rtx notes;
12153 rtx from_insn;
12154 rtx i3, i2;
12155 rtx elim_i2, elim_i1;
12157 rtx note, next_note;
12158 rtx tem;
12160 for (note = notes; note; note = next_note)
12162 rtx place = 0, place2 = 0;
12164 /* If this NOTE references a pseudo register, ensure it references
12165 the latest copy of that register. */
12166 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
12167 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
12168 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
12170 next_note = XEXP (note, 1);
12171 switch (REG_NOTE_KIND (note))
12173 case REG_BR_PROB:
12174 case REG_BR_PRED:
12175 case REG_EXEC_COUNT:
12176 /* Doesn't matter much where we put this, as long as it's somewhere.
12177 It is preferable to keep these notes on branches, which is most
12178 likely to be i3. */
12179 place = i3;
12180 break;
12182 case REG_VTABLE_REF:
12183 /* ??? Should remain with *a particular* memory load. Given the
12184 nature of vtable data, the last insn seems relatively safe. */
12185 place = i3;
12186 break;
12188 case REG_NON_LOCAL_GOTO:
12189 if (GET_CODE (i3) == JUMP_INSN)
12190 place = i3;
12191 else if (i2 && GET_CODE (i2) == JUMP_INSN)
12192 place = i2;
12193 else
12194 abort ();
12195 break;
12197 case REG_EH_REGION:
12198 /* These notes must remain with the call or trapping instruction. */
12199 if (GET_CODE (i3) == CALL_INSN)
12200 place = i3;
12201 else if (i2 && GET_CODE (i2) == CALL_INSN)
12202 place = i2;
12203 else if (flag_non_call_exceptions)
12205 if (may_trap_p (i3))
12206 place = i3;
12207 else if (i2 && may_trap_p (i2))
12208 place = i2;
12209 /* ??? Otherwise assume we've combined things such that we
12210 can now prove that the instructions can't trap. Drop the
12211 note in this case. */
12213 else
12214 abort ();
12215 break;
12217 case REG_NORETURN:
12218 case REG_SETJMP:
12219 /* These notes must remain with the call. It should not be
12220 possible for both I2 and I3 to be a call. */
12221 if (GET_CODE (i3) == CALL_INSN)
12222 place = i3;
12223 else if (i2 && GET_CODE (i2) == CALL_INSN)
12224 place = i2;
12225 else
12226 abort ();
12227 break;
12229 case REG_UNUSED:
12230 /* Any clobbers for i3 may still exist, and so we must process
12231 REG_UNUSED notes from that insn.
12233 Any clobbers from i2 or i1 can only exist if they were added by
12234 recog_for_combine. In that case, recog_for_combine created the
12235 necessary REG_UNUSED notes. Trying to keep any original
12236 REG_UNUSED notes from these insns can cause incorrect output
12237 if it is for the same register as the original i3 dest.
12238 In that case, we will notice that the register is set in i3,
12239 and then add a REG_UNUSED note for the destination of i3, which
12240 is wrong. However, it is possible to have REG_UNUSED notes from
12241 i2 or i1 for registers which were both used and clobbered, so
12242 we keep notes from i2 or i1 if they will turn into REG_DEAD
12243 notes. */
12245 /* If this register is set or clobbered in I3, put the note there
12246 unless there is one already. */
12247 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12249 if (from_insn != i3)
12250 break;
12252 if (! (GET_CODE (XEXP (note, 0)) == REG
12253 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12254 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12255 place = i3;
12257 /* Otherwise, if this register is used by I3, then this register
12258 now dies here, so we must put a REG_DEAD note here unless there
12259 is one already. */
12260 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12261 && ! (GET_CODE (XEXP (note, 0)) == REG
12262 ? find_regno_note (i3, REG_DEAD,
12263 REGNO (XEXP (note, 0)))
12264 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12266 PUT_REG_NOTE_KIND (note, REG_DEAD);
12267 place = i3;
12269 break;
12271 case REG_EQUAL:
12272 case REG_EQUIV:
12273 case REG_NOALIAS:
12274 /* These notes say something about results of an insn. We can
12275 only support them if they used to be on I3 in which case they
12276 remain on I3. Otherwise they are ignored.
12278 If the note refers to an expression that is not a constant, we
12279 must also ignore the note since we cannot tell whether the
12280 equivalence is still true. It might be possible to do
12281 slightly better than this (we only have a problem if I2DEST
12282 or I1DEST is present in the expression), but it doesn't
12283 seem worth the trouble. */
12285 if (from_insn == i3
12286 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12287 place = i3;
12288 break;
12290 case REG_INC:
12291 case REG_NO_CONFLICT:
12292 /* These notes say something about how a register is used. They must
12293 be present on any use of the register in I2 or I3. */
12294 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12295 place = i3;
12297 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12299 if (place)
12300 place2 = i2;
12301 else
12302 place = i2;
12304 break;
12306 case REG_LABEL:
12307 /* This can show up in several ways -- either directly in the
12308 pattern, or hidden off in the constant pool with (or without?)
12309 a REG_EQUAL note. */
12310 /* ??? Ignore the without-reg_equal-note problem for now. */
12311 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12312 || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12313 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12314 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12315 place = i3;
12317 if (i2
12318 && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12319 || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12320 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12321 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12323 if (place)
12324 place2 = i2;
12325 else
12326 place = i2;
12329 /* Don't attach REG_LABEL note to a JUMP_INSN which has
12330 JUMP_LABEL already. Instead, decrement LABEL_NUSES. */
12331 if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
12333 if (JUMP_LABEL (place) != XEXP (note, 0))
12334 abort ();
12335 if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
12336 LABEL_NUSES (JUMP_LABEL (place))--;
12337 place = 0;
12339 if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
12341 if (JUMP_LABEL (place2) != XEXP (note, 0))
12342 abort ();
12343 if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
12344 LABEL_NUSES (JUMP_LABEL (place2))--;
12345 place2 = 0;
12347 break;
12349 case REG_NONNEG:
12350 case REG_WAS_0:
12351 /* These notes say something about the value of a register prior
12352 to the execution of an insn. It is too much trouble to see
12353 if the note is still correct in all situations. It is better
12354 to simply delete it. */
12355 break;
12357 case REG_RETVAL:
12358 /* If the insn previously containing this note still exists,
12359 put it back where it was. Otherwise move it to the previous
12360 insn. Adjust the corresponding REG_LIBCALL note. */
12361 if (GET_CODE (from_insn) != NOTE)
12362 place = from_insn;
12363 else
12365 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12366 place = prev_real_insn (from_insn);
12367 if (tem && place)
12368 XEXP (tem, 0) = place;
12369 /* If we're deleting the last remaining instruction of a
12370 libcall sequence, don't add the notes. */
12371 else if (XEXP (note, 0) == from_insn)
12372 tem = place = 0;
12374 break;
12376 case REG_LIBCALL:
12377 /* This is handled similarly to REG_RETVAL. */
12378 if (GET_CODE (from_insn) != NOTE)
12379 place = from_insn;
12380 else
12382 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12383 place = next_real_insn (from_insn);
12384 if (tem && place)
12385 XEXP (tem, 0) = place;
12386 /* If we're deleting the last remaining instruction of a
12387 libcall sequence, don't add the notes. */
12388 else if (XEXP (note, 0) == from_insn)
12389 tem = place = 0;
12391 break;
12393 case REG_DEAD:
12394 /* If the register is used as an input in I3, it dies there.
12395 Similarly for I2, if it is non-zero and adjacent to I3.
12397 If the register is not used as an input in either I3 or I2
12398 and it is not one of the registers we were supposed to eliminate,
12399 there are two possibilities. We might have a non-adjacent I2
12400 or we might have somehow eliminated an additional register
12401 from a computation. For example, we might have had A & B where
12402 we discover that B will always be zero. In this case we will
12403 eliminate the reference to A.
12405 In both cases, we must search to see if we can find a previous
12406 use of A and put the death note there. */
12408 if (from_insn
12409 && GET_CODE (from_insn) == CALL_INSN
12410 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12411 place = from_insn;
12412 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12413 place = i3;
12414 else if (i2 != 0 && next_nonnote_insn (i2) == i3
12415 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12416 place = i2;
12418 if (rtx_equal_p (XEXP (note, 0), elim_i2)
12419 || rtx_equal_p (XEXP (note, 0), elim_i1))
12420 break;
12422 if (place == 0)
12424 basic_block bb = this_basic_block;
12426 for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
12428 if (! INSN_P (tem))
12430 if (tem == bb->head)
12431 break;
12432 continue;
12435 /* If the register is being set at TEM, see if that is all
12436 TEM is doing. If so, delete TEM. Otherwise, make this
12437 into a REG_UNUSED note instead. */
12438 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
12440 rtx set = single_set (tem);
12441 rtx inner_dest = 0;
12442 #ifdef HAVE_cc0
12443 rtx cc0_setter = NULL_RTX;
12444 #endif
12446 if (set != 0)
12447 for (inner_dest = SET_DEST (set);
12448 (GET_CODE (inner_dest) == STRICT_LOW_PART
12449 || GET_CODE (inner_dest) == SUBREG
12450 || GET_CODE (inner_dest) == ZERO_EXTRACT);
12451 inner_dest = XEXP (inner_dest, 0))
12454 /* Verify that it was the set, and not a clobber that
12455 modified the register.
12457 CC0 targets must be careful to maintain setter/user
12458 pairs. If we cannot delete the setter due to side
12459 effects, mark the user with an UNUSED note instead
12460 of deleting it. */
12462 if (set != 0 && ! side_effects_p (SET_SRC (set))
12463 && rtx_equal_p (XEXP (note, 0), inner_dest)
12464 #ifdef HAVE_cc0
12465 && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12466 || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12467 && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12468 #endif
12471 /* Move the notes and links of TEM elsewhere.
12472 This might delete other dead insns recursively.
12473 First set the pattern to something that won't use
12474 any register. */
12476 PATTERN (tem) = pc_rtx;
12478 distribute_notes (REG_NOTES (tem), tem, tem,
12479 NULL_RTX, NULL_RTX, NULL_RTX);
12480 distribute_links (LOG_LINKS (tem));
12482 PUT_CODE (tem, NOTE);
12483 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
12484 NOTE_SOURCE_FILE (tem) = 0;
12486 #ifdef HAVE_cc0
12487 /* Delete the setter too. */
12488 if (cc0_setter)
12490 PATTERN (cc0_setter) = pc_rtx;
12492 distribute_notes (REG_NOTES (cc0_setter),
12493 cc0_setter, cc0_setter,
12494 NULL_RTX, NULL_RTX, NULL_RTX);
12495 distribute_links (LOG_LINKS (cc0_setter));
12497 PUT_CODE (cc0_setter, NOTE);
12498 NOTE_LINE_NUMBER (cc0_setter)
12499 = NOTE_INSN_DELETED;
12500 NOTE_SOURCE_FILE (cc0_setter) = 0;
12502 #endif
12504 /* If the register is both set and used here, put the
12505 REG_DEAD note here, but place a REG_UNUSED note
12506 here too unless there already is one. */
12507 else if (reg_referenced_p (XEXP (note, 0),
12508 PATTERN (tem)))
12510 place = tem;
12512 if (! find_regno_note (tem, REG_UNUSED,
12513 REGNO (XEXP (note, 0))))
12514 REG_NOTES (tem)
12515 = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
12516 REG_NOTES (tem));
12518 else
12520 PUT_REG_NOTE_KIND (note, REG_UNUSED);
12522 /* If there isn't already a REG_UNUSED note, put one
12523 here. */
12524 if (! find_regno_note (tem, REG_UNUSED,
12525 REGNO (XEXP (note, 0))))
12526 place = tem;
12527 break;
12530 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12531 || (GET_CODE (tem) == CALL_INSN
12532 && find_reg_fusage (tem, USE, XEXP (note, 0))))
12534 place = tem;
12536 /* If we are doing a 3->2 combination, and we have a
12537 register which formerly died in i3 and was not used
12538 by i2, which now no longer dies in i3 and is used in
12539 i2 but does not die in i2, and place is between i2
12540 and i3, then we may need to move a link from place to
12541 i2. */
12542 if (i2 && INSN_UID (place) <= max_uid_cuid
12543 && INSN_CUID (place) > INSN_CUID (i2)
12544 && from_insn
12545 && INSN_CUID (from_insn) > INSN_CUID (i2)
12546 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12548 rtx links = LOG_LINKS (place);
12549 LOG_LINKS (place) = 0;
12550 distribute_links (links);
12552 break;
12555 if (tem == bb->head)
12556 break;
12559 /* We haven't found an insn for the death note and it
12560 is still a REG_DEAD note, but we have hit the beginning
12561 of the block. If the existing life info says the reg
12562 was dead, there's nothing left to do. Otherwise, we'll
12563 need to do a global life update after combine. */
12564 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12565 && REGNO_REG_SET_P (bb->global_live_at_start,
12566 REGNO (XEXP (note, 0))))
12568 SET_BIT (refresh_blocks, this_basic_block->index);
12569 need_refresh = 1;
12573 /* If the register is set or already dead at PLACE, we needn't do
12574 anything with this note if it is still a REG_DEAD note.
12575 We want to catch it if it is set at all, not only if it is totally replaced,
12576 which is what `dead_or_set_p' checks, so also check for it being
12577 set partially. */
12579 if (place && REG_NOTE_KIND (note) == REG_DEAD)
12581 unsigned int regno = REGNO (XEXP (note, 0));
12583 /* Similarly, if the instruction on which we want to place
12584 the note is a noop, we'll need to do a global live update
12585 after we remove them in delete_noop_moves. */
12586 if (noop_move_p (place))
12588 SET_BIT (refresh_blocks, this_basic_block->index);
12589 need_refresh = 1;
12592 if (dead_or_set_p (place, XEXP (note, 0))
12593 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
12595 /* Unless the register previously died in PLACE, clear
12596 reg_last_death. [I no longer understand why this is
12597 being done.] */
12598 if (reg_last_death[regno] != place)
12599 reg_last_death[regno] = 0;
12600 place = 0;
12602 else
12603 reg_last_death[regno] = place;
12605 /* If this is a death note for a hard reg that is occupying
12606 multiple registers, ensure that we are still using all
12607 parts of the object. If we find a piece of the object
12608 that is unused, we must arrange for an appropriate REG_DEAD
12609 note to be added for it. However, we can't just emit a USE
12610 and tag the note to it, since the register might actually
12611 be dead; so we recurse, and the recursive call then finds
12612 the previous insn that used this register. */
12614 if (place && regno < FIRST_PSEUDO_REGISTER
12615 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
12617 unsigned int endregno
12618 = regno + HARD_REGNO_NREGS (regno,
12619 GET_MODE (XEXP (note, 0)));
12620 int all_used = 1;
12621 unsigned int i;
12623 for (i = regno; i < endregno; i++)
12624 if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
12625 && ! find_regno_fusage (place, USE, i))
12626 || dead_or_set_regno_p (place, i))
12627 all_used = 0;
12629 if (! all_used)
12631 /* Put only REG_DEAD notes for pieces that are
12632 not already dead or set. */
12634 for (i = regno; i < endregno;
12635 i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
12637 rtx piece = regno_reg_rtx[i];
12638 basic_block bb = this_basic_block;
12640 if (! dead_or_set_p (place, piece)
12641 && ! reg_bitfield_target_p (piece,
12642 PATTERN (place)))
12644 rtx new_note
12645 = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
12647 distribute_notes (new_note, place, place,
12648 NULL_RTX, NULL_RTX, NULL_RTX);
12650 else if (! refers_to_regno_p (i, i + 1,
12651 PATTERN (place), 0)
12652 && ! find_regno_fusage (place, USE, i))
12653 for (tem = PREV_INSN (place); ;
12654 tem = PREV_INSN (tem))
12656 if (! INSN_P (tem))
12658 if (tem == bb->head)
12660 SET_BIT (refresh_blocks,
12661 this_basic_block->index);
12662 need_refresh = 1;
12663 break;
12665 continue;
12667 if (dead_or_set_p (tem, piece)
12668 || reg_bitfield_target_p (piece,
12669 PATTERN (tem)))
12671 REG_NOTES (tem)
12672 = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
12673 REG_NOTES (tem));
12674 break;
12680 place = 0;
12684 break;
12686 default:
12687 /* Any other notes should not be present at this point in the
12688 compilation. */
12689 abort ();
12692 if (place)
12694 XEXP (note, 1) = REG_NOTES (place);
12695 REG_NOTES (place) = note;
12697 else if ((REG_NOTE_KIND (note) == REG_DEAD
12698 || REG_NOTE_KIND (note) == REG_UNUSED)
12699 && GET_CODE (XEXP (note, 0)) == REG)
12700 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
12702 if (place2)
12704 if ((REG_NOTE_KIND (note) == REG_DEAD
12705 || REG_NOTE_KIND (note) == REG_UNUSED)
12706 && GET_CODE (XEXP (note, 0)) == REG)
12707 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
12709 REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
12710 REG_NOTE_KIND (note),
12711 XEXP (note, 0),
12712 REG_NOTES (place2));
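/* Editorial sketch (not part of combine.c): at the end of the loop a
   surviving note is re-homed with a plain cons-style push onto the
   destination insn's note list -- the note's link field is pointed at
   the old list head and the head is replaced.  Standalone version on a
   toy singly linked list.  */

struct sketch_note
{
  int kind;
  struct sketch_note *next;
};

static void
sketch_push_note (struct sketch_note *note, struct sketch_note **list)
{
  note->next = *list;           /* splice in front of the old head */
  *list = note;
}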
12717 /* Similarly to above, distribute the LOG_LINKS that used to be present on
12718 I3, I2, and I1 to new locations. This is also called in one case to
12719 add a link pointing at I3 when I3's destination is changed. */
12721 static void
12722 distribute_links (links)
12723 rtx links;
12725 rtx link, next_link;
12727 for (link = links; link; link = next_link)
12729 rtx place = 0;
12730 rtx insn;
12731 rtx set, reg;
12733 next_link = XEXP (link, 1);
12735 /* If the insn that this link points to is a NOTE or isn't a single
12736 set, ignore it. In the latter case, it isn't clear what we
12737 can do other than ignore the link, since we can't tell which
12738 register it was for. Such links wouldn't be used by combine
12739 anyway.
12741 It is not possible for the destination of the target of the link to
12742 have been changed by combine. The only way this could happen is if we
12743 replace I3, I2, and I1 by I3 and I2. But in that case the
12744 destination of I2 also remains unchanged. */
12746 if (GET_CODE (XEXP (link, 0)) == NOTE
12747 || (set = single_set (XEXP (link, 0))) == 0)
12748 continue;
12750 reg = SET_DEST (set);
12751 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
12752 || GET_CODE (reg) == SIGN_EXTRACT
12753 || GET_CODE (reg) == STRICT_LOW_PART)
12754 reg = XEXP (reg, 0);
12756 /* A LOG_LINK is defined as being placed on the first insn that uses
12757 a register and points to the insn that sets the register. Start
12758 searching at the next insn after the target of the link and stop
12759 when we reach a set of the register or the end of the basic block.
12761 Note that this correctly handles the link that used to point from
12762 I3 to I2. Also note that not much searching is typically done here
12763 since most links don't point very far away. */
12765 for (insn = NEXT_INSN (XEXP (link, 0));
12766 (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
12767 || this_basic_block->next_bb->head != insn));
12768 insn = NEXT_INSN (insn))
12769 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
12771 if (reg_referenced_p (reg, PATTERN (insn)))
12772 place = insn;
12773 break;
12775 else if (GET_CODE (insn) == CALL_INSN
12776 && find_reg_fusage (insn, USE, reg))
12778 place = insn;
12779 break;
12782 /* If we found a place to put the link, place it there unless there
12783 is already a link to the same insn as LINK at that point. */
12785 if (place)
12787 rtx link2;
12789 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
12790 if (XEXP (link2, 0) == XEXP (link, 0))
12791 break;
12793 if (link2 == 0)
12795 XEXP (link, 1) = LOG_LINKS (place);
12796 LOG_LINKS (place) = link;
12798 /* Set added_links_insn to the earliest insn we added a
12799 link to. */
12800 if (added_links_insn == 0
12801 || INSN_CUID (added_links_insn) > INSN_CUID (place))
12802 added_links_insn = place;
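/* Editorial sketch (not part of combine.c): the placement rule
   described above, on a toy instruction stream.  A LOG_LINK belongs on
   the first insn after the setter that uses the register, and the
   search gives up once the register is set again or the block ends.
   The parallel uses[]/sets[] arrays are hypothetical stand-ins for the
   real pattern tests.  */

static int
sketch_find_link_home (const int *uses, const int *sets,
                       int setter, int n_insns)
{
  int i;
  for (i = setter + 1; i < n_insns; i++)
    {
      if (uses[i])
        return i;               /* first user: the link lives here */
      if (sets[i])
        break;                  /* register redefined before any use */
    }
  return -1;                    /* no home in this block */
}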
12808 /* Compute INSN_CUID for INSN, which is an insn made by combine. */
12810 static int
12811 insn_cuid (insn)
12812 rtx insn;
12814 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
12815 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
12816 insn = NEXT_INSN (insn);
12818 if (INSN_UID (insn) > max_uid_cuid)
12819 abort ();
12821 return INSN_CUID (insn);
12824 void
12825 dump_combine_stats (file)
12826 FILE *file;
12828 fnotice
12829 (file,
12830 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
12831 combine_attempts, combine_merges, combine_extras, combine_successes);
12834 void
12835 dump_combine_total_stats (file)
12836 FILE *file;
12838 fnotice
12839 (file,
12840 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12841 total_attempts, total_merges, total_extras, total_successes);