/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
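
/* For illustration only -- a hypothetical, machine-independent example
   (the register numbers and patterns are made up, not taken from any
   particular MD file).  Given the linked pair

	(set (reg 60) (plus (reg 58) (const_int 4)))
	(set (mem (reg 60)) (reg 59))

   where reg 60 dies in the second insn, substitution would yield

	(set (mem (plus (reg 58) (const_int 4))) (reg 59))

   which is installed, and the first insn deleted, only if the combined
   pattern matches some insn in the target's machine description.  */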
#include "config.h"
/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;
/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;
/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value           the last value assigned
   reg_last_set_label           records the value of label_tick when the
                                register was assigned
   reg_last_set_table_tick      records the value of label_tick when a
                                value using the register is assigned
   reg_last_set_invalid         set to non-zero when it is not valid
                                to use the value of this register in some
                                register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
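
/* A hypothetical illustration of the rules above (the register numbers
   are made up): suppose register 65 is set exactly once, so
   reg_n_sets[65] == 1.  Then reg_last_set_value[65] stays valid across
   labels, and (reg 65) may appear inside the recorded value of any other
   register.  If instead register 66 is set twice, its entry is usable
   only while reg_last_set_label[66] == label_tick, and a recorded
   expression mentioning (reg 66) is replaced by (clobber (const_int 0))
   once it goes stale.  */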
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);						\
      if (undobuf.num_undo < MAX_UNDO)					\
	{								\
	  undobuf.undo[undobuf.num_undo].is_int = 0;			\
	  undobuf.undo[undobuf.num_undo].where.r = &INTO;		\
	  undobuf.undo[undobuf.num_undo].old_contents.r = INTO;	\
	  INTO = _new;							\
	  if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO)	\
	    undobuf.num_undo++;						\
	}								\
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)					\
	{								\
	  undobuf.undo[undobuf.num_undo].is_int = 1;			\
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO;	\
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;	\
	  INTO = NEWVAL;						\
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO)	\
	    undobuf.num_undo++;						\
	}								\
    } while (0)
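
/* A minimal usage sketch of the undo mechanism (hypothetical names,
   for illustration only):

	rtx pat = PATTERN (subst_insn);
	SUBST (SET_SRC (pat), new_src);        -- recorded, undoable
	SUBST_INT (XINT (x, 1), 0);            -- likewise for an int field
	if (not_recognized)
	  undo_all ();                         -- reverts both changes

   Here new_src, x and not_recognized are made-up names; the point is
   that every change made through SUBST or SUBST_INT is recorded in
   undobuf, and the whole group can be reverted at once by undo_all.  */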
/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays	PROTO(());
static void setup_incoming_promotions	PROTO(());
static void set_nonzero_bits_and_sign_copies  PROTO((rtx, rtx));
static int can_combine_p	PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat	PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine		PROTO((rtx, rtx, rtx));
static void undo_all		PROTO((void));
static rtx *find_split_point	PROTO((rtx *, rtx));
static rtx subst		PROTO((rtx, rtx, rtx, int, int));
static rtx expand_compound_operation  PROTO((rtx));
static rtx expand_field_assignment  PROTO((rtx));
static rtx make_extraction	PROTO((enum machine_mode, rtx, int, rtx, int,
				       int, int, int));
static rtx make_compound_operation  PROTO((rtx, enum rtx_code));
static int get_pos_from_mask	PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode	PROTO((rtx, enum machine_mode,
				       unsigned HOST_WIDE_INT, rtx));
static rtx known_cond		PROTO((rtx, enum rtx_code, rtx, rtx));
static rtx make_field_assignment  PROTO((rtx));
static rtx apply_distributive_law  PROTO((rtx));
static rtx simplify_and_const_int  PROTO((rtx, enum machine_mode, rtx,
					  unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies  PROTO((rtx, enum machine_mode));
static int merge_outer_ops	PROTO((enum rtx_code *, HOST_WIDE_INT *,
				       enum rtx_code, HOST_WIDE_INT,
				       enum machine_mode, int *));
static rtx simplify_shift_const	PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine	PROTO((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine ();		/* This is varargs.  */
static rtx gen_binary		PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx gen_unary		PROTO((enum rtx_code, enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p  PROTO((rtx));
static void update_table_tick	PROTO((rtx));
static void record_value_for_reg  PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1  PROTO((rtx, rtx));
static void record_dead_and_set_regs  PROTO((rtx));
static int get_last_value_validate  PROTO((rtx *, int, int));
static rtx get_last_value	PROTO((rtx));
static int use_crosses_set_p	PROTO((rtx, int));
static void reg_dead_at_p_1	PROTO((rtx, rtx));
static int reg_dead_at_p	PROTO((rtx, rtx));
static void move_deaths		PROTO((rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PROTO((rtx, rtx));
static void distribute_notes	PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PROTO((rtx));
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}
/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (int));
  bzero (reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero (reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
	    & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << GET_MODE_BITSIZE (GET_MODE (x)))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, since
   doing so would produce an insn that is not equivalent to the original
   insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )

	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;
  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif
  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	      || (i1 != 0
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	{
	  undo_all ();
	  return 0;
	}
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      previous_num_undos = undobuf.num_undo;
    }
#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }
1511 /* If we already got a failure, don't try to do more. Otherwise,
1512 try to substitute in I1 if we have it. */
1514 if (i1 && GET_CODE (newpat) != CLOBBER)
1516 /* Before we can do this substitution, we must redo the test done
1517 above (see detailed comments there) that ensures that I1DEST
1518 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1520 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1521 0, NULL_PTR))
1523 undo_all ();
1524 return 0;
1527 n_occurrences = 0;
1528 subst_low_cuid = INSN_CUID (i1);
1529 newpat = subst (newpat, i1dest, i1src, 0, 0);
1530 previous_num_undos = undobuf.num_undo;
1533 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1534 to count all the ways that I2SRC and I1SRC can be used. */
1535 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1536 && i2_is_used + added_sets_2 > 1)
1537 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1538 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1539 > 1))
1540 /* Fail if we tried to make a new register (we used to abort, but there's
1541 really no reason to). */
1542 || max_reg_num () != maxreg
1543 /* Fail if we couldn't do something and have a CLOBBER. */
1544 || GET_CODE (newpat) == CLOBBER)
1546 undo_all ();
1547 return 0;
1550 /* If the actions of the earlier insns must be kept
1551 in addition to substituting them into the latest one,
1552 we must make a new PARALLEL for the latest insn
1553 to hold the additional SETs.  */
1555 if (added_sets_1 || added_sets_2)
1557 combine_extras++;
1559 if (GET_CODE (newpat) == PARALLEL)
1561 rtvec old = XVEC (newpat, 0);
1562 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1563 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1564 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1565 sizeof (old->elem[0]) * old->num_elem);
1567 else
1569 rtx old = newpat;
1570 total_sets = 1 + added_sets_1 + added_sets_2;
1571 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1572 XVECEXP (newpat, 0, 0) = old;
1575 if (added_sets_1)
1576 XVECEXP (newpat, 0, --total_sets)
1577 = (GET_CODE (PATTERN (i1)) == PARALLEL
1578 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1580 if (added_sets_2)
1582 /* If there is no I1, use I2's body as is. We used to also not do
1583 the subst call below if I2 was substituted into I3,
1584 but that could lose a simplification. */
1585 if (i1 == 0)
1586 XVECEXP (newpat, 0, --total_sets) = i2pat;
1587 else
1588 /* See comment where i2pat is assigned. */
1589 XVECEXP (newpat, 0, --total_sets)
1590 = subst (i2pat, i1dest, i1src, 0, 0);
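  /* Illustrative shape of the result (register numbers hypothetical): if
     NEWPAT was (set (reg 65) (plus:SI (reg 66) (reg 67))) and I2's SET
     must be kept, NEWPAT becomes
       (parallel [(set (reg 65) (plus:SI (reg 66) (reg 67)))
                  (set (reg 66) ...)])
     with the added SETs filled in from the back of the vector.  */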
1594 /* We come here when we are replacing a destination in I2 with the
1595 destination of I3. */
1596 validate_replacement:
1598 /* Is the result of combination a valid instruction? */
1599 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1601 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1602 the second SET's destination is a register that is unused. In that case,
1603 we just need the first SET. This can occur when simplifying a divmod
1604 insn. We *must* test for this case here because the code below that
1605 splits two independent SETs doesn't handle this case correctly when it
1606 updates the register status. Also check the case where the first
1607 SET's destination is unused. That would not cause incorrect code, but
1608 does cause an unneeded insn to remain. */
1610 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1611 && XVECLEN (newpat, 0) == 2
1612 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1613 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1614 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1615 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1616 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1617 && asm_noperands (newpat) < 0)
1619 newpat = XVECEXP (newpat, 0, 0);
1620 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1623 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1624 && XVECLEN (newpat, 0) == 2
1625 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1626 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1627 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1628 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1629 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1630 && asm_noperands (newpat) < 0)
1632 newpat = XVECEXP (newpat, 0, 1);
1633 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
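  /* E.g. (illustrative): a divmod pattern
       (parallel [(set (reg 60) (div:SI (reg 62) (reg 63)))
                  (set (reg 61) (mod:SI (reg 62) (reg 63)))])
     where I3 has a REG_UNUSED note for (reg 61) reduces to the first SET
     alone, (set (reg 60) (div:SI (reg 62) (reg 63))).  */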
1636 /* See if this is an XOR. If so, perhaps the problem is that the
1637 constant is out of range. Replace it with a complemented XOR with
1638 a complemented constant; it might be in range. */
1640 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1641 && GET_CODE (SET_SRC (newpat)) == XOR
1642 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1643 && ((temp = simplify_unary_operation (NOT,
1644 GET_MODE (SET_SRC (newpat)),
1645 XEXP (SET_SRC (newpat), 1),
1646 GET_MODE (SET_SRC (newpat))))
1647 != 0))
1649 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1650 rtx pat
1651 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1652 gen_unary (NOT, i_mode,
1653 gen_binary (XOR, i_mode,
1654 XEXP (SET_SRC (newpat), 0),
1655 temp)));
1657 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1658 if (insn_code_number >= 0)
1659 newpat = pat;
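  /* Worked example (illustrative): on a machine whose XOR immediates must
     fit in 8 bits, (xor:SI (reg 60) (const_int -256)) fails to match, but
     since ~(-256) = 255 the same SET can be recognized as
       (set (reg 61) (not:SI (xor:SI (reg 60) (const_int 255)))).  */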
1662 /* If we were combining three insns and the result is a simple SET
1663 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1664 insns. There are two ways to do this. It can be split using a
1665 machine-specific method (like when you have an addition of a large
1666 constant) or by combine in the function find_split_point. */
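  /* Illustrative machine-specific split (target and registers
     hypothetical): on a RISC machine whose add immediates are 16 bits,
     the MD file might split
       (set (reg 65) (plus:SI (reg 66) (const_int 0x12345678)))
     into
       (set (reg 64) (plus:SI (reg 66) (const_int 0x12340000)))
       (set (reg 65) (plus:SI (reg 64) (const_int 0x5678)))
     with I2DEST playing the part of (reg 64) below.  */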
1668 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1669 && asm_noperands (newpat) < 0)
1671 rtx m_split, *split;
1672 rtx ni2dest = i2dest;
1674 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1675 use I2DEST as a scratch register will help. In the latter case,
1676 convert I2DEST to the mode of the source of NEWPAT if we can. */
1678 m_split = split_insns (newpat, i3);
1680 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1681 inputs of NEWPAT. */
1683 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1684 possible to try that as a scratch reg. This would require adding
1685 more code to make it work though. */
1687 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1689 /* If I2DEST is a hard register or the only use of a pseudo,
1690 we can change its mode. */
1691 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1692 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1693 && GET_CODE (i2dest) == REG
1694 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1695 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1696 && ! REG_USERVAR_P (i2dest))))
1697 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1698 REGNO (i2dest));
1700 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1701 gen_rtvec (2, newpat,
1702 gen_rtx (CLOBBER,
1703 VOIDmode,
1704 ni2dest))),
1705 i3);
1708 if (m_split && GET_CODE (m_split) == SEQUENCE
1709 && XVECLEN (m_split, 0) == 2
1710 && (next_real_insn (i2) == i3
1711 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1712 INSN_CUID (i2))))
1714 rtx i2set, i3set;
1715 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1716 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1718 i3set = single_set (XVECEXP (m_split, 0, 1));
1719 i2set = single_set (XVECEXP (m_split, 0, 0));
1721 /* In case we changed the mode of I2DEST, replace it in the
1722 pseudo-register table here. We can't do it above in case this
1723 code doesn't get executed and we do a split the other way. */
1725 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1726 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1728 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1730 /* If I2 or I3 has multiple SETs, we won't know how to track
1731 register status, so don't use these insns. */
1733 if (i2_code_number >= 0 && i2set && i3set)
1734 insn_code_number = recog_for_combine (&newi3pat, i3,
1735 &new_i3_notes);
1737 if (insn_code_number >= 0)
1738 newpat = newi3pat;
1740 /* It is possible that both insns now set the destination of I3.
1741 If so, we must show an extra use of it. */
1743 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1744 && GET_CODE (SET_DEST (i2set)) == REG
1745 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1746 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1749 /* If we can split it and use I2DEST, go ahead and see if that
1750 helps things be recognized. Verify that none of the registers
1751 are set between I2 and I3. */
1752 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1753 #ifdef HAVE_cc0
1754 && GET_CODE (i2dest) == REG
1755 #endif
1756 /* We need I2DEST in the proper mode. If it is a hard register
1757 or the only use of a pseudo, we can change its mode. */
1758 && (GET_MODE (*split) == GET_MODE (i2dest)
1759 || GET_MODE (*split) == VOIDmode
1760 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1761 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1762 && ! REG_USERVAR_P (i2dest)))
1763 && (next_real_insn (i2) == i3
1764 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1765 /* We can't overwrite I2DEST if its value is still used by
1766 NEWPAT. */
1767 && ! reg_referenced_p (i2dest, newpat))
1769 rtx newdest = i2dest;
1771 /* Get NEWDEST as a register in the proper mode. We have already
1772 validated that we can do this. */
1773 if (GET_MODE (i2dest) != GET_MODE (*split)
1774 && GET_MODE (*split) != VOIDmode)
1776 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1778 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1779 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1782 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1783 an ASHIFT. This can occur if it was inside a PLUS and hence
1784 appeared to be a memory address. This is a kludge. */
1785 if (GET_CODE (*split) == MULT
1786 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1787 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1788 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1789 XEXP (*split, 0), GEN_INT (i)));
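  /* E.g. (mult:SI (reg 60) (const_int 8)) is rewritten here as
     (ashift:SI (reg 60) (const_int 3)), since exact_log2 (8) == 3.  */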
1791 #ifdef INSN_SCHEDULING
1792 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1793 be written as a ZERO_EXTEND. */
1794 if (GET_CODE (*split) == SUBREG
1795 && GET_CODE (SUBREG_REG (*split)) == MEM)
1796 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1797 XEXP (*split, 0)));
1798 #endif
1800 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1801 SUBST (*split, newdest);
1802 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1803 if (i2_code_number >= 0)
1804 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1808 /* Check for a case where we loaded from memory in a narrow mode and
1809 then sign extended it, but we need both registers. In that case,
1810 we have a PARALLEL with both loads from the same memory location.
1811 We can split this into a load from memory followed by a register-register
1812 copy. This saves at least one insn, more if register allocation can
1813 eliminate the copy.
1815 We cannot do this if the destination of the second assignment is
1816 a register that we have already assumed is zero-extended. Similarly
1817 for a SUBREG of such a register. */
1819 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1820 && GET_CODE (newpat) == PARALLEL
1821 && XVECLEN (newpat, 0) == 2
1822 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1823 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1824 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1825 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1826 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1827 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1828 INSN_CUID (i2))
1829 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1830 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1831 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
1832 (GET_CODE (temp) == REG
1833 && reg_nonzero_bits[REGNO (temp)] != 0
1834 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1835 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1836 && (reg_nonzero_bits[REGNO (temp)]
1837 != GET_MODE_MASK (word_mode))))
1838 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
1839 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
1840 (GET_CODE (temp) == REG
1841 && reg_nonzero_bits[REGNO (temp)] != 0
1842 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1843 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1844 && (reg_nonzero_bits[REGNO (temp)]
1845 != GET_MODE_MASK (word_mode)))))
1846 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1847 SET_SRC (XVECEXP (newpat, 0, 1)))
1848 && ! find_reg_note (i3, REG_UNUSED,
1849 SET_DEST (XVECEXP (newpat, 0, 0))))
1851 rtx ni2dest;
1853 newi2pat = XVECEXP (newpat, 0, 0);
1854 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1855 newpat = XVECEXP (newpat, 0, 1);
1856 SUBST (SET_SRC (newpat),
1857 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1858 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1859 if (i2_code_number >= 0)
1860 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1862 if (insn_code_number >= 0)
1864 rtx insn;
1865 rtx link;
1867 /* If we will be able to accept this, we have made a change to the
1868 destination of I3. This can invalidate a LOG_LINKS pointing
1869 to I3. No other part of combine.c makes such a transformation.
1871 The new I3 will have a destination that was previously the
1872 destination of I1 or I2 and which was used in I2 or I3.  Call
1873 distribute_links to make a LOG_LINK from the next use of
1874 that destination. */
1876 PATTERN (i3) = newpat;
1877 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1879 /* I3 now uses what used to be its destination and which is
1880 now I2's destination. That means we need a LOG_LINK from
1881 I3 to I2. But we used to have one, so we still will.
1883 However, some later insn might be using I2's dest and have
1884 a LOG_LINK pointing at I3. We must remove this link.
1885 The simplest way to remove the link is to point it at I1,
1886 which we know will be a NOTE. */
1888 for (insn = NEXT_INSN (i3);
1889 insn && (this_basic_block == n_basic_blocks - 1
1890 || insn != basic_block_head[this_basic_block + 1]);
1891 insn = NEXT_INSN (insn))
1893 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1894 && reg_referenced_p (ni2dest, PATTERN (insn)))
1896 for (link = LOG_LINKS (insn); link;
1897 link = XEXP (link, 1))
1898 if (XEXP (link, 0) == i3)
1899 XEXP (link, 0) = i1;
1901 break;
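  /* Illustrative shape of this case (modes and registers hypothetical):
       (parallel [(set (reg:SI 60) (sign_extend:SI (mem:HI (reg 61))))
                  (set (reg:HI 62) (mem:HI (reg 61)))])
     becomes I2: (set (reg:SI 60) (sign_extend:SI (mem:HI (reg 61))))
     and    I3: (set (reg:HI 62) (subreg:HI (reg:SI 60) 0)),
     a single load followed by a register-register copy.  */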
1907 /* Similarly, check for a case where we have a PARALLEL of two independent
1908 SETs but we started with three insns. In this case, we can do the sets
1909 as two separate insns. This case occurs when some SET allows two
1910 other insns to combine, but the destination of that SET is still live. */
1912 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1913 && GET_CODE (newpat) == PARALLEL
1914 && XVECLEN (newpat, 0) == 2
1915 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1916 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1917 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1918 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1919 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1920 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1921 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1922 INSN_CUID (i2))
1923 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1924 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1925 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1926 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1927 XVECEXP (newpat, 0, 0))
1928 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1929 XVECEXP (newpat, 0, 1)))
1931 newi2pat = XVECEXP (newpat, 0, 1);
1932 newpat = XVECEXP (newpat, 0, 0);
1934 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1935 if (i2_code_number >= 0)
1936 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1939 /* If it still isn't recognized, fail and change things back the way they
1940 were. */
1941 if ((insn_code_number < 0
1942 /* Is the result a reasonable ASM_OPERANDS? */
1943 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1945 undo_all ();
1946 return 0;
1949 /* If we had to change another insn, make sure it is valid also. */
1950 if (undobuf.other_insn)
1952 rtx other_notes = REG_NOTES (undobuf.other_insn);
1953 rtx other_pat = PATTERN (undobuf.other_insn);
1954 rtx new_other_notes;
1955 rtx note, next;
1957 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1958 &new_other_notes);
1960 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1962 undo_all ();
1963 return 0;
1966 PATTERN (undobuf.other_insn) = other_pat;
1968 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1969 are still valid. Then add any non-duplicate notes added by
1970 recog_for_combine. */
1971 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1973 next = XEXP (note, 1);
1975 if (REG_NOTE_KIND (note) == REG_UNUSED
1976 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1978 if (GET_CODE (XEXP (note, 0)) == REG)
1979 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1981 remove_note (undobuf.other_insn, note);
1985 for (note = new_other_notes; note; note = XEXP (note, 1))
1986 if (GET_CODE (XEXP (note, 0)) == REG)
1987 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1989 distribute_notes (new_other_notes, undobuf.other_insn,
1990 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1993 /* We now know that we can do this combination. Merge the insns and
1994 update the status of registers and LOG_LINKS. */
1997 rtx i3notes, i2notes, i1notes = 0;
1998 rtx i3links, i2links, i1links = 0;
1999 rtx midnotes = 0;
2000 int all_adjacent = (next_real_insn (i2) == i3
2001 && (i1 == 0 || next_real_insn (i1) == i2));
2002 register int regno;
2003 /* Compute which registers we expect to eliminate. */
2004 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2005 ? 0 : i2dest);
2006 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2008 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2009 clear them. */
2010 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2011 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2012 if (i1)
2013 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2015 /* Ensure that we do not have something that should not be shared but
2016 occurs multiple times in the new insns. Check this by first
2017 resetting all the `used' flags and then copying anything that is shared.  */
2019 reset_used_flags (i3notes);
2020 reset_used_flags (i2notes);
2021 reset_used_flags (i1notes);
2022 reset_used_flags (newpat);
2023 reset_used_flags (newi2pat);
2024 if (undobuf.other_insn)
2025 reset_used_flags (PATTERN (undobuf.other_insn));
2027 i3notes = copy_rtx_if_shared (i3notes);
2028 i2notes = copy_rtx_if_shared (i2notes);
2029 i1notes = copy_rtx_if_shared (i1notes);
2030 newpat = copy_rtx_if_shared (newpat);
2031 newi2pat = copy_rtx_if_shared (newi2pat);
2032 if (undobuf.other_insn)
2033 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2035 INSN_CODE (i3) = insn_code_number;
2036 PATTERN (i3) = newpat;
2037 if (undobuf.other_insn)
2038 INSN_CODE (undobuf.other_insn) = other_code_number;
2040 /* We had one special case above where I2 had more than one set and
2041 we replaced a destination of one of those sets with the destination
2042 of I3. In that case, we have to update LOG_LINKS of insns later
2043 in this basic block. Note that this (expensive) case is rare. */
2045 if (GET_CODE (PATTERN (i2)) == PARALLEL)
2046 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2047 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2048 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2049 && ! find_reg_note (i2, REG_UNUSED,
2050 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2051 for (temp = NEXT_INSN (i2);
2052 temp && (this_basic_block == n_basic_blocks - 1
2053 || basic_block_head[this_basic_block + 1] != temp);
2054 temp = NEXT_INSN (temp))
2055 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2056 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2057 if (XEXP (link, 0) == i2)
2058 XEXP (link, 0) = i3;
2060 LOG_LINKS (i3) = 0;
2061 REG_NOTES (i3) = 0;
2062 LOG_LINKS (i2) = 0;
2063 REG_NOTES (i2) = 0;
2065 if (newi2pat)
2067 INSN_CODE (i2) = i2_code_number;
2068 PATTERN (i2) = newi2pat;
2070 else
2072 PUT_CODE (i2, NOTE);
2073 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2074 NOTE_SOURCE_FILE (i2) = 0;
2077 if (i1)
2079 LOG_LINKS (i1) = 0;
2080 REG_NOTES (i1) = 0;
2081 PUT_CODE (i1, NOTE);
2082 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2083 NOTE_SOURCE_FILE (i1) = 0;
2086 /* Get death notes for everything that is now used in either I3 or
2087 I2 and used to die in a previous insn. */
2089 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2090 if (newi2pat)
2091 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2093 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2094 if (i3notes)
2095 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2096 elim_i2, elim_i1);
2097 if (i2notes)
2098 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2099 elim_i2, elim_i1);
2100 if (i1notes)
2101 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2102 elim_i2, elim_i1);
2103 if (midnotes)
2104 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2105 elim_i2, elim_i1);
2107 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2108 know these are REG_UNUSED and want them to go to the desired insn,
2109 so we always pass it as I3.  We have not counted the notes in
2110 reg_n_deaths yet, so we need to do so now. */
2112 if (newi2pat && new_i2_notes)
2114 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2115 if (GET_CODE (XEXP (temp, 0)) == REG)
2116 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2118 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2121 if (new_i3_notes)
2123 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2124 if (GET_CODE (XEXP (temp, 0)) == REG)
2125 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2127 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2130 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2131 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2132 Show an additional death due to the REG_DEAD note we make here. If
2133 we discard it in distribute_notes, we will decrement it again. */
2135 if (i3dest_killed)
2137 if (GET_CODE (i3dest_killed) == REG)
2138 reg_n_deaths[REGNO (i3dest_killed)]++;
2140 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2141 NULL_RTX),
2142 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2143 NULL_RTX, NULL_RTX);
2146 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2147 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2148 we passed I3 in that case, it might delete I2. */
2150 if (i2dest_in_i2src)
2152 if (GET_CODE (i2dest) == REG)
2153 reg_n_deaths[REGNO (i2dest)]++;
2155 if (newi2pat && reg_set_p (i2dest, newi2pat))
2156 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2157 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2158 else
2159 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2160 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2161 NULL_RTX, NULL_RTX);
2164 if (i1dest_in_i1src)
2166 if (GET_CODE (i1dest) == REG)
2167 reg_n_deaths[REGNO (i1dest)]++;
2169 if (newi2pat && reg_set_p (i1dest, newi2pat))
2170 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2171 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2172 else
2173 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2174 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2175 NULL_RTX, NULL_RTX);
2178 distribute_links (i3links);
2179 distribute_links (i2links);
2180 distribute_links (i1links);
2182 if (GET_CODE (i2dest) == REG)
2184 rtx link;
2185 rtx i2_insn = 0, i2_val = 0, set;
2187 /* The insn that used to set this register doesn't exist, and
2188 this life of the register may not exist either. See if one of
2189 I3's links points to an insn that sets I2DEST. If it does,
2190 that is now the last known value for I2DEST. If we don't update
2191 this and I2 set the register to a value that depended on its old
2192 contents, we will get confused.  If this insn is used, things
2193 will be set correctly in combine_instructions. */
2195 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2196 if ((set = single_set (XEXP (link, 0))) != 0
2197 && rtx_equal_p (i2dest, SET_DEST (set)))
2198 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2200 record_value_for_reg (i2dest, i2_insn, i2_val);
2202 /* If the reg formerly set in I2 died only once and that was in I3,
2203 zero its use count so it won't make `reload' do any work. */
2204 if (! added_sets_2 && newi2pat == 0)
2206 regno = REGNO (i2dest);
2207 reg_n_sets[regno]--;
2208 if (reg_n_sets[regno] == 0
2209 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2210 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2211 reg_n_refs[regno] = 0;
2215 if (i1 && GET_CODE (i1dest) == REG)
2217 rtx link;
2218 rtx i1_insn = 0, i1_val = 0, set;
2220 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2221 if ((set = single_set (XEXP (link, 0))) != 0
2222 && rtx_equal_p (i1dest, SET_DEST (set)))
2223 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2225 record_value_for_reg (i1dest, i1_insn, i1_val);
2227 regno = REGNO (i1dest);
2228 if (! added_sets_1)
2230 reg_n_sets[regno]--;
2231 if (reg_n_sets[regno] == 0
2232 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2233 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2234 reg_n_refs[regno] = 0;
2238 /* Update reg_nonzero_bits et al for any changes that may have been made
2239 to this insn. */
2241 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2242 if (newi2pat)
2243 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2245 /* If I3 is now an unconditional jump, ensure that it has a
2246 BARRIER following it since it may have initially been a
2247 conditional jump. It may also be the last nonnote insn. */
2249 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2250 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2251 || GET_CODE (temp) != BARRIER))
2252 emit_barrier_after (i3);
2255 combine_successes++;
2257 return newi2pat ? i2 : i3;
2260 /* Undo all the modifications recorded in undobuf. */
2262 static void
2263 undo_all ()
2265 register int i;
2266 if (undobuf.num_undo > MAX_UNDO)
2267 undobuf.num_undo = MAX_UNDO;
2268 for (i = undobuf.num_undo - 1; i >= 0; i--)
2270 if (undobuf.undo[i].is_int)
2271 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2272 else
2273 *undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
2277 obfree (undobuf.storage);
2278 undobuf.num_undo = 0;
2281 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2282 where we have an arithmetic expression and return that point. LOC will
2283 be inside INSN.
2285 try_combine will call this function to see if an insn can be split into
2286 two insns. */
2288 static rtx *
2289 find_split_point (loc, insn)
2290 rtx *loc;
2291 rtx insn;
2293 rtx x = *loc;
2294 enum rtx_code code = GET_CODE (x);
2295 rtx *split;
2296 int len = 0, pos, unsignedp;
2297 rtx inner;
2299 /* First special-case some codes. */
2300 switch (code)
2302 case SUBREG:
2303 #ifdef INSN_SCHEDULING
2304 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2305 point. */
2306 if (GET_CODE (SUBREG_REG (x)) == MEM)
2307 return loc;
2308 #endif
2309 return find_split_point (&SUBREG_REG (x), insn);
2311 case MEM:
2312 #ifdef HAVE_lo_sum
2313 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2314 using LO_SUM and HIGH. */
2315 if (GET_CODE (XEXP (x, 0)) == CONST
2316 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2318 SUBST (XEXP (x, 0),
2319 gen_rtx_combine (LO_SUM, Pmode,
2320 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2321 XEXP (x, 0)));
2322 return &XEXP (XEXP (x, 0), 0);
2324 #endif
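  /* E.g. (illustrative, Pmode shown as SImode): (mem:SI (symbol_ref "x"))
     is rewritten as
       (mem:SI (lo_sum:SI (high:SI (symbol_ref "x")) (symbol_ref "x")))
     and the returned split point is the HIGH subexpression.  */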
2326 /* If we have a PLUS whose second operand is a constant and the
2327 address is not valid, perhaps we can split it up using
2328 the machine-specific way to split large constants.  We use
2329 the first pseudo-reg (one of the virtual regs) as a placeholder;
2330 it will not remain in the result. */
2331 if (GET_CODE (XEXP (x, 0)) == PLUS
2332 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2333 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2335 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2336 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2337 subst_insn);
2339 /* This should have produced two insns, each of which sets our
2340 placeholder. If the source of the second is a valid address,
2341 we can put both sources together and make a split point
2342 in the middle. */
2344 if (seq && XVECLEN (seq, 0) == 2
2345 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2346 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2347 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2348 && ! reg_mentioned_p (reg,
2349 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2350 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2351 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2352 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2353 && memory_address_p (GET_MODE (x),
2354 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2356 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2357 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2359 /* Replace the placeholder in SRC2 with SRC1. If we can
2360 find where in SRC2 it was placed, that can become our
2361 split point and we can replace this address with SRC2.
2362 Just try two obvious places. */
2364 src2 = replace_rtx (src2, reg, src1);
2365 split = 0;
2366 if (XEXP (src2, 0) == src1)
2367 split = &XEXP (src2, 0);
2368 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2369 && XEXP (XEXP (src2, 0), 0) == src1)
2370 split = &XEXP (XEXP (src2, 0), 0);
2372 if (split)
2374 SUBST (XEXP (x, 0), src2);
2375 return split;
2379 /* If that didn't work, perhaps the first operand is complex and
2380 needs to be computed separately, so make a split point there.
2381 This will occur on machines that just support REG + CONST
2382 and have a constant moved through some previous computation. */
2384 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2385 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2386 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2387 == 'o')))
2388 return &XEXP (XEXP (x, 0), 0);
2390 break;
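  /* Illustrative: for (mem:SI (plus:SI (reg 60) (const_int 0x12345678)))
     on a machine with 16-bit displacements, the placeholder split might
     produce
       (set (reg P) (plus:SI (reg 60) (const_int 0x12340000)))
       (set (reg P) (plus:SI (reg P) (const_int 0x5678)))
     and after replacing (reg P) in the second source with the first
     source, the inner PLUS becomes the split point.  */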
2392 case SET:
2393 #ifdef HAVE_cc0
2394 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2395 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2396 we need to put the operand into a register. So split at that
2397 point. */
2399 if (SET_DEST (x) == cc0_rtx
2400 && GET_CODE (SET_SRC (x)) != COMPARE
2401 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2402 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2403 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2404 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2405 return &SET_SRC (x);
2406 #endif
2408 /* See if we can split SET_SRC as it stands. */
2409 split = find_split_point (&SET_SRC (x), insn);
2410 if (split && split != &SET_SRC (x))
2411 return split;
2413 /* See if this is a bitfield assignment with everything constant. If
2414 so, this is an IOR of an AND, so split it into that. */
2415 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2416 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2417 <= HOST_BITS_PER_WIDE_INT)
2418 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2419 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2420 && GET_CODE (SET_SRC (x)) == CONST_INT
2421 && ((INTVAL (XEXP (SET_DEST (x), 1))
2422 + INTVAL (XEXP (SET_DEST (x), 2)))
2423 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2424 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2426 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2427 int len = INTVAL (XEXP (SET_DEST (x), 1));
2428 int src = INTVAL (SET_SRC (x));
2429 rtx dest = XEXP (SET_DEST (x), 0);
2430 enum machine_mode mode = GET_MODE (dest);
2431 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2433 #if BITS_BIG_ENDIAN
2434 pos = GET_MODE_BITSIZE (mode) - len - pos;
2435 #endif
2437 if (src == mask)
2438 SUBST (SET_SRC (x),
2439 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2440 else
2441 SUBST (SET_SRC (x),
2442 gen_binary (IOR, mode,
2443 gen_binary (AND, mode, dest,
2444 GEN_INT (~ (mask << pos)
2445 & GET_MODE_MASK (mode))),
2446 GEN_INT (src << pos)));
2448 SUBST (SET_DEST (x), dest);
2450 split = find_split_point (&SET_SRC (x), insn);
2451 if (split && split != &SET_SRC (x))
2452 return split;
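  /* Worked example (illustrative, little-endian bit numbering): for
       (set (zero_extract:SI (reg 60) (const_int 3) (const_int 4))
            (const_int 5))
     we get len = 3, pos = 4, mask = 7, so the SET is rewritten as
       (set (reg 60) (ior:SI (and:SI (reg 60) (const_int -113))
                             (const_int 80)))
     since ~(7 << 4) is -113 (modulo the mode mask) and 5 << 4 is 80.  */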
2455 /* Otherwise, see if this is an operation that we can split into two.
2456 If so, try to split that. */
2457 code = GET_CODE (SET_SRC (x));
2459 switch (code)
2461 case AND:
2462 /* If we are AND'ing with a large constant that is only a single
2463 bit and the result is only being used in a context where we
2464 need to know if it is zero or non-zero, replace it with a bit
2465 extraction. This will avoid the large constant, which might
2466 have taken more than one insn to make. If the constant were
2467 not a valid argument to the AND but took only one insn to make,
2468 this is no worse, but if it took more than one insn, it will
2469 be better. */
2471 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2472 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2473 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2474 && GET_CODE (SET_DEST (x)) == REG
2475 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2476 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2477 && XEXP (*split, 0) == SET_DEST (x)
2478 && XEXP (*split, 1) == const0_rtx)
2480 SUBST (SET_SRC (x),
2481 make_extraction (GET_MODE (SET_DEST (x)),
2482 XEXP (SET_SRC (x), 0),
2483 pos, NULL_RTX, 1, 1, 0, 0));
2484 return find_split_point (loc, insn);
2486 break;
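  /* E.g. (illustrative, little-endian bit numbering): if
     (set (reg 60) (and:SI (reg 61) (const_int 128))) and (reg 60) is used
     exactly once, in (eq (reg 60) (const_int 0)), the AND is replaced by
     a one-bit (zero_extract:SI (reg 61) (const_int 1) (const_int 7)),
     avoiding the possibly expensive constant 128.  */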
2488 case SIGN_EXTEND:
2489 inner = XEXP (SET_SRC (x), 0);
2490 pos = 0;
2491 len = GET_MODE_BITSIZE (GET_MODE (inner));
2492 unsignedp = 0;
2493 break;
2495 case SIGN_EXTRACT:
2496 case ZERO_EXTRACT:
2497 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2498 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2500 inner = XEXP (SET_SRC (x), 0);
2501 len = INTVAL (XEXP (SET_SRC (x), 1));
2502 pos = INTVAL (XEXP (SET_SRC (x), 2));
2504 #if BITS_BIG_ENDIAN
2505 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2506 #endif
2507 unsignedp = (code == ZERO_EXTRACT);
2509 break;
2512 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2514 enum machine_mode mode = GET_MODE (SET_SRC (x));
2516 /* For unsigned, we have a choice of a shift followed by an
2517 AND or two shifts. Use two shifts for field sizes where the
2518 constant might be too large. We assume here that we can
2519 always at least get 8-bit constants in an AND insn, which is
2520 true for every current RISC. */
2522 if (unsignedp && len <= 8)
2524 SUBST (SET_SRC (x),
2525 gen_rtx_combine
2526 (AND, mode,
2527 gen_rtx_combine (LSHIFTRT, mode,
2528 gen_lowpart_for_combine (mode, inner),
2529 GEN_INT (pos)),
2530 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2532 split = find_split_point (&SET_SRC (x), insn);
2533 if (split && split != &SET_SRC (x))
2534 return split;
2536 else
2538 SUBST (SET_SRC (x),
2539 gen_rtx_combine
2540 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2541 gen_rtx_combine (ASHIFT, mode,
2542 gen_lowpart_for_combine (mode, inner),
2543 GEN_INT (GET_MODE_BITSIZE (mode)
2544 - len - pos)),
2545 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2547 split = find_split_point (&SET_SRC (x), insn);
2548 if (split && split != &SET_SRC (x))
2549 return split;
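  /* E.g. (illustrative): (zero_extract:SI (reg 60) (const_int 3)
     (const_int 2)) has len = 3 <= 8, so it takes the shift-and-AND form
       (and:SI (lshiftrt:SI (reg 60) (const_int 2)) (const_int 7));
     a SIGN_EXTRACT instead takes the two-shift (ashift/ashiftrt) form.  */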
2553 /* See if this is a simple operation with a constant as the second
2554 operand. It might be that this constant is out of range and hence
2555 could be used as a split point. */
2556 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2557 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2558 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2559 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2560 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2561 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2562 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2563 == 'o'))))
2564 return &XEXP (SET_SRC (x), 1);
2566 /* Finally, see if this is a simple operation with its first operand
2567 not in a register. The operation might require this operand in a
2568 register, so return it as a split point. We can always do this
2569 because if the first operand were another operation, we would have
2570 already found it as a split point. */
2571 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2572 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2573 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2574 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2575 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2576 return &XEXP (SET_SRC (x), 0);
2578 return 0;
2580 case AND:
2581 case IOR:
2582 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2583 it is better to write this as (not (ior A B)) so we can split it.
2584 Similarly for IOR. */
2585 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2587 SUBST (*loc,
2588 gen_rtx_combine (NOT, GET_MODE (x),
2589 gen_rtx_combine (code == IOR ? AND : IOR,
2590 GET_MODE (x),
2591 XEXP (XEXP (x, 0), 0),
2592 XEXP (XEXP (x, 1), 0))));
2593 return find_split_point (loc, insn);
2596 /* Many RISC machines have a large set of logical insns. If the
2597 second operand is a NOT, put it first so we will try to split the
2598 other operand first. */
2599 if (GET_CODE (XEXP (x, 1)) == NOT)
2601 rtx tem = XEXP (x, 0);
2602 SUBST (XEXP (x, 0), XEXP (x, 1));
2603 SUBST (XEXP (x, 1), tem);
2605 break;
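  /* E.g. a NOR written as (and:SI (not:SI (reg 60)) (not:SI (reg 61)))
     becomes (not:SI (ior:SI (reg 60) (reg 61))), whose inner IOR can then
     serve as a split point.  */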
2608 /* Otherwise, select our actions depending on our rtx class. */
2609 switch (GET_RTX_CLASS (code))
2611 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2612 case '3':
2613 split = find_split_point (&XEXP (x, 2), insn);
2614 if (split)
2615 return split;
2616 /* ... fall through ... */
2617 case '2':
2618 case 'c':
2619 case '<':
2620 split = find_split_point (&XEXP (x, 1), insn);
2621 if (split)
2622 return split;
2623 /* ... fall through ... */
2624 case '1':
2625 /* Some machines have (and (shift ...) ...) insns. If X is not
2626 an AND, but XEXP (X, 0) is, use it as our split point. */
2627 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2628 return &XEXP (x, 0);
2630 split = find_split_point (&XEXP (x, 0), insn);
2631 if (split)
2632 return split;
2633 return loc;
2636 /* Otherwise, we don't have a split point. */
2637 return 0;
2640 /* Throughout X, replace FROM with TO, and return the result.
2641 The result is TO if X is FROM;
2642 otherwise the result is X, but its contents may have been modified.
2643 If they were modified, a record was made in undobuf so that
2644 undo_all will (among other things) return X to its original state.
2646 If the number of changes necessary is too great to record for undoing,
2647 the excess changes are not made, so the result is invalid.
2648 The changes already made can still be undone.
2649 undobuf.num_undo is incremented for such changes, so by testing it
2650 the caller can tell whether the result is valid.
2652 `n_occurrences' is incremented each time FROM is replaced.
2654 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2656 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2657 by copying if `n_occurrences' is non-zero. */
2659 static rtx
2660 subst (x, from, to, in_dest, unique_copy)
2661 register rtx x, from, to;
2662 int in_dest;
2663 int unique_copy;
2665 register char *fmt;
2666 register int len, i;
2667 register enum rtx_code code = GET_CODE (x), orig_code = code;
2668 rtx temp;
2669 enum machine_mode mode = GET_MODE (x);
2670 enum machine_mode op0_mode = VOIDmode;
2671 rtx other_insn;
2672 rtx *cc_use;
2673 int n_restarts = 0;
2675 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2676 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2677 If it is 0, that cannot be done. We can now do this for any MEM
2678 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2679 If not for that, MEM's would very rarely be safe. */
2681 /* Reject MODEs bigger than a word, because we might not be able
2682 to reference a two-register group starting with an arbitrary register
2683 (and currently gen_lowpart might crash for a SUBREG). */
2685 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2686 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2688 /* Two expressions are equal if they are identical copies of a shared
2689 RTX or if they are both registers with the same register number
2690 and mode. */
2692 #define COMBINE_RTX_EQUAL_P(X,Y) \
2693 ((X) == (Y) \
2694 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2695 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2697 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2699 n_occurrences++;
2700 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2703 /* If X and FROM are the same register but different modes, they will
2704 not have been seen as equal above. However, flow.c will make a
2705 LOG_LINKS entry for that case. If we do nothing, we will try to
2706 rerecognize our original insn and, when it succeeds, we will
2707 delete the feeding insn, which is incorrect.
2709 So force this insn not to match in this (rare) case. */
2710 if (! in_dest && code == REG && GET_CODE (from) == REG
2711 && REGNO (x) == REGNO (from))
2712 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2714 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2715 of which may contain things that can be combined. */
2716 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2717 return x;
2719 /* It is possible to have a subexpression appear twice in the insn.
2720 Suppose that FROM is a register that appears within TO.
2721 Then, after that subexpression has been scanned once by `subst',
2722 the second time it is scanned, TO may be found. If we were
2723 to scan TO here, we would find FROM within it and create a
2724 self-referent rtl structure which is completely wrong. */
2725 if (COMBINE_RTX_EQUAL_P (x, to))
2726 return to;
2728 len = GET_RTX_LENGTH (code);
2729 fmt = GET_RTX_FORMAT (code);
2731 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2732 set up to skip this common case. All other cases where we want to
2733 suppress replacing something inside a SET_SRC are handled via the
2734 IN_DEST operand. */
2735 if (code == SET
2736 && (GET_CODE (SET_DEST (x)) == REG
2737 || GET_CODE (SET_DEST (x)) == CC0
2738 || GET_CODE (SET_DEST (x)) == PC))
2739 fmt = "ie";
2741 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2742 if (fmt[0] == 'e')
2743 op0_mode = GET_MODE (XEXP (x, 0));
2745 for (i = 0; i < len; i++)
2747 if (fmt[i] == 'E')
2749 register int j;
2750 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2752 register rtx new;
2753 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2755 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2756 n_occurrences++;
2758 else
2760 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2762 /* If this substitution failed, this whole thing fails. */
2763 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2764 return new;
2767 SUBST (XVECEXP (x, i, j), new);
2770 else if (fmt[i] == 'e')
2772 register rtx new;
2774 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2776 /* In general, don't install a subreg involving two modes that are
2777 not tieable.  It can worsen register allocation, and can even
2778 make invalid reload insns, since the reg inside may need to
2779 be copied from in the outside mode, and that may be invalid
2780 if it is an fp reg copied in integer mode.
2782 We allow two exceptions to this: It is valid if it is inside
2783 another SUBREG and the mode of that SUBREG and the mode of
2784 the inside of TO is tieable and it is valid if X is a SET
2785 that copies FROM to CC0. */
2786 if (GET_CODE (to) == SUBREG
2787 && ! MODES_TIEABLE_P (GET_MODE (to),
2788 GET_MODE (SUBREG_REG (to)))
2789 && ! (code == SUBREG
2790 && MODES_TIEABLE_P (mode, GET_MODE (SUBREG_REG (to))))
2791 #ifdef HAVE_cc0
2792 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2793 #endif
2795 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
2797 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2798 n_occurrences++;
2800 else
2801 /* If we are in a SET_DEST, suppress most cases unless we
2802 have gone inside a MEM, in which case we want to
2803 simplify the address. We assume here that things that
2804 are actually part of the destination have their inner
2805 parts in the first expression. This is true for SUBREG,
2806 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2807 things aside from REG and MEM that should appear in a
2808 SET_DEST. */
2809 new = subst (XEXP (x, i), from, to,
2810 (((in_dest
2811 && (code == SUBREG || code == STRICT_LOW_PART
2812 || code == ZERO_EXTRACT))
2813 || code == SET)
2814 && i == 0), unique_copy);
2816 /* If we found that we will have to reject this combination,
2817 indicate that by returning the CLOBBER ourselves, rather than
2818 an expression containing it. This will speed things up as
2819 well as prevent accidents where two CLOBBERs are considered
2820 to be equal, thus producing an incorrect simplification. */
2822 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2823 return new;
2825 SUBST (XEXP (x, i), new);
2829 /* We come back to here if we have replaced the expression with one of
2830 a different code and it is likely that further simplification will be
2831 possible. */
2833 restart:
2835 /* If we have restarted more than 4 times, we are probably looping, so
2836 give up. */
2837 if (++n_restarts > 4)
2838 return x;
2840 /* If we are restarting at all, it means that we no longer know the
2841 original mode of operand 0 (since we have probably changed the
2842 form of X). */
2844 if (n_restarts > 1)
2845 op0_mode = VOIDmode;
2847 code = GET_CODE (x);
2849 /* If this is a commutative operation, put a constant last and a complex
2850 expression first. We don't need to do this for comparisons here. */
2851 if (GET_RTX_CLASS (code) == 'c'
2852 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2853 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2854 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2855 || (GET_CODE (XEXP (x, 0)) == SUBREG
2856 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2857 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2859 temp = XEXP (x, 0);
2860 SUBST (XEXP (x, 0), XEXP (x, 1));
2861 SUBST (XEXP (x, 1), temp);
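  /* E.g. (plus:SI (const_int 4) (reg 60)) is canonicalized here to
     (plus:SI (reg 60) (const_int 4)).  */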
2864 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2865 sign extension of a PLUS with a constant, reverse the order of the sign
2866 extension and the addition.  Note that this is not the same as the original
2867 code, but overflow is undefined for signed values. Also note that the
2868 PLUS will have been partially moved "inside" the sign-extension, so that
2869 the first operand of X will really look like:
2870 (ashiftrt (plus (ashift A C4) C5) C4).
2871 We convert this to
2872 (plus (ashiftrt (ashift A C4) C4) C2), where C2 is (ashiftrt C5 C4),
2873 and replace the first operand of X with that expression. Later parts
2874 of this function may simplify the expression further.
2876 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2877 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2878 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2880 We do this to simplify address expressions. */
2882 if ((code == PLUS || code == MINUS || code == MULT)
2883 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2884 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2885 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2886 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2887 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2888 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2889 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2890 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2891 XEXP (XEXP (XEXP (x, 0), 0), 1),
2892 XEXP (XEXP (x, 0), 1))) != 0)
2894 rtx new
2895 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2896 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2897 INTVAL (XEXP (XEXP (x, 0), 1)));
2899 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2900 INTVAL (XEXP (XEXP (x, 0), 1)));
2902 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2905 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2906 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2907 things. Don't deal with operations that change modes here. */
2909 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2910 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2912 /* Don't do this by using SUBST inside X since we might be messing
2913 up a shared expression. */
2914 rtx cond = XEXP (XEXP (x, 0), 0);
2915 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2916 XEXP (x, 1)),
2917 pc_rtx, pc_rtx, 0, 0);
2918 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2919 XEXP (x, 1)),
2920 pc_rtx, pc_rtx, 0, 0);
2923 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2924 goto restart;
2927 else if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2928 && GET_CODE (XEXP (x, 1)) == IF_THEN_ELSE)
2930 /* Don't do this by using SUBST inside X since we might be messing
2931 up a shared expression. */
2932 rtx cond = XEXP (XEXP (x, 1), 0);
2933 rtx t_arm = subst (gen_binary (code, mode, XEXP (x, 0),
2934 XEXP (XEXP (x, 1), 1)),
2935 pc_rtx, pc_rtx, 0, 0);
2936 rtx f_arm = subst (gen_binary (code, mode, XEXP (x, 0),
2937 XEXP (XEXP (x, 1), 2)),
2938 pc_rtx, pc_rtx, 0, 0);
2940 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2941 goto restart;
2944 else if (GET_RTX_CLASS (code) == '1'
2945 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2946 && GET_MODE (XEXP (x, 0)) == mode)
2948 rtx cond = XEXP (XEXP (x, 0), 0);
2949 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2950 pc_rtx, pc_rtx, 0, 0);
2951 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2952 pc_rtx, pc_rtx, 0, 0);
2954 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2955 goto restart;
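  /* E.g. (illustrative): (plus:SI (if_then_else (eq (reg 60) (const_int 0))
     (reg 61) (reg 62)) (const_int 4)) becomes
       (if_then_else (eq (reg 60) (const_int 0))
                     (plus:SI (reg 61) (const_int 4))
                     (plus:SI (reg 62) (const_int 4)))
     with each new arm run back through subst for further simplification.  */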
2958 /* Try to fold this expression in case we have constants that weren't
2959 present before. */
2960 temp = 0;
2961 switch (GET_RTX_CLASS (code))
2963 case '1':
2964 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2965 break;
2966 case '<':
2967 temp = simplify_relational_operation (code, op0_mode,
2968 XEXP (x, 0), XEXP (x, 1));
2969 #ifdef FLOAT_STORE_FLAG_VALUE
2970 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2971 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2972 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2973 #endif
2974 break;
2975 case 'c':
2976 case '2':
2977 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2978 break;
2979 case 'b':
2980 case '3':
2981 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2982 XEXP (x, 1), XEXP (x, 2));
2983 break;
2986 if (temp)
2987 x = temp, code = GET_CODE (temp);
2989 /* First see if we can apply the inverse distributive law. */
2990 if (code == PLUS || code == MINUS
2991 || code == AND || code == IOR || code == XOR)
2993 x = apply_distributive_law (x);
2994 code = GET_CODE (x);
2997 /* If CODE is an associative operation not otherwise handled, see if we
2998 can associate some operands. This can win if they are constants or
2999 if they are logically related (i.e. (a & b) & a).  */
3000 if ((code == PLUS || code == MINUS
3001 || code == MULT || code == AND || code == IOR || code == XOR
3002 || code == DIV || code == UDIV
3003 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3004 && INTEGRAL_MODE_P (mode))
3006 if (GET_CODE (XEXP (x, 0)) == code)
3008 rtx other = XEXP (XEXP (x, 0), 0);
3009 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3010 rtx inner_op1 = XEXP (x, 1);
3011 rtx inner;
3013 /* Make sure we pass the constant operand if any as the second
3014 one if this is a commutative operation. */
3015 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3017 rtx tem = inner_op0;
3018 inner_op0 = inner_op1;
3019 inner_op1 = tem;
3021 inner = simplify_binary_operation (code == MINUS ? PLUS
3022 : code == DIV ? MULT
3023 : code == UDIV ? MULT
3024 : code,
3025 mode, inner_op0, inner_op1);
3027 /* For commutative operations, try the other pair if that one
3028 didn't simplify. */
3029 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3031 other = XEXP (XEXP (x, 0), 1);
3032 inner = simplify_binary_operation (code, mode,
3033 XEXP (XEXP (x, 0), 0),
3034 XEXP (x, 1));
3037 if (inner)
3039 x = gen_binary (code, mode, other, inner);
3040 goto restart;
3046 /* A little bit of algebraic simplification here. */
3047 switch (code)
3049 case MEM:
3050 /* Ensure that our address has any ASHIFTs converted to MULT in case
3051 address-recognizing predicates are called later. */
3052 temp = make_compound_operation (XEXP (x, 0), MEM);
3053 SUBST (XEXP (x, 0), temp);
3054 break;
3056 case SUBREG:
3057 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3058 is paradoxical. If we can't do that safely, then it becomes
3059 something nonsensical so that this combination won't take place. */
3061 if (GET_CODE (SUBREG_REG (x)) == MEM
3062 && (GET_MODE_SIZE (mode)
3063 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3065 rtx inner = SUBREG_REG (x);
3066 int endian_offset = 0;
3067 /* Don't change the mode of the MEM
3068 if that would change the meaning of the address. */
3069 if (MEM_VOLATILE_P (SUBREG_REG (x))
3070 || mode_dependent_address_p (XEXP (inner, 0)))
3071 return gen_rtx (CLOBBER, mode, const0_rtx);
3073 #if BYTES_BIG_ENDIAN
3074 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3075 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3076 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3077 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
3078 #endif
3079 /* Note if the plus_constant doesn't make a valid address
3080 then this combination won't be accepted. */
3081 x = gen_rtx (MEM, mode,
3082 plus_constant (XEXP (inner, 0),
3083 (SUBREG_WORD (x) * UNITS_PER_WORD
3084 + endian_offset)));
3085 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3086 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3087 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3088 return x;
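  /* E.g. (subreg:QI (mem:SI (reg 60)) 0) becomes (mem:QI (reg 60)) on a
     little-endian machine; on a big-endian one the address is offset so
     that the low-order byte is still the one addressed.  */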
3091 /* If we are in a SET_DEST, these other cases can't apply. */
3092 if (in_dest)
3093 return x;
3095 /* Changing mode twice with SUBREG => just change it once,
3096 or not at all if changing back to starting mode. */
3097 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3099 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3100 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3101 return SUBREG_REG (SUBREG_REG (x));
3103 SUBST_INT (SUBREG_WORD (x),
3104 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3105 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3108 /* SUBREG of a hard register => just change the register number
3109 and/or mode. If the hard register is not valid in that mode,
3110 suppress this combination. If the hard register is the stack,
3111 frame, or argument pointer, leave this as a SUBREG. */
3113 if (GET_CODE (SUBREG_REG (x)) == REG
3114 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3115 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3116 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3117 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3118 #endif
3119 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3120 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3121 #endif
3122 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3124 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3125 mode))
3126 return gen_rtx (REG, mode,
3127 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3128 else
3129 return gen_rtx (CLOBBER, mode, const0_rtx);
3132 /* For a constant, try to pick up the part we want. Handle a full
3133 word and low-order part. Only do this if we are narrowing
3134 the constant; if it is being widened, we have no idea what
3135 the extra bits will have been set to. */
3137 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3138 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3139 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3140 && GET_MODE_CLASS (mode) == MODE_INT)
3142 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3143 0, op0_mode);
3144 if (temp)
3145 return temp;
3148 /* If we want a subreg of a constant, at offset 0,
3149 take the low bits. On a little-endian machine, that's
3150 always valid. On a big-endian machine, it's valid
3151 only if the constant's mode fits in one word. */
3152 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3153 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3154 #if WORDS_BIG_ENDIAN
3155 && GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD
3156 #endif
3158 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3160 /* If we are narrowing the object, we need to see if we can simplify
3161 the expression for the object knowing that we only need the
3162 low-order bits. */
3164 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
3165 && subreg_lowpart_p (x))
3166 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_MASK (mode),
3167 NULL_RTX);
3168 break;
3170 case NOT:
3171 /* (not (plus X -1)) can become (neg X). */
3172 if (GET_CODE (XEXP (x, 0)) == PLUS
3173 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3175 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3176 goto restart;
3179 /* Similarly, (not (neg X)) is (plus X -1). */
3180 if (GET_CODE (XEXP (x, 0)) == NEG)
3182 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3183 goto restart;
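/* Both conversions follow from the two's complement identity ~Z == -Z - 1:
   ~(X + -1) == -(X - 1) - 1 == -X, and ~(-X) == X - 1 == X + -1.  */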
3186 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3187 if (GET_CODE (XEXP (x, 0)) == XOR
3188 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3189 && (temp = simplify_unary_operation (NOT, mode,
3190 XEXP (XEXP (x, 0), 1),
3191 mode)) != 0)
3193 SUBST (XEXP (XEXP (x, 0), 1), temp);
3194 return XEXP (x, 0);
3197 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3198 other than 1, but that is not valid. We could do a similar
3199 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3200 but this doesn't seem common enough to bother with. */
3201 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3202 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3204 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
3205 XEXP (XEXP (x, 0), 1));
3206 goto restart;
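/* A concrete check, taking an 8-bit mode for illustration: ~(1 << 3) is
   0xf7, and rotating ~1 == 0xfe left by 3 likewise moves the single zero
   bit into position 3, giving 0xf7.  */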
3209 if (GET_CODE (XEXP (x, 0)) == SUBREG
3210 && subreg_lowpart_p (XEXP (x, 0))
3211 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3212 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3213 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3214 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3216 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3218 x = gen_rtx (ROTATE, inner_mode,
3219 gen_unary (NOT, inner_mode, const1_rtx),
3220 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3221 x = gen_lowpart_for_combine (mode, x);
3222 goto restart;
3225 #if STORE_FLAG_VALUE == -1
3226 /* (not (comparison foo bar)) can be done by reversing the comparison
3227 code if valid. */
3228 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3229 && reversible_comparison_p (XEXP (x, 0)))
3230 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3231 mode, XEXP (XEXP (x, 0), 0),
3232 XEXP (XEXP (x, 0), 1));
3234 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3235 is (lt foo (const_int 0)), so we can perform the above
3236 simplification. */
3238 if (GET_CODE (XEXP (x, 0)) == ASHIFTRT
3240 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3241 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3242 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3243 #endif
3245 /* Apply De Morgan's laws to reduce number of patterns for machines
3246 with negating logical insns (and-not, nand, etc.). If result has
3247 only one NOT, put it first, since that is how the patterns are
3248 coded. */
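/* For example, (not (ior A (not B))) becomes (and (not A) B) by De Morgan,
   and the surviving NOT lands on the first operand as required.  */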
3250 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3252 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3254 if (GET_CODE (in1) == NOT)
3255 in1 = XEXP (in1, 0);
3256 else
3257 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3259 if (GET_CODE (in2) == NOT)
3260 in2 = XEXP (in2, 0);
3261 else if (GET_CODE (in2) == CONST_INT
3262 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3263 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3264 else
3265 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3267 if (GET_CODE (in2) == NOT)
3269 rtx tem = in2;
3270 in2 = in1; in1 = tem;
3273 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3274 mode, in1, in2);
3275 goto restart;
3277 break;
3279 case NEG:
3280 /* (neg (plus X 1)) can become (not X). */
3281 if (GET_CODE (XEXP (x, 0)) == PLUS
3282 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3284 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3285 goto restart;
3288 /* Similarly, (neg (not X)) is (plus X 1). */
3289 if (GET_CODE (XEXP (x, 0)) == NOT)
3291 x = plus_constant (XEXP (XEXP (x, 0), 0), 1);
3292 goto restart;
3295 /* (neg (minus X Y)) can become (minus Y X). */
3296 if (GET_CODE (XEXP (x, 0)) == MINUS
3297 && (! FLOAT_MODE_P (mode)
3298 /* x-y != -(y-x) with IEEE floating point. */
3299 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3301 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3302 XEXP (XEXP (x, 0), 0));
3303 goto restart;
3306 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3307 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3308 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3310 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3311 goto restart;
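/* Checking both possible values: A == 0 gives -(0 ^ 1) == -1 == 0 + -1,
   and A == 1 gives -(1 ^ 1) == 0 == 1 + -1.  */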
3314 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3315 if we can then eliminate the NEG (e.g.,
3316 if the operand is a constant). */
3318 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3320 temp = simplify_unary_operation (NEG, mode,
3321 XEXP (XEXP (x, 0), 0), mode);
3322 if (temp)
3324 SUBST (XEXP (XEXP (x, 0), 0), temp);
3325 return XEXP (x, 0);
3329 temp = expand_compound_operation (XEXP (x, 0));
3331 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3332 replaced by (lshiftrt X C). This will convert
3333 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
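/* E.g., in an 8-bit mode with C == 7: for X == 0x80 the arithmetic shift
   gives -1, whose negation is 1, which is exactly (lshiftrt 0x80 7);
   for nonnegative X both forms give 0.  */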
3335 if (GET_CODE (temp) == ASHIFTRT
3336 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3337 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3339 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3340 INTVAL (XEXP (temp, 1)));
3341 goto restart;
3344 /* If X has only a single bit that might be nonzero, say, bit I, convert
3345 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3346 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3347 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3348 or a SUBREG of one since we'd be making the expression more
3349 complex if it was just a register. */
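/* For concreteness, in an 8-bit mode where only bit 3 of TEMP can be
   nonzero (I == 3, so C - I == 4): 8 << 4 == 0x80, and the arithmetic
   shift right by 4 gives 0xf8 == -8, i.e. (neg 8); zero stays zero.  */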
3351 if (GET_CODE (temp) != REG
3352 && ! (GET_CODE (temp) == SUBREG
3353 && GET_CODE (SUBREG_REG (temp)) == REG)
3354 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3356 rtx temp1 = simplify_shift_const
3357 (NULL_RTX, ASHIFTRT, mode,
3358 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3359 GET_MODE_BITSIZE (mode) - 1 - i),
3360 GET_MODE_BITSIZE (mode) - 1 - i);
3362 /* If all we did was surround TEMP with the two shifts, we
3363 haven't improved anything, so don't use it. Otherwise,
3364 we are better off with TEMP1. */
3365 if (GET_CODE (temp1) != ASHIFTRT
3366 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3367 || XEXP (XEXP (temp1, 0), 0) != temp)
3369 x = temp1;
3370 goto restart;
3373 break;
3375 case FLOAT_TRUNCATE:
3376 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3377 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3378 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3379 return XEXP (XEXP (x, 0), 0);
3380 break;
3382 #ifdef HAVE_cc0
3383 case COMPARE:
3384 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3385 using cc0, in which case we want to leave it as a COMPARE
3386 so we can distinguish it from a register-register-copy. */
3387 if (XEXP (x, 1) == const0_rtx)
3388 return XEXP (x, 0);
3390 /* In IEEE floating point, x-0 is not the same as x. */
3391 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3392 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))))
3393 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3394 return XEXP (x, 0);
3395 break;
3396 #endif
3398 case CONST:
3399 /* (const (const X)) can become (const X). Do it this way rather than
3400 returning the inner CONST since CONST can be shared with a
3401 REG_EQUAL note. */
3402 if (GET_CODE (XEXP (x, 0)) == CONST)
3403 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3404 break;
3406 #ifdef HAVE_lo_sum
3407 case LO_SUM:
3408 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3409 can add in an offset. find_split_point will split this address up
3410 again if it doesn't match. */
3411 if (GET_CODE (XEXP (x, 0)) == HIGH
3412 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3413 return XEXP (x, 1);
3414 break;
3415 #endif
3417 case PLUS:
3418 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3419 outermost. That's because that's the way indexed addresses are
3420 supposed to appear. This code used to check many more cases, but
3421 they are now checked elsewhere. */
3422 if (GET_CODE (XEXP (x, 0)) == PLUS
3423 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3424 return gen_binary (PLUS, mode,
3425 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3426 XEXP (x, 1)),
3427 XEXP (XEXP (x, 0), 1));
3429 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3430 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3431 bit-field and can be replaced by either a sign_extend or a
3432 sign_extract. The `and' may be a zero_extend. */
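/* For instance, with pow2 == 256 this is ((FOO & 255) ^ 128) - 128, the
   usual idiom for sign-extending the low byte: 0x7f maps to 127 and
   0x80 maps to -128.  */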
3433 if (GET_CODE (XEXP (x, 0)) == XOR
3434 && GET_CODE (XEXP (x, 1)) == CONST_INT
3435 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3436 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3437 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3438 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3439 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3440 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3441 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3442 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3443 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3444 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3445 == i + 1))))
3447 x = simplify_shift_const
3448 (NULL_RTX, ASHIFTRT, mode,
3449 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3450 XEXP (XEXP (XEXP (x, 0), 0), 0),
3451 GET_MODE_BITSIZE (mode) - (i + 1)),
3452 GET_MODE_BITSIZE (mode) - (i + 1));
3453 goto restart;
3456 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3457 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3458 is 1. This produces better code than the alternative immediately
3459 below. */
3460 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3461 && reversible_comparison_p (XEXP (x, 0))
3462 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3463 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3465 x = gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3466 mode, XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1));
3467 x = gen_unary (NEG, mode, x);
3468 goto restart;
3471 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3472 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3473 the bitsize of the mode - 1. This allows simplification of
3474 "a = (b & 8) == 0;" */
3475 if (XEXP (x, 1) == constm1_rtx
3476 && GET_CODE (XEXP (x, 0)) != REG
3477 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3478 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3479 && nonzero_bits (XEXP (x, 0), mode) == 1)
3481 x = simplify_shift_const
3482 (NULL_RTX, ASHIFTRT, mode,
3483 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3484 gen_rtx_combine (XOR, mode,
3485 XEXP (x, 0), const1_rtx),
3486 GET_MODE_BITSIZE (mode) - 1),
3487 GET_MODE_BITSIZE (mode) - 1);
3488 goto restart;
3491 /* If we are adding two things that have no bits in common, convert
3492 the addition into an IOR. This will often be further simplified,
3493 for example in cases like ((a & 1) + (a & 2)), which can
3494 become a & 3. */
3496 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3497 && (nonzero_bits (XEXP (x, 0), mode)
3498 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3500 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3501 goto restart;
3503 break;
3505 case MINUS:
3506 #if STORE_FLAG_VALUE == 1
3507 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3508 code if valid. */
3509 if (XEXP (x, 0) == const1_rtx
3510 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3511 && reversible_comparison_p (XEXP (x, 1)))
3512 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3513 mode, XEXP (XEXP (x, 1), 0),
3514 XEXP (XEXP (x, 1), 1));
3515 #endif
3517 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3518 (and <foo> (const_int pow2-1)) */
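/* E.g., with pow2 == 8: FOO - (FOO & -8) keeps only the low three bits,
   i.e. FOO & 7, because the masks -8 and 7 partition the bits of FOO.  */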
3519 if (GET_CODE (XEXP (x, 1)) == AND
3520 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3521 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3522 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3524 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3525 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3526 goto restart;
3528 break;
3530 case MULT:
3531 /* If we have (mult (plus A B) C), apply the distributive law and then
3532 the inverse distributive law to see if things simplify. This
3533 occurs mostly in addresses, often when unrolling loops. */
3535 if (GET_CODE (XEXP (x, 0)) == PLUS)
3537 x = apply_distributive_law
3538 (gen_binary (PLUS, mode,
3539 gen_binary (MULT, mode,
3540 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3541 gen_binary (MULT, mode,
3542 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3544 if (GET_CODE (x) != MULT)
3545 goto restart;
3548 /* If this is multiplication by a power of two and its first operand is
3549 a shift, treat the multiply as a shift to allow the shifts to
3550 possibly combine. */
3551 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3552 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3553 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3554 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3555 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3556 || GET_CODE (XEXP (x, 0)) == ROTATE
3557 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3559 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3560 goto restart;
3563 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3564 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3565 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3566 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3567 XEXP (XEXP (x, 0), 1));
3568 break;
3570 case UDIV:
3571 /* If this is a divide by a power of two, treat it as a shift if
3572 its first operand is a shift. */
3573 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3574 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3575 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3576 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3577 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3578 || GET_CODE (XEXP (x, 0)) == ROTATE
3579 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3581 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3582 goto restart;
3584 break;
3586 case EQ: case NE:
3587 case GT: case GTU: case GE: case GEU:
3588 case LT: case LTU: case LE: case LEU:
3589 /* If the first operand is a condition code, we can't do anything
3590 with it. */
3591 if (GET_CODE (XEXP (x, 0)) == COMPARE
3592 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3593 #ifdef HAVE_cc0
3594 && XEXP (x, 0) != cc0_rtx
3595 #endif
3598 rtx op0 = XEXP (x, 0);
3599 rtx op1 = XEXP (x, 1);
3600 enum rtx_code new_code;
3602 if (GET_CODE (op0) == COMPARE)
3603 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3605 /* Simplify our comparison, if possible. */
3606 new_code = simplify_comparison (code, &op0, &op1);
3608 #if STORE_FLAG_VALUE == 1
3609 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3610 if only the low-order bit is possibly nonzero in X (such as when
3611 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3612 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3613 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3614 (plus X 1).
3616 Remove any ZERO_EXTRACT we made when thinking this was a
3617 comparison. It may now be simpler to use, e.g., an AND. If a
3618 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3619 the call to make_compound_operation in the SET case. */
3621 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3622 && op1 == const0_rtx
3623 && nonzero_bits (op0, mode) == 1)
3624 return gen_lowpart_for_combine (mode,
3625 expand_compound_operation (op0));
3627 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3628 && op1 == const0_rtx
3629 && (num_sign_bit_copies (op0, mode)
3630 == GET_MODE_BITSIZE (mode)))
3632 op0 = expand_compound_operation (op0);
3633 x = gen_unary (NEG, mode, gen_lowpart_for_combine (mode, op0));
3634 goto restart;
3637 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3638 && op1 == const0_rtx
3639 && nonzero_bits (op0, mode) == 1)
3641 op0 = expand_compound_operation (op0);
3642 x = gen_binary (XOR, mode,
3643 gen_lowpart_for_combine (mode, op0),
3644 const1_rtx);
3645 goto restart;
3648 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3649 && op1 == const0_rtx
3650 && (num_sign_bit_copies (op0, mode)
3651 == GET_MODE_BITSIZE (mode)))
3653 op0 = expand_compound_operation (op0);
3654 x = plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3655 goto restart;
3657 #endif
3659 #if STORE_FLAG_VALUE == -1
3660 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3661 those above. */
3662 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3663 && op1 == const0_rtx
3664 && (num_sign_bit_copies (op0, mode)
3665 == GET_MODE_BITSIZE (mode)))
3666 return gen_lowpart_for_combine (mode,
3667 expand_compound_operation (op0));
3669 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3670 && op1 == const0_rtx
3671 && nonzero_bits (op0, mode) == 1)
3673 op0 = expand_compound_operation (op0);
3674 x = gen_unary (NEG, mode, gen_lowpart_for_combine (mode, op0));
3675 goto restart;
3678 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3679 && op1 == const0_rtx
3680 && (num_sign_bit_copies (op0, mode)
3681 == GET_MODE_BITSIZE (mode)))
3683 op0 = expand_compound_operation (op0);
3684 x = gen_unary (NOT, mode, gen_lowpart_for_combine (mode, op0));
3685 goto restart;
3688 /* If X is 0/1, (eq X 0) is X-1. */
3689 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3690 && op1 == const0_rtx
3691 && nonzero_bits (op0, mode) == 1)
3693 op0 = expand_compound_operation (op0);
3694 x = plus_constant (gen_lowpart_for_combine (mode, op0), -1);
3695 goto restart;
3697 #endif
3699 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3700 one bit that might be nonzero, we can convert (ne x 0) to
3701 (ashift x c) where C puts the bit in the sign bit. Remove any
3702 AND with STORE_FLAG_VALUE when we are done, since we are only
3703 going to test the sign bit. */
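/* E.g., in an 8-bit mode with STORE_FLAG_VALUE == 0x80 (the sign bit)
   and only bit 2 of X possibly nonzero (I == 2), (ashift X 5) moves that
   bit into the sign bit, producing 0 or 0x80 as required.  */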
3704 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3705 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3706 && (STORE_FLAG_VALUE
3707 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3708 && op1 == const0_rtx
3709 && mode == GET_MODE (op0)
3710 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
3712 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3713 expand_compound_operation (op0),
3714 GET_MODE_BITSIZE (mode) - 1 - i);
3715 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3716 return XEXP (x, 0);
3717 else
3718 return x;
3721 /* If the code changed, return a whole new comparison. */
3722 if (new_code != code)
3723 return gen_rtx_combine (new_code, mode, op0, op1);
3725 /* Otherwise, keep this operation, but maybe change its operands.
3726 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3727 SUBST (XEXP (x, 0), op0);
3728 SUBST (XEXP (x, 1), op1);
3730 break;
3732 case IF_THEN_ELSE:
3733 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3734 used in it is being compared against certain values. Get the
3735 true and false comparisons and see if that says anything about the
3736 value of each arm. */
3738 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3739 && reversible_comparison_p (XEXP (x, 0))
3740 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3742 HOST_WIDE_INT nzb;
3743 rtx from = XEXP (XEXP (x, 0), 0);
3744 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3745 enum rtx_code false_code = reverse_condition (true_code);
3746 rtx true_val = XEXP (XEXP (x, 0), 1);
3747 rtx false_val = true_val;
3748 rtx true_arm = XEXP (x, 1);
3749 rtx false_arm = XEXP (x, 2);
3750 int swapped = 0;
3752 /* If FALSE_CODE is EQ, swap the codes and arms. */
3754 if (false_code == EQ)
3756 swapped = 1, true_code = EQ, false_code = NE;
3757 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3760 /* If we are comparing against zero and the expression being tested
3761 has only a single bit that might be nonzero, that is its value
3762 when it is not equal to zero. Similarly if it is known to be
3763 -1 or 0. */
3765 if (true_code == EQ && true_val == const0_rtx
3766 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3767 false_code = EQ, false_val = GEN_INT (nzb);
3768 else if (true_code == EQ && true_val == const0_rtx
3769 && (num_sign_bit_copies (from, GET_MODE (from))
3770 == GET_MODE_BITSIZE (GET_MODE (from))))
3771 false_code = EQ, false_val = constm1_rtx;
3773 /* Now simplify an arm if we know the value of the register
3774 in the branch and it is used in the arm. Be careful due to
3775 the potential of locally-shared RTL. */
3777 if (reg_mentioned_p (from, true_arm))
3778 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3779 from, true_val),
3780 pc_rtx, pc_rtx, 0, 0);
3781 if (reg_mentioned_p (from, false_arm))
3782 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3783 from, false_val),
3784 pc_rtx, pc_rtx, 0, 0);
3786 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3787 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3790 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3791 reversed, do so to avoid needing two sets of patterns for
3792 subtract-and-branch insns. Similarly if we have a constant in that
3793 position or if the third operand is the same as the first operand
3794 of the comparison. */
3796 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3797 && reversible_comparison_p (XEXP (x, 0))
3798 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3799 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3801 SUBST (XEXP (x, 0),
3802 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3803 GET_MODE (XEXP (x, 0)),
3804 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3806 temp = XEXP (x, 1);
3807 SUBST (XEXP (x, 1), XEXP (x, 2));
3808 SUBST (XEXP (x, 2), temp);
3811 /* If the two arms are identical, we don't need the comparison. */
3813 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3814 && ! side_effects_p (XEXP (x, 0)))
3815 return XEXP (x, 1);
3817 /* Look for cases where we have (abs x) or (neg (abs X)). */
3819 if (GET_MODE_CLASS (mode) == MODE_INT
3820 && GET_CODE (XEXP (x, 2)) == NEG
3821 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3822 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3823 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3824 && ! side_effects_p (XEXP (x, 1)))
3825 switch (GET_CODE (XEXP (x, 0)))
3827 case GT:
3828 case GE:
3829 x = gen_unary (ABS, mode, XEXP (x, 1));
3830 goto restart;
3831 case LT:
3832 case LE:
3833 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3834 goto restart;
3837 /* Look for MIN or MAX. */
3839 if (! FLOAT_MODE_P (mode)
3840 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3841 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3842 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3843 && ! side_effects_p (XEXP (x, 0)))
3844 switch (GET_CODE (XEXP (x, 0)))
3846 case GE:
3847 case GT:
3848 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3849 goto restart;
3850 case LE:
3851 case LT:
3852 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3853 goto restart;
3854 case GEU:
3855 case GTU:
3856 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3857 goto restart;
3858 case LEU:
3859 case LTU:
3860 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3861 goto restart;
3864 #if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
3866 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when
3867 its second operand is zero, this can be done as (OP Z (mult COND C2))
3868 where C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer
3869 ZERO_EXTEND or SIGN_EXTEND as long as Z is already extended (so
3870 we don't destroy it). We can do this kind of thing in some
3871 cases when STORE_FLAG_VALUE is neither of the above, but it isn't
3872 worth checking for. */
3874 if (mode != VOIDmode && ! side_effects_p (x))
3876 rtx t = make_compound_operation (XEXP (x, 1), SET);
3877 rtx f = make_compound_operation (XEXP (x, 2), SET);
3878 rtx cond_op0 = XEXP (XEXP (x, 0), 0);
3879 rtx cond_op1 = XEXP (XEXP (x, 0), 1);
3880 enum rtx_code cond_op = GET_CODE (XEXP (x, 0));
3881 enum rtx_code op, extend_op = NIL;
3882 enum machine_mode m = mode;
3883 rtx z = 0, c1, c2;
3885 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
3886 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
3887 || GET_CODE (t) == ASHIFT
3888 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
3889 && rtx_equal_p (XEXP (t, 0), f))
3890 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
3891 else if (GET_CODE (t) == SIGN_EXTEND
3892 && (GET_CODE (XEXP (t, 0)) == PLUS
3893 || GET_CODE (XEXP (t, 0)) == MINUS
3894 || GET_CODE (XEXP (t, 0)) == IOR
3895 || GET_CODE (XEXP (t, 0)) == XOR
3896 || GET_CODE (XEXP (t, 0)) == ASHIFT
3897 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
3898 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
3899 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
3900 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
3901 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
3902 && (num_sign_bit_copies (f, GET_MODE (f))
3903 > (GET_MODE_BITSIZE (mode)
3904 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
3906 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
3907 extend_op = SIGN_EXTEND;
3908 m = GET_MODE (XEXP (t, 0));
3910 else if (GET_CODE (t) == ZERO_EXTEND
3911 && (GET_CODE (XEXP (t, 0)) == PLUS
3912 || GET_CODE (XEXP (t, 0)) == MINUS
3913 || GET_CODE (XEXP (t, 0)) == IOR
3914 || GET_CODE (XEXP (t, 0)) == XOR
3915 || GET_CODE (XEXP (t, 0)) == ASHIFT
3916 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
3917 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
3918 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
3919 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3920 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
3921 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
3922 && ((nonzero_bits (f, GET_MODE (f))
3923 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
3924 == 0))
3926 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
3927 extend_op = ZERO_EXTEND;
3928 m = GET_MODE (XEXP (t, 0));
3931 if (reversible_comparison_p (XEXP (x, 0))
3932 && (GET_CODE (f) == PLUS || GET_CODE (f) == MINUS
3933 || GET_CODE (f) == IOR || GET_CODE (f) == XOR
3934 || GET_CODE (f) == ASHIFT
3935 || GET_CODE (f) == LSHIFTRT || GET_CODE (f) == ASHIFTRT)
3936 && rtx_equal_p (XEXP (f, 0), t))
3938 c1 = XEXP (f, 1), op = GET_CODE (f), z = t;
3939 cond_op = reverse_condition (cond_op);
3941 else if (GET_CODE (f) == SIGN_EXTEND
3942 && (GET_CODE (XEXP (f, 0)) == PLUS
3943 || GET_CODE (XEXP (f, 0)) == MINUS
3944 || GET_CODE (XEXP (f, 0)) == IOR
3945 || GET_CODE (XEXP (f, 0)) == XOR
3946 || GET_CODE (XEXP (f, 0)) == ASHIFT
3947 || GET_CODE (XEXP (f, 0)) == LSHIFTRT
3948 || GET_CODE (XEXP (f, 0)) == ASHIFTRT)
3949 && GET_CODE (XEXP (XEXP (f, 0), 0)) == SUBREG
3950 && subreg_lowpart_p (XEXP (XEXP (f, 0), 0))
3951 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (f, 0), 0)), t)
3952 && (num_sign_bit_copies (t, GET_MODE (t))
3953 > (GET_MODE_BITSIZE (mode)
3954 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (f, 0), 0))))))
3956 c1 = XEXP (XEXP (f, 0), 1); z = t; op = GET_CODE (XEXP (f, 0));
3957 extend_op = SIGN_EXTEND;
3958 m = GET_MODE (XEXP (f, 0));
3959 cond_op = reverse_condition (cond_op);
3961 else if (GET_CODE (f) == ZERO_EXTEND
3962 && (GET_CODE (XEXP (f, 0)) == PLUS
3963 || GET_CODE (XEXP (f, 0)) == MINUS
3964 || GET_CODE (XEXP (f, 0)) == IOR
3965 || GET_CODE (XEXP (f, 0)) == XOR
3966 || GET_CODE (XEXP (f, 0)) == ASHIFT
3967 || GET_CODE (XEXP (f, 0)) == LSHIFTRT
3968 || GET_CODE (XEXP (f, 0)) == ASHIFTRT)
3969 && GET_CODE (XEXP (XEXP (f, 0), 0)) == SUBREG
3970 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3971 && subreg_lowpart_p (XEXP (XEXP (f, 0), 0))
3972 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (f, 0), 0)), t)
3973 && ((nonzero_bits (t, GET_MODE (t))
3974 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (f, 0), 0))))
3975 == 0))
3977 c1 = XEXP (XEXP (f, 0), 1); z = t; op = GET_CODE (XEXP (f, 0));
3978 extend_op = ZERO_EXTEND;
3979 m = GET_MODE (XEXP (f, 0));
3980 cond_op = reverse_condition (cond_op);
3983 if (z)
3985 temp = subst (gen_binary (cond_op, m, cond_op0, cond_op1),
3986 pc_rtx, pc_rtx, 0, 0);
3989 temp = gen_binary (MULT, m, temp,
3990 gen_binary (MULT, m, c1,
3991 GEN_INT (STORE_FLAG_VALUE)));
3993 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3995 if (extend_op != NIL)
3996 temp = gen_unary (extend_op, mode, temp);
3998 return temp;
4001 #endif
4003 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to
4004 be 0 or 1 and C1 is a single bit or A is known to be 0 or -1 and
4005 C1 is the negation of a single bit, we can convert this operation
4006 to a shift. We can actually do this in more general cases, but it
4007 doesn't seem worth it. */
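/* E.g., if A is known to be 0 or 1, (if_then_else (ne A 0) 8 0) is just
   (ashift A 3); if A is known to be 0 or -1 and C1 == -8, (ashift A 3)
   likewise yields 0 or -8.  */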
4009 if (GET_CODE (XEXP (x, 0)) == NE && XEXP (XEXP (x, 0), 1) == const0_rtx
4010 && XEXP (x, 2) == const0_rtx && GET_CODE (XEXP (x, 1)) == CONST_INT
4011 && ((1 == nonzero_bits (XEXP (XEXP (x, 0), 0), mode)
4012 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4013 || ((num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
4014 == GET_MODE_BITSIZE (mode))
4015 && (i = exact_log2 (- INTVAL (XEXP (x, 1)))) >= 0)))
4016 return
4017 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4018 gen_lowpart_for_combine (mode,
4019 XEXP (XEXP (x, 0), 0)),
4021 break;
4023 case ZERO_EXTRACT:
4024 case SIGN_EXTRACT:
4025 case ZERO_EXTEND:
4026 case SIGN_EXTEND:
4027 /* If we are processing SET_DEST, we are done. */
4028 if (in_dest)
4029 return x;
4031 x = expand_compound_operation (x);
4032 if (GET_CODE (x) != code)
4033 goto restart;
4034 break;
4036 case SET:
4037 /* (set (pc) (return)) gets written as (return). */
4038 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
4039 return SET_SRC (x);
4041 /* Convert this into a field assignment operation, if possible. */
4042 x = make_field_assignment (x);
4044 /* If we are setting CC0 or if the source is a COMPARE, look for the
4045 use of the comparison result and try to simplify it unless we already
4046 have used undobuf.other_insn. */
4047 if ((GET_CODE (SET_SRC (x)) == COMPARE
4048 #ifdef HAVE_cc0
4049 || SET_DEST (x) == cc0_rtx
4050 #endif
4052 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
4053 &other_insn)) != 0
4054 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4055 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4056 && XEXP (*cc_use, 0) == SET_DEST (x))
4058 enum rtx_code old_code = GET_CODE (*cc_use);
4059 enum rtx_code new_code;
4060 rtx op0, op1;
4061 int other_changed = 0;
4062 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
4064 if (GET_CODE (SET_SRC (x)) == COMPARE)
4065 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
4066 else
4067 op0 = SET_SRC (x), op1 = const0_rtx;
4069 /* Simplify our comparison, if possible. */
4070 new_code = simplify_comparison (old_code, &op0, &op1);
4072 #ifdef EXTRA_CC_MODES
4073 /* If this machine has CC modes other than CCmode, check to see
4074 if we need to use a different CC mode here. */
4075 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4076 #endif /* EXTRA_CC_MODES */
4078 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4079 /* If the mode changed, we have to change SET_DEST, the mode
4080 in the compare, and the mode in the place SET_DEST is used.
4081 If SET_DEST is a hard register, just build new versions with
4082 the proper mode. If it is a pseudo, we lose unless it is the only
4083 time we set the pseudo, in which case we can safely change
4084 its mode. */
4085 if (compare_mode != GET_MODE (SET_DEST (x)))
4087 int regno = REGNO (SET_DEST (x));
4088 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4090 if (regno < FIRST_PSEUDO_REGISTER
4091 || (reg_n_sets[regno] == 1
4092 && ! REG_USERVAR_P (SET_DEST (x))))
4094 if (regno >= FIRST_PSEUDO_REGISTER)
4095 SUBST (regno_reg_rtx[regno], new_dest);
4097 SUBST (SET_DEST (x), new_dest);
4098 SUBST (XEXP (*cc_use, 0), new_dest);
4099 other_changed = 1;
4102 #endif
4104 /* If the code changed, we have to build a new comparison
4105 in undobuf.other_insn. */
4106 if (new_code != old_code)
4108 unsigned HOST_WIDE_INT mask;
4110 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4111 SET_DEST (x), const0_rtx));
4113 /* If the only change we made was to change an EQ into an
4114 NE or vice versa, OP0 has only one bit that might be nonzero,
4115 and OP1 is zero, check if changing the user of the condition
4116 code will produce a valid insn. If it won't, we can keep
4117 the original code in that insn by surrounding our operation
4118 with an XOR. */
4120 if (((old_code == NE && new_code == EQ)
4121 || (old_code == EQ && new_code == NE))
4122 && ! other_changed && op1 == const0_rtx
4123 && (GET_MODE_BITSIZE (GET_MODE (op0))
4124 <= HOST_BITS_PER_WIDE_INT)
4125 && (exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0)))
4126 >= 0))
4128 rtx pat = PATTERN (other_insn), note = 0;
4130 if ((recog_for_combine (&pat, other_insn, &note) < 0
4131 && ! check_asm_operands (pat)))
4133 PUT_CODE (*cc_use, old_code);
4134 other_insn = 0;
4136 op0 = gen_binary (XOR, GET_MODE (op0), op0,
4137 GEN_INT (mask));
4141 other_changed = 1;
4144 if (other_changed)
4145 undobuf.other_insn = other_insn;
4147 #ifdef HAVE_cc0
4148 /* If we are now comparing against zero, change our source if
4149 needed. If we do not use cc0, we always have a COMPARE. */
4150 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
4151 SUBST (SET_SRC (x), op0);
4152 else
4153 #endif
4155 /* Otherwise, if we didn't previously have a COMPARE in the
4156 correct mode, we need one. */
4157 if (GET_CODE (SET_SRC (x)) != COMPARE
4158 || GET_MODE (SET_SRC (x)) != compare_mode)
4159 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
4160 op0, op1));
4161 else
4163 /* Otherwise, update the COMPARE if needed. */
4164 SUBST (XEXP (SET_SRC (x), 0), op0);
4165 SUBST (XEXP (SET_SRC (x), 1), op1);
4168 else
4170 /* Get SET_SRC in a form where we have placed back any
4171 compound expressions. Then do the checks below. */
4172 temp = make_compound_operation (SET_SRC (x), SET);
4173 SUBST (SET_SRC (x), temp);
4176 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
4177 operation, and X being a REG or (subreg (reg)), we may be able to
4178 convert this to (set (subreg:m2 x) (op)).
4180 We can always do this if M1 is narrower than M2 because that
4181 means that we only care about the low bits of the result.
4183 However, on machines without WORD_REGISTER_OPERATIONS defined,
4184 we cannot perform a narrower operation than requested since the
4185 high-order bits will be undefined. On machines where it is defined,
4186 this transformation is safe as long as M1 and M2 have the same
4187 number of words. */
4189 if (GET_CODE (SET_SRC (x)) == SUBREG
4190 && subreg_lowpart_p (SET_SRC (x))
4191 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
4192 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
4193 / UNITS_PER_WORD)
4194 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
4195 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4196 #ifndef WORD_REGISTER_OPERATIONS
4197 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
4198 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
4199 #endif
4200 && (GET_CODE (SET_DEST (x)) == REG
4201 || (GET_CODE (SET_DEST (x)) == SUBREG
4202 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
4204 SUBST (SET_DEST (x),
4205 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
4206 SET_DEST (x)));
4207 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
4210 #ifdef LOAD_EXTEND_OP
4211 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
4212 M wider than N, this would require a paradoxical subreg.
4213 Replace the subreg with a zero_extend to avoid the reload that
4214 would otherwise be required. */
4216 if (GET_CODE (SET_SRC (x)) == SUBREG
4217 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (SET_SRC (x)))) != NIL
4218 && subreg_lowpart_p (SET_SRC (x))
4219 && SUBREG_WORD (SET_SRC (x)) == 0
4220 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
4221 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
4222 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
4223 SUBST (SET_SRC (x),
4224 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE
4225 (SUBREG_REG (SET_SRC (x)))),
4226 GET_MODE (SET_SRC (x)),
4227 XEXP (SET_SRC (x), 0)));
4228 #endif
4230 #ifndef HAVE_conditional_move
4232 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
4233 and we are comparing an item known to be 0 or -1 against 0, use a
4234 logical operation instead. Check for one of the arms being an IOR
4235 of the other arm with some value. We compute three terms to be
4236 IOR'ed together. In practice, at most two will be nonzero. Then
4237 we do the IOR's. */
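/* The identity used here: when A is known to be 0 or -1,
   (if_then_else (ne A 0) T F) == (ior (and A T) (and (not A) F));
   TERM1 absorbs the case where one arm is an IOR with the other arm.  */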
4239 if (GET_CODE (SET_DEST (x)) != PC
4240 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
4241 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
4242 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
4243 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
4244 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
4245 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
4246 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
4247 && ! side_effects_p (SET_SRC (x)))
4249 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4250 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
4251 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4252 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
4253 rtx term1 = const0_rtx, term2, term3;
4255 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4256 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4257 else if (GET_CODE (true) == IOR
4258 && rtx_equal_p (XEXP (true, 1), false))
4259 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4260 else if (GET_CODE (false) == IOR
4261 && rtx_equal_p (XEXP (false, 0), true))
4262 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4263 else if (GET_CODE (false) == IOR
4264 && rtx_equal_p (XEXP (false, 1), true))
4265 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4267 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4268 XEXP (XEXP (SET_SRC (x), 0), 0), true);
4269 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4270 gen_unary (NOT, GET_MODE (SET_SRC (x)),
4271 XEXP (XEXP (SET_SRC (x), 0), 0)),
4272 false);
4274 SUBST (SET_SRC (x),
4275 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4276 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4277 term1, term2),
4278 term3));
4280 #endif
4281 break;
4283 case AND:
4284 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4286 x = simplify_and_const_int (x, mode, XEXP (x, 0),
4287 INTVAL (XEXP (x, 1)));
4289 /* If we have (ior (and X C1) C2) and the next restart would be
4290 the last, simplify this by making C1 as small as possible
4291 and then exit. */
4292 if (n_restarts >= 3 && GET_CODE (x) == IOR
4293 && GET_CODE (XEXP (x, 0)) == AND
4294 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4295 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4297 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
4298 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
4299 & ~ INTVAL (XEXP (x, 1))));
4300 return gen_binary (IOR, mode, temp, XEXP (x, 1));
4303 if (GET_CODE (x) != AND)
4304 goto restart;
4307 /* Convert (A | B) & A to A. */
4308 if (GET_CODE (XEXP (x, 0)) == IOR
4309 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4310 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4311 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4312 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4313 return XEXP (x, 1);
4315 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4316 insn (and may simplify more). */
4317 else if (GET_CODE (XEXP (x, 0)) == XOR
4318 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4319 && ! side_effects_p (XEXP (x, 1)))
4321 x = gen_binary (AND, mode,
4322 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4323 XEXP (x, 1));
4324 goto restart;
4326 else if (GET_CODE (XEXP (x, 0)) == XOR
4327 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4328 && ! side_effects_p (XEXP (x, 1)))
4330 x = gen_binary (AND, mode,
4331 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4332 XEXP (x, 1));
4333 goto restart;
4336 /* Similarly for (~ (A ^ B)) & A. */
4337 else if (GET_CODE (XEXP (x, 0)) == NOT
4338 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4339 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
4340 && ! side_effects_p (XEXP (x, 1)))
4342 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
4343 XEXP (x, 1));
4344 goto restart;
4346 else if (GET_CODE (XEXP (x, 0)) == NOT
4347 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4348 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
4349 && ! side_effects_p (XEXP (x, 1)))
4351 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
4352 XEXP (x, 1));
4353 goto restart;
4356 /* If we have (and A B) with A not an object but that is known to
4357 be -1 or 0, this is equivalent to the expression
4358 (if_then_else (ne A (const_int 0)) B (const_int 0))
4359 We make this conversion because it may allow further
4360 simplifications and then allow use of conditional move insns.
4361 If the machine doesn't have condition moves, code in case SET
4362 will convert the IF_THEN_ELSE back to the logical operation.
4363 We build the IF_THEN_ELSE here in case further simplification
4364 is possible (e.g., we can convert it to ABS). */
4366 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
4367 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4368 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
4369 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4370 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4372 rtx op0 = XEXP (x, 0);
4373 rtx op1 = const0_rtx;
4374 enum rtx_code comp_code
4375 = simplify_comparison (NE, &op0, &op1);
4377 x = gen_rtx_combine (IF_THEN_ELSE, mode,
4378 gen_binary (comp_code, VOIDmode, op0, op1),
4379 XEXP (x, 1), const0_rtx);
4380 goto restart;
4383 /* In the following group of tests (and those in case IOR below),
4384 we start with some combination of logical operations and apply
4385 the distributive law followed by the inverse distributive law.
4386 Most of the time, this results in no change. However, if some of
4387 the operands are the same or inverses of each other, simplifications
4388 will result.
4390 For example, (and (ior A B) (not B)) can occur as the result of
4391 expanding a bit field assignment. When we apply the distributive
4392 law to this, we get (ior (and A (not B)) (and B (not B))),
4393 which then simplifies to (and A (not B)). */
4395 /* If we have (and (ior A B) C), apply the distributive law and then
4396 the inverse distributive law to see if things simplify. */
4398 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4400 x = apply_distributive_law
4401 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4402 gen_binary (AND, mode,
4403 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4404 gen_binary (AND, mode,
4405 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4406 if (GET_CODE (x) != AND)
4407 goto restart;
4410 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4412 x = apply_distributive_law
4413 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4414 gen_binary (AND, mode,
4415 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4416 gen_binary (AND, mode,
4417 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4418 if (GET_CODE (x) != AND)
4419 goto restart;
4422 /* Similarly, taking advantage of the fact that
4423 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4425 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4427 x = apply_distributive_law
4428 (gen_binary (XOR, mode,
4429 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4430 XEXP (XEXP (x, 1), 0)),
4431 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4432 XEXP (XEXP (x, 1), 1))));
4433 if (GET_CODE (x) != AND)
4434 goto restart;
4437 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4439 x = apply_distributive_law
4440 (gen_binary (XOR, mode,
4441 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4442 XEXP (XEXP (x, 0), 0)),
4443 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4444 XEXP (XEXP (x, 0), 1))));
4445 if (GET_CODE (x) != AND)
4446 goto restart;
4448 break;
4450 case IOR:
4451 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
4452 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4453 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4454 && (nonzero_bits (XEXP (x, 0), mode) & ~ INTVAL (XEXP (x, 1))) == 0)
4455 return XEXP (x, 1);
4457 /* Convert (A & B) | A to A. */
4458 if (GET_CODE (XEXP (x, 0)) == AND
4459 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4460 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4461 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4462 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4463 return XEXP (x, 1);
4465 /* If we have (ior (and A B) C), apply the distributive law and then
4466 the inverse distributive law to see if things simplify. */
4468 if (GET_CODE (XEXP (x, 0)) == AND)
4470 x = apply_distributive_law
4471 (gen_binary (AND, mode,
4472 gen_binary (IOR, mode,
4473 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4474 gen_binary (IOR, mode,
4475 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4477 if (GET_CODE (x) != IOR)
4478 goto restart;
4481 if (GET_CODE (XEXP (x, 1)) == AND)
4483 x = apply_distributive_law
4484 (gen_binary (AND, mode,
4485 gen_binary (IOR, mode,
4486 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4487 gen_binary (IOR, mode,
4488 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4490 if (GET_CODE (x) != IOR)
4491 goto restart;
4494 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4495 mode size to (rotate A CX). */
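/* E.g., in an 8-bit mode, (ior (ashift A 3) (lshiftrt A 5)) is
   (rotate A 3), since 3 + 5 is the mode bitsize.  */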
4497 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4498 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4499 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4500 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4501 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4502 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4503 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4504 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4505 == GET_MODE_BITSIZE (mode)))
4507 rtx shift_count;
4509 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4510 shift_count = XEXP (XEXP (x, 0), 1);
4511 else
4512 shift_count = XEXP (XEXP (x, 1), 1);
4513 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4514 goto restart;
4516 break;
4518 case XOR:
4519 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4520 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4521 (NOT y). */
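/* Two NOTs cancel under XOR, since each one complements every result bit:
   (~X) ^ (~Y) == X ^ Y, while a single NOT hoists out as ~(X ^ Y).  */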
4523 int num_negated = 0;
4524 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4526 if (GET_CODE (in1) == NOT)
4527 num_negated++, in1 = XEXP (in1, 0);
4528 if (GET_CODE (in2) == NOT)
4529 num_negated++, in2 = XEXP (in2, 0);
4531 if (num_negated == 2)
4533 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4534 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4536 else if (num_negated == 1)
4538 x = gen_unary (NOT, mode,
4539 gen_binary (XOR, mode, in1, in2));
4540 goto restart;
4544 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4545 correspond to a machine insn or result in further simplifications
4546 if B is a constant. */
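/* Bitwise check: where a bit of B is 0 the XOR result bit is 0, and
   where it is 1 the result bit is the complement of A's bit, so
   (xor (and A B) B) == (and (not A) B).  */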
4548 if (GET_CODE (XEXP (x, 0)) == AND
4549 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4550 && ! side_effects_p (XEXP (x, 1)))
4552 x = gen_binary (AND, mode,
4553 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4554 XEXP (x, 1));
4555 goto restart;
4557 else if (GET_CODE (XEXP (x, 0)) == AND
4558 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4559 && ! side_effects_p (XEXP (x, 1)))
4561 x = gen_binary (AND, mode,
4562 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4563 XEXP (x, 1));
4564 goto restart;
4568 #if STORE_FLAG_VALUE == 1
4569 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4570 comparison. */
4571 if (XEXP (x, 1) == const1_rtx
4572 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4573 && reversible_comparison_p (XEXP (x, 0)))
4574 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4575 mode, XEXP (XEXP (x, 0), 0),
4576 XEXP (XEXP (x, 0), 1));
4578 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4579 is (lt foo (const_int 0)), so we can perform the above
4580 simplification. */
4582 if (XEXP (x, 1) == const1_rtx
4583 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4584 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4585 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4586 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
4587 #endif
4589 /* (xor (comparison foo bar) (const_int sign-bit))
4590 when STORE_FLAG_VALUE is the sign bit. */
4591 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4592 && (STORE_FLAG_VALUE
4593 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4594 && XEXP (x, 1) == const_true_rtx
4595 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4596 && reversible_comparison_p (XEXP (x, 0)))
4597 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4598 mode, XEXP (XEXP (x, 0), 0),
4599 XEXP (XEXP (x, 0), 1));
4600 break;
4602 case ABS:
4603 /* (abs (neg <foo>)) -> (abs <foo>) */
4604 if (GET_CODE (XEXP (x, 0)) == NEG)
4605 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4607 /* If operand is something known to be positive, ignore the ABS. */
4608 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4609 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4610 <= HOST_BITS_PER_WIDE_INT)
4611 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4612 & ((HOST_WIDE_INT) 1
4613 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4614 == 0)))
4615 return XEXP (x, 0);
4618 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4619 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4621 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4622 goto restart;
4624 break;
4626 case FFS:
4627 /* (ffs (*_extend <X>)) = (ffs <X>) */
4628 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4629 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4630 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4631 break;
4633 case FLOAT:
4634 /* (float (sign_extend <X>)) = (float <X>). */
4635 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4636 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4637 break;
4639 case LSHIFT:
4640 case ASHIFT:
4641 case LSHIFTRT:
4642 case ASHIFTRT:
4643 case ROTATE:
4644 case ROTATERT:
4645 /* If this is a shift by a constant amount, simplify it. */
4646 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4648 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4649 INTVAL (XEXP (x, 1)));
4650 if (GET_CODE (x) != code)
4651 goto restart;
4654 #ifdef SHIFT_COUNT_TRUNCATED
4655 else if (GET_CODE (XEXP (x, 1)) != REG)
4656 SUBST (XEXP (x, 1),
4657 force_to_mode (XEXP (x, 1), GET_MODE (x),
4658 ((HOST_WIDE_INT) 1
4659 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4660 - 1,
4661 NULL_RTX));
4662 #endif
4664 break;
4667 return x;
4670 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4671 operations" because they can be replaced with two more basic operations.
4672 ZERO_EXTEND is also considered "compound" because it can be replaced with
4673 an AND operation, which is simpler, though only one operation.
4675 The function expand_compound_operation is called with an rtx expression
4676 and will convert it to the appropriate shifts and AND operations,
4677 simplifying at each stage.
4679 The function make_compound_operation is called to convert an expression
4680 consisting of shifts and ANDs into the equivalent compound expression.
4681 It is the inverse of this function, loosely speaking. */
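/* For example (taking QImode inside 32-bit SImode for concreteness),
   (sign_extend:SI X:QI) expands to the arithmetic shift pair
   (ashiftrt:SI (ashift:SI X 24) 24), while the ZERO_EXTEND form becomes
   the corresponding logical pair, or simply (and:SI X 255).  */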
4683 static rtx
4684 expand_compound_operation (x)
4685 rtx x;
4687 int pos = 0, len;
4688 int unsignedp = 0;
4689 int modewidth;
4690 rtx tem;
4692 switch (GET_CODE (x))
4694 case ZERO_EXTEND:
4695 unsignedp = 1;
4696 case SIGN_EXTEND:
4697 /* We can't necessarily use a const_int for a multiword mode;
4698 it depends on implicitly extending the value.
4699 Since we don't know the right way to extend it,
4700 we can't tell whether the implicit way is right.
4702 Even for a mode that is no wider than a const_int,
4703 we can't win, because we need to sign extend one of its bits through
4704 the rest of it, and we don't know which bit. */
4705 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4706 return x;
4708 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4709 return x;
4711 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4712 /* If the inner object has VOIDmode (the only way this can happen
4713 is if it is an ASM_OPERANDS), we can't do anything since we don't
4714 know how much masking to do. */
4715 if (len == 0)
4716 return x;
4718 break;
4720 case ZERO_EXTRACT:
4721 unsignedp = 1;
4722 case SIGN_EXTRACT:
4723 /* If the operand is a CLOBBER, just return it. */
4724 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4725 return XEXP (x, 0);
4727 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4728 || GET_CODE (XEXP (x, 2)) != CONST_INT
4729 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4730 return x;
4732 len = INTVAL (XEXP (x, 1));
4733 pos = INTVAL (XEXP (x, 2));
4735 /* If this goes outside the object being extracted, replace the object
4736 with a (use (mem ...)) construct that only combine understands
4737 and is used only for this purpose. */
4738 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4739 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4741 #if BITS_BIG_ENDIAN
4742 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4743 #endif
4744 break;
4746 default:
4747 return x;
4750 /* If we reach here, we want to return a pair of shifts. The inner
4751 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4752 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4753 logical depending on the value of UNSIGNEDP.
4755 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4756 converted into an AND of a shift.
4758 We must check for the case where the left shift would have a negative
4759 count. This can happen in a case like (x >> 31) & 255 on machines
4760 that can't shift by a constant. On those machines, we would first
4761 combine the shift with the AND to produce a variable-position
4762 extraction. Then the constant of 31 would be substituted in to produce
4763 such a position. */
4765 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4766 if (modewidth >= pos - len)
4767 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4768 GET_MODE (x),
4769 simplify_shift_const (NULL_RTX, ASHIFT,
4770 GET_MODE (x),
4771 XEXP (x, 0),
4772 modewidth - pos - len),
4773 modewidth - len);
4775 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4776 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4777 simplify_shift_const (NULL_RTX, LSHIFTRT,
4778 GET_MODE (x),
4779 XEXP (x, 0), pos),
4780 ((HOST_WIDE_INT) 1 << len) - 1);
4781 else
4782 /* Any other cases we can't handle. */
4783 return x;
4786 /* If we couldn't do this for some reason, return the original
4787 expression. */
4788 if (GET_CODE (tem) == CLOBBER)
4789 return x;
4791 return tem;
4794 /* X is a SET which contains an assignment of one object into
4795 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4796 or certain SUBREGS). If possible, convert it into a series of
4797 logical operations.
4799 We half-heartedly support variable positions, but do not at all
4800 support variable lengths. */
4802 static rtx
4803 expand_field_assignment (x)
4804 rtx x;
4806 rtx inner;
4807 rtx pos; /* Always counts from low bit. */
4808 int len;
4809 rtx mask;
4810 enum machine_mode compute_mode;
4812 /* Loop until we find something we can't simplify. */
4813 while (1)
4815 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4816 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4818 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4819 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4820 pos = const0_rtx;
4822 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4823 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4825 inner = XEXP (SET_DEST (x), 0);
4826 len = INTVAL (XEXP (SET_DEST (x), 1));
4827 pos = XEXP (SET_DEST (x), 2);
4829 /* If the position is constant and the field extends beyond the width
4830 of INNER, surround INNER with a USE to indicate this. */
4831 if (GET_CODE (pos) == CONST_INT
4832 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4833 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4835 #if BITS_BIG_ENDIAN
4836 if (GET_CODE (pos) == CONST_INT)
4837 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4838 - INTVAL (pos));
4839 else if (GET_CODE (pos) == MINUS
4840 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4841 && (INTVAL (XEXP (pos, 1))
4842 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4843 /* If position is ADJUST - X, new position is X. */
4844 pos = XEXP (pos, 0);
4845 else
4846 pos = gen_binary (MINUS, GET_MODE (pos),
4847 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4848 - len),
4849 pos);
4850 #endif
4853 /* A SUBREG between two modes that occupy the same number of words
4854 can be done by moving the SUBREG to the source. */
4855 else if (GET_CODE (SET_DEST (x)) == SUBREG
4856 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4857 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4858 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4859 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4861 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4862 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4863 SET_SRC (x)));
4864 continue;
4866 else
4867 break;
4869 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4870 inner = SUBREG_REG (inner);
4872 compute_mode = GET_MODE (inner);
4874 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4875 if (len < HOST_BITS_PER_WIDE_INT)
4876 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4877 else
4878 break;
4880 /* Now compute the equivalent expression. Make a copy of INNER
4881 for the SET_DEST in case it is a MEM into which we will substitute;
4882 we don't want shared RTL in that case. */
4883 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4884 gen_binary (IOR, compute_mode,
4885 gen_binary (AND, compute_mode,
4886 gen_unary (NOT, compute_mode,
4887 gen_binary (ASHIFT,
4888 compute_mode,
4889 mask, pos)),
4890 inner),
4891 gen_binary (ASHIFT, compute_mode,
4892 gen_binary (AND, compute_mode,
4893 gen_lowpart_for_combine
4894 (compute_mode,
4895 SET_SRC (x)),
4896 mask),
4897 pos)));
4900 return x;
4903 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4904 it is an RTX that represents a variable starting position; otherwise,
4905 POS is the (constant) starting bit position (counted from the LSB).
4907 INNER may be a USE. This will occur when we started with a bitfield
4908 that went outside the boundary of the object in memory, which is
4909 allowed on most machines. To isolate this case, we produce a USE
4910 whose mode is wide enough and surround the MEM with it. The only
4911 code that understands the USE is this routine. If it is not removed,
4912 it will cause the resulting insn not to match.
4914 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4915 signed reference.
4917 IN_DEST is non-zero if this is a reference in the destination of a
4918 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4919 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4920 be used.
4922 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4923 ZERO_EXTRACT should be built even for bits starting at bit 0.
4925 MODE is the desired mode of the result (if IN_DEST == 0). */
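/* For example, a call with MODE == SImode, INNER == (reg:SI 65), POS == 3,
   POS_RTX == 0, LEN == 4, UNSIGNEDP == 1 and IN_DEST == IN_COMPARE == 0
   normally yields (zero_extract:SI (reg:SI 65) (const_int 4) (const_int 3)),
   i.e. the value (REG >> 3) & 15. */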
4927 static rtx
4928 make_extraction (mode, inner, pos, pos_rtx, len,
4929 unsignedp, in_dest, in_compare)
4930 enum machine_mode mode;
4931 rtx inner;
4932 int pos;
4933 rtx pos_rtx;
4934 int len;
4935 int unsignedp;
4936 int in_dest, in_compare;
4938 /* This mode describes the size of the storage area
4939 to fetch the overall value from. Within that, we
4940 ignore the POS lowest bits, etc. */
4941 enum machine_mode is_mode = GET_MODE (inner);
4942 enum machine_mode inner_mode;
4943 enum machine_mode wanted_mem_mode = byte_mode;
4944 enum machine_mode pos_mode = word_mode;
4945 enum machine_mode extraction_mode = word_mode;
4946 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4947 int spans_byte = 0;
4948 rtx new = 0;
4949 rtx orig_pos_rtx = pos_rtx;
4950 int orig_pos;
4952 /* Get some information about INNER and get the innermost object. */
4953 if (GET_CODE (inner) == USE)
4954 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
4955 /* We don't need to adjust the position because we set up the USE
4956 to pretend that it was a full-word object. */
4957 spans_byte = 1, inner = XEXP (inner, 0);
4958 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4960 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4961 consider just the QI as the memory to extract from.
4962 The subreg adds or removes high bits; its mode is
4963 irrelevant to the meaning of this extraction,
4964 since POS and LEN count from the lsb. */
4965 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4966 is_mode = GET_MODE (SUBREG_REG (inner));
4967 inner = SUBREG_REG (inner);
4970 inner_mode = GET_MODE (inner);
4972 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4973 pos = INTVAL (pos_rtx), pos_rtx = 0;
4975 /* See if this can be done without an extraction. We never can if the
4976 width of the field is not the same as that of some integer mode. For
4977 registers, we can only avoid the extraction if the position is at the
4978 low-order bit and this is either not in the destination or we have the
4979 appropriate STRICT_LOW_PART operation available.
4981 For MEM, we can avoid an extract if the field starts on an appropriate
4982 boundary and we can change the mode of the memory reference. However,
4983 we cannot directly access the MEM if we have a USE and the underlying
4984 MEM is not TMODE. This combination means that MEM was being used in a
4985 context where bits outside its mode were being referenced; that is only
4986 valid in bit-field insns. */
4988 if (tmode != BLKmode
4989 && ! (spans_byte && inner_mode != tmode)
4990 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
4991 && (! in_dest
4992 || (GET_CODE (inner) == REG
4993 && (movstrict_optab->handlers[(int) tmode].insn_code
4994 != CODE_FOR_nothing))))
4995 || (GET_CODE (inner) == MEM && pos_rtx == 0
4996 && (pos
4997 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4998 : BITS_PER_UNIT)) == 0
4999 /* We can't do this if we are widening INNER_MODE (it
5000 may not be aligned, for one thing). */
5001 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5002 && (inner_mode == tmode
5003 || (! mode_dependent_address_p (XEXP (inner, 0))
5004 && ! MEM_VOLATILE_P (inner))))))
5006 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5007 field. If the original and current mode are the same, we need not
5008 adjust the offset. Otherwise, we do so if bytes are big endian.
5010 If INNER is not a MEM, get a piece consisting of just the field
5011 of interest (in this case POS must be 0). */
5013 if (GET_CODE (inner) == MEM)
5015 int offset;
5016 /* POS counts from lsb, but make OFFSET count in memory order. */
5017 if (BYTES_BIG_ENDIAN)
5018 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5019 else
5020 offset = pos / BITS_PER_UNIT;
5022 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5023 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5024 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5025 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5027 else if (GET_CODE (inner) == REG)
5028 /* We can't call gen_lowpart_for_combine here since we always want
5029 a SUBREG and it would sometimes return a new hard register. */
5030 new = gen_rtx (SUBREG, tmode, inner,
5031 (WORDS_BIG_ENDIAN
5032 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5033 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
5034 / UNITS_PER_WORD)
5035 : 0));
5036 else
5037 new = force_to_mode (inner, tmode,
5038 len >= HOST_BITS_PER_WIDE_INT
5039 ? GET_MODE_MASK (tmode)
5040 : ((HOST_WIDE_INT) 1 << len) - 1,
5041 NULL_RTX);
5043 /* If this extraction is going into the destination of a SET,
5044 make a STRICT_LOW_PART unless we made a MEM. */
5046 if (in_dest)
5047 return (GET_CODE (new) == MEM ? new
5048 : (GET_CODE (new) != SUBREG
5049 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5050 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5052 /* Otherwise, sign- or zero-extend unless we already are in the
5053 proper mode. */
5055 return (mode == tmode ? new
5056 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5057 mode, new));
5060 /* Unless this is a COMPARE or we have a funny memory reference,
5061 don't do anything with zero-extending field extracts starting at
5062 the low-order bit since they are simple AND operations. */
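/* E.g. (zero_extract:SI X (const_int 8) (const_int 0)) is just
   (and:SI X (const_int 255)) and needs no extraction insn. */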
5063 if (pos_rtx == 0 && pos == 0 && ! in_dest
5064 && ! in_compare && ! spans_byte && unsignedp)
5065 return 0;
5067 /* Get the mode to use should INNER be a MEM, the mode for the position,
5068 and the mode for the result. */
5069 #ifdef HAVE_insv
5070 if (in_dest)
5072 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5073 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5074 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5076 #endif
5078 #ifdef HAVE_extzv
5079 if (! in_dest && unsignedp)
5081 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5082 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5083 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5085 #endif
5087 #ifdef HAVE_extv
5088 if (! in_dest && ! unsignedp)
5090 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5091 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5092 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5094 #endif
5096 /* Never narrow an object, since that might not be safe. */
5098 if (mode != VOIDmode
5099 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5100 extraction_mode = mode;
5102 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5103 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5104 pos_mode = GET_MODE (pos_rtx);
5106 /* If this is not from memory or we have to change the mode of memory and
5107 cannot, the desired mode is EXTRACTION_MODE. */
5108 if (GET_CODE (inner) != MEM
5109 || (inner_mode != wanted_mem_mode
5110 && (mode_dependent_address_p (XEXP (inner, 0))
5111 || MEM_VOLATILE_P (inner))))
5112 wanted_mem_mode = extraction_mode;
5114 orig_pos = pos;
5116 #if BITS_BIG_ENDIAN
5117 /* If position is constant, compute new position. Otherwise, build
5118 subtraction. */
5119 if (pos_rtx == 0)
5120 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
5121 - len - pos);
5122 else
5123 pos_rtx
5124 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5125 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
5126 GET_MODE_BITSIZE (wanted_mem_mode))
5127 - len),
5128 pos_rtx);
5129 #endif
5131 /* If INNER has a wider mode, make it smaller. If this is a constant
5132 extract, try to adjust the address to point to the byte containing
5133 the value. */
5134 if (wanted_mem_mode != VOIDmode
5135 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
5136 && ((GET_CODE (inner) == MEM
5137 && (inner_mode == wanted_mem_mode
5138 || (! mode_dependent_address_p (XEXP (inner, 0))
5139 && ! MEM_VOLATILE_P (inner))))))
5141 int offset = 0;
5143 /* The computations below will be correct if the machine is big
5144 endian in both bits and bytes or little endian in bits and bytes.
5145 If it is mixed, we must adjust. */
5147 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5148 adjust OFFSET to compensate. */
5149 #if BYTES_BIG_ENDIAN
5150 if (! spans_byte
5151 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5152 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5153 #endif
5155 /* If this is a constant position, we can move to the desired byte. */
5156 if (pos_rtx == 0)
5158 offset += pos / BITS_PER_UNIT;
5159 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
5162 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5163 if (! spans_byte && is_mode != wanted_mem_mode)
5164 offset = (GET_MODE_SIZE (is_mode)
5165 - GET_MODE_SIZE (wanted_mem_mode) - offset);
5166 #endif
5168 if (offset != 0 || inner_mode != wanted_mem_mode)
5170 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
5171 plus_constant (XEXP (inner, 0), offset));
5172 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5173 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5174 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5175 inner = newmem;
5179 /* If INNER is not memory, we can always get it into the proper mode. */
5180 else if (GET_CODE (inner) != MEM)
5181 inner = force_to_mode (inner, extraction_mode,
5182 pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5183 ? GET_MODE_MASK (extraction_mode)
5184 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5185 NULL_RTX);
5187 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5188 have to zero extend. Otherwise, we can just use a SUBREG. */
5189 if (pos_rtx != 0
5190 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5191 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5192 else if (pos_rtx != 0
5193 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5194 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5196 /* Make POS_RTX unless we already have it and it is correct. If we don't
5197 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5198 be a CONST_INT. */
5199 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5200 pos_rtx = orig_pos_rtx;
5202 else if (pos_rtx == 0)
5203 pos_rtx = GEN_INT (pos);
5205 /* Make the required operation. See if we can use an existing rtx. */
5206 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5207 extraction_mode, inner, GEN_INT (len), pos_rtx);
5208 if (! in_dest)
5209 new = gen_lowpart_for_combine (mode, new);
5211 return new;
5214 /* Look at the expression rooted at X. Look for expressions
5215 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5216 Form these expressions.
5218 Return the new rtx, usually just X.
5220 Also, for machines like the Vax that don't have logical shift insns,
5221 try to convert logical to arithmetic shift operations in cases where
5222 they are equivalent. This undoes the canonicalizations to logical
5223 shifts done elsewhere.
5225 We try, as much as possible, to re-use rtl expressions to save memory.
5227 IN_CODE says what kind of expression we are processing. Normally, it is
5228 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
5229 being kludges), it is MEM. When processing the arguments of a comparison
5230 or a COMPARE against zero, it is COMPARE. */
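/* For example, inside an address this routine rewrites the canonical
   (plus (ashift (reg) (const_int 2)) (reg)) as
   (plus (mult (reg) (const_int 4)) (reg)), while in a SET it rewrites
   (and (lshiftrt X (const_int 3)) (const_int 15)) as a zero-extraction
   of 4 bits starting at bit 3. */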
5232 static rtx
5233 make_compound_operation (x, in_code)
5234 rtx x;
5235 enum rtx_code in_code;
5237 enum rtx_code code = GET_CODE (x);
5238 enum machine_mode mode = GET_MODE (x);
5239 int mode_width = GET_MODE_BITSIZE (mode);
5240 enum rtx_code next_code;
5241 int i, count;
5242 rtx new = 0;
5243 rtx tem;
5244 char *fmt;
5246 /* Select the code to be used in recursive calls. Once we are inside an
5247 address, we stay there. If we have a comparison, set to COMPARE,
5248 but once inside, go back to our default of SET. */
5250 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5251 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5252 && XEXP (x, 1) == const0_rtx) ? COMPARE
5253 : in_code == COMPARE ? SET : in_code);
5255 /* Process depending on the code of this operation. If NEW is set
5256 non-zero, it will be returned. */
5258 switch (code)
5260 case ASHIFT:
5261 case LSHIFT:
5262 /* Convert shifts by constants into multiplications if inside
5263 an address. */
5264 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5265 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5266 && INTVAL (XEXP (x, 1)) >= 0)
5268 new = make_compound_operation (XEXP (x, 0), next_code);
5269 new = gen_rtx_combine (MULT, mode, new,
5270 GEN_INT ((HOST_WIDE_INT) 1
5271 << INTVAL (XEXP (x, 1))));
5273 break;
5275 case AND:
5276 /* If the second operand is not a constant, we can't do anything
5277 with it. */
5278 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5279 break;
5281 /* If the constant is a power of two minus one and the first operand
5282 is a logical right shift, make an extraction. */
5283 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5284 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5286 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5287 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5288 0, in_code == COMPARE);
5291 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5292 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5293 && subreg_lowpart_p (XEXP (x, 0))
5294 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5295 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5297 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5298 next_code);
5299 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5300 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5301 0, in_code == COMPARE);
5303 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
5304 else if ((GET_CODE (XEXP (x, 0)) == XOR
5305 || GET_CODE (XEXP (x, 0)) == IOR)
5306 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5307 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5308 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5310 /* Apply the distributive law, and then try to make extractions. */
5311 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5312 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5313 XEXP (x, 1)),
5314 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5315 XEXP (x, 1)));
5316 new = make_compound_operation (new, in_code);
5319 /* If we have (and (rotate X C) M) and C is larger than the number
5320 of bits in M, this is an extraction. */
5322 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5323 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5324 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5325 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5327 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5328 new = make_extraction (mode, new,
5329 (GET_MODE_BITSIZE (mode)
5330 - INTVAL (XEXP (XEXP (x, 0), 1))),
5331 NULL_RTX, i, 1, 0, in_code == COMPARE);
5334 /* On machines without logical shifts, if the operand of the AND is
5335 a logical shift and our mask turns off all the propagated sign
5336 bits, we can replace the logical shift with an arithmetic shift. */
5337 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5338 && (lshr_optab->handlers[(int) mode].insn_code
5339 == CODE_FOR_nothing)
5340 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5341 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5342 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5343 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5344 && mode_width <= HOST_BITS_PER_WIDE_INT)
5346 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5348 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5349 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5350 SUBST (XEXP (x, 0),
5351 gen_rtx_combine (ASHIFTRT, mode,
5352 make_compound_operation (XEXP (XEXP (x, 0), 0),
5353 next_code),
5354 XEXP (XEXP (x, 0), 1)));
5357 /* If the constant is one less than a power of two, this might be
5358 representable by an extraction even if no shift is present.
5359 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5360 we are in a COMPARE. */
5361 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5362 new = make_extraction (mode,
5363 make_compound_operation (XEXP (x, 0),
5364 next_code),
5365 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
5367 /* If we are in a comparison and this is an AND with a power of two,
5368 convert this into the appropriate bit extract. */
5369 else if (in_code == COMPARE
5370 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5371 new = make_extraction (mode,
5372 make_compound_operation (XEXP (x, 0),
5373 next_code),
5374 i, NULL_RTX, 1, 1, 0, 1);
5376 break;
5378 case LSHIFTRT:
5379 /* If the sign bit is known to be zero, replace this with an
5380 arithmetic shift. */
5381 if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5382 && lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5383 && mode_width <= HOST_BITS_PER_WIDE_INT
5384 && (nonzero_bits (XEXP (x, 0), mode) & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
5386 new = gen_rtx_combine (ASHIFTRT, mode,
5387 make_compound_operation (XEXP (x, 0),
5388 next_code),
5389 XEXP (x, 1));
5390 break;
5393 /* ... fall through ... */
5395 case ASHIFTRT:
5396 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5397 this is a SIGN_EXTRACT. */
5398 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5399 && GET_CODE (XEXP (x, 0)) == ASHIFT
5400 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5401 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
5403 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5404 new = make_extraction (mode, new,
5405 (INTVAL (XEXP (x, 1))
5406 - INTVAL (XEXP (XEXP (x, 0), 1))),
5407 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5408 code == LSHIFTRT, 0, in_code == COMPARE);
5411 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
5412 cases, we are better off returning a SIGN_EXTEND of the operation. */
5414 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5415 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
5416 || GET_CODE (XEXP (x, 0)) == XOR
5417 || GET_CODE (XEXP (x, 0)) == PLUS)
5418 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5419 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5420 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
5421 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5422 && 0 == (INTVAL (XEXP (XEXP (x, 0), 1))
5423 & (((HOST_WIDE_INT) 1
5424 << (MIN (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)),
5425 INTVAL (XEXP (x, 1)))
5426 - 1)))))
5428 rtx c1 = XEXP (XEXP (XEXP (x, 0), 0), 1);
5429 rtx c2 = XEXP (x, 1);
5430 rtx c3 = XEXP (XEXP (x, 0), 1);
5431 HOST_WIDE_INT newop1;
5432 rtx inner = XEXP (XEXP (XEXP (x, 0), 0), 0);
5434 /* If C1 > C2, INNER needs to have the shift performed on it
5435 for C1-C2 bits. */
5436 if (INTVAL (c1) > INTVAL (c2))
5438 inner = gen_binary (ASHIFT, mode, inner,
5439 GEN_INT (INTVAL (c1) - INTVAL (c2)));
5440 c1 = c2;
5443 newop1 = INTVAL (c3) >> INTVAL (c1);
5444 new = make_compound_operation (inner,
5445 GET_CODE (XEXP (x, 0)) == PLUS
5446 ? MEM : GET_CODE (XEXP (x, 0)));
5447 new = make_extraction (mode,
5448 gen_binary (GET_CODE (XEXP (x, 0)), mode, new,
5449 GEN_INT (newop1)),
5450 INTVAL (c2) - INTVAL (c1),
5451 NULL_RTX, mode_width - INTVAL (c2),
5452 code == LSHIFTRT, 0, in_code == COMPARE);
5455 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
5456 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5457 && GET_CODE (XEXP (x, 0)) == NEG
5458 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5459 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5460 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
5462 new = make_compound_operation (XEXP (XEXP (XEXP (x, 0), 0), 0),
5463 next_code);
5464 new = make_extraction (mode,
5465 gen_unary (GET_CODE (XEXP (x, 0)), mode, new),
5466 (INTVAL (XEXP (x, 1))
5467 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5468 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5469 code == LSHIFTRT, 0, in_code == COMPARE);
5471 break;
5473 case SUBREG:
5474 /* Call ourselves recursively on the inner expression. If we are
5475 narrowing the object and it has a different RTL code from
5476 what it originally did, do this SUBREG as a force_to_mode. */
5478 tem = make_compound_operation (SUBREG_REG (x), in_code);
5479 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5480 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5481 && subreg_lowpart_p (x))
5483 rtx newer = force_to_mode (tem, mode,
5484 GET_MODE_MASK (mode), NULL_RTX);
5486 /* If we have something other than a SUBREG, we might have
5487 done an expansion, so rerun ourselves. */
5488 if (GET_CODE (newer) != SUBREG)
5489 newer = make_compound_operation (newer, in_code);
5491 return newer;
5495 if (new)
5497 x = gen_lowpart_for_combine (mode, new);
5498 code = GET_CODE (x);
5501 /* Now recursively process each operand of this operation. */
5502 fmt = GET_RTX_FORMAT (code);
5503 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5504 if (fmt[i] == 'e')
5506 new = make_compound_operation (XEXP (x, i), next_code);
5507 SUBST (XEXP (x, i), new);
5510 return x;
5513 /* Given M see if it is a value that would select a field of bits
5514 within an item, but not the entire word. Return -1 if not.
5515 Otherwise, return the starting position of the field, where 0 is the
5516 low-order bit.
5518 *PLEN is set to the length of the field. */
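/* For example, M == 0x78 (binary 0111 1000) selects a field: m & -m == 8,
   so the position is 3, and (m >> 3) + 1 == 16 is a power of two, so *PLEN
   gets 4. M == 5 does not: (5 >> 0) + 1 == 6 is not a power of two, so we
   return -1. */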
5520 static int
5521 get_pos_from_mask (m, plen)
5522 unsigned HOST_WIDE_INT m;
5523 int *plen;
5525 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5526 int pos = exact_log2 (m & - m);
5528 if (pos < 0)
5529 return -1;
5531 /* Now shift off the low-order zero bits and see if we have a power of
5532 two minus 1. */
5533 *plen = exact_log2 ((m >> pos) + 1);
5535 if (*plen <= 0)
5536 return -1;
5538 return pos;
5541 /* See if X can be simplified knowing that we will only refer to it in
5542 MODE and will only refer to those bits that are nonzero in MASK.
5543 If other bits are being computed or if masking operations are done
5544 that select a superset of the bits in MASK, they can sometimes be
5545 ignored.
5547 Return a possibly simplified expression, but always convert X to
5548 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
5550 Also, if REG is non-zero and X is a register equal in value to REG,
5551 replace X with REG. */
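/* For example, with MASK == 15, (and X (const_int 240)) becomes
   (const_int 0), since none of its nonzero bits survive the mask, while
   (and X (const_int 255)) typically reduces to X itself (converted to
   MODE): an AND with exactly the bits in MASK is redundant for a user
   that only looks at those bits. */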
5553 static rtx
5554 force_to_mode (x, mode, mask, reg)
5555 rtx x;
5556 enum machine_mode mode;
5557 unsigned HOST_WIDE_INT mask;
5558 rtx reg;
5560 enum rtx_code code = GET_CODE (x);
5561 enum machine_mode op_mode;
5562 unsigned HOST_WIDE_INT fuller_mask, nonzero;
5563 rtx op0, op1, temp;
5565 /* We want to perform the operation in its present mode unless we know
5566 that the operation is valid in MODE, in which case we do the operation
5567 in MODE. */
5568 op_mode = ((code_to_optab[(int) code] != 0
5569 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
5570 != CODE_FOR_nothing))
5571 ? mode : GET_MODE (x));
5573 /* Truncate MASK to fit OP_MODE. */
5574 if (op_mode)
5575 mask &= GET_MODE_MASK (op_mode);
5577 /* When we have an arithmetic operation, or a shift whose count we
5578 do not know, we need to assume that all bits up to the highest-order
5579 bit in MASK will be needed. This is how we form such a mask. */
5580 if (op_mode)
5581 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
5582 ? GET_MODE_MASK (op_mode)
5583 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
5584 else
5585 fuller_mask = ~ (HOST_WIDE_INT) 0;
5587 /* Determine what bits of X are guaranteed to be (non)zero. */
5588 nonzero = nonzero_bits (x, mode);
5590 /* If none of the bits in X are needed, return a zero. */
5591 if ((nonzero & mask) == 0)
5592 return const0_rtx;
5594 /* If X is a CONST_INT, return a new one. Do this here since the
5595 test below will fail. */
5596 if (GET_CODE (x) == CONST_INT)
5597 return GEN_INT (INTVAL (x) & mask);
5599 /* If X is narrower than MODE, just get X in the proper mode. */
5600 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
5601 return gen_lowpart_for_combine (mode, x);
5603 /* If we aren't changing the mode and all zero bits in MASK are already
5604 known to be zero in X, we need not do anything. */
5605 if (GET_MODE (x) == mode && (~ mask & nonzero) == 0)
5606 return x;
5608 switch (code)
5610 case CLOBBER:
5611 /* If X is a (clobber (const_int)), return it since we know we are
5612 generating something that won't match. */
5613 return x;
5615 #if ! BITS_BIG_ENDIAN
5616 case USE:
5617 /* X is a (use (mem ..)) that was made from a bit-field extraction that
5618 spanned the boundary of the MEM. If we are now masking so it is
5619 within that boundary, we don't need the USE any more. */
5620 if ((mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5621 return force_to_mode (XEXP (x, 0), mode, mask, reg);
5622 #endif
5624 case SIGN_EXTEND:
5625 case ZERO_EXTEND:
5626 case ZERO_EXTRACT:
5627 case SIGN_EXTRACT:
5628 x = expand_compound_operation (x);
5629 if (GET_CODE (x) != code)
5630 return force_to_mode (x, mode, mask, reg);
5631 break;
5633 case REG:
5634 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5635 || rtx_equal_p (reg, get_last_value (x))))
5636 x = reg;
5637 break;
5639 case SUBREG:
5640 if (subreg_lowpart_p (x)
5641 /* We can ignore the effect of this SUBREG if it narrows the mode or,
5642 on machines where register operations are performed on the full
5643 word, if the constant masks to zero all the bits the mode
5644 doesn't have. */
5645 && ((GET_MODE_SIZE (GET_MODE (x))
5646 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5647 #ifdef WORD_REGISTER_OPERATIONS
5648 || (0 == (mask
5649 & GET_MODE_MASK (GET_MODE (x))
5650 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))
5651 #endif
5653 return force_to_mode (SUBREG_REG (x), mode, mask, reg);
5654 break;
5656 case AND:
5657 /* If this is an AND with a constant, convert it into an AND
5658 whose constant is the AND of that constant with MASK. If it
5659 remains an AND of MASK, delete it since it is redundant. */
5661 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5662 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
5664 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
5665 mask & INTVAL (XEXP (x, 1)));
5667 /* If X is still an AND, see if it is an AND with a mask that
5668 is just some low-order bits. If so, and the constant equals MASK
5669 (it can't be any wider), the AND is redundant and we drop it.
5671 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5672 && INTVAL (XEXP (x, 1)) == mask)
5673 x = XEXP (x, 0);
5675 break;
5678 goto binop;
5680 case PLUS:
5681 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5682 low-order bits (as in an alignment operation) and FOO is already
5683 aligned to that boundary, mask C1 to that boundary as well.
5684 This may eliminate that PLUS and, later, the AND. */
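/* For example, if FOO is known to be a multiple of 8 (its low three
   nonzero bits are clear), then in (and (plus FOO (const_int 11))
   (const_int -8)) the 11 can be masked down to 8, since
   (FOO + 11) & ~7 == FOO + 8. */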
5685 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5686 && exact_log2 (- mask) >= 0
5687 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
5688 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
5689 return force_to_mode (plus_constant (XEXP (x, 0),
5690 INTVAL (XEXP (x, 1)) & mask),
5691 mode, mask, reg);
5693 /* ... fall through ... */
5695 case MINUS:
5696 case MULT:
5697 /* For PLUS, MINUS and MULT, we need any bits less significant than the
5698 most significant bit in MASK since carries from those bits will
5699 affect the bits we are interested in. */
5700 mask = fuller_mask;
5701 goto binop;
5703 case IOR:
5704 case XOR:
5705 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5706 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5707 operation which may be a bitfield extraction. Ensure that the
5708 constant we form is not wider than the mode of X. */
5710 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5711 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5712 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5713 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5714 && GET_CODE (XEXP (x, 1)) == CONST_INT
5715 && ((INTVAL (XEXP (XEXP (x, 0), 1))
5716 + floor_log2 (INTVAL (XEXP (x, 1))))
5717 < GET_MODE_BITSIZE (GET_MODE (x)))
5718 && ((INTVAL (XEXP (x, 1))
5719 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0))
5721 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5722 << INTVAL (XEXP (XEXP (x, 0), 1)));
5723 temp = gen_binary (GET_CODE (x), GET_MODE (x),
5724 XEXP (XEXP (x, 0), 0), temp);
5725 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (x, 1));
5726 return force_to_mode (x, mode, mask, reg);
5729 binop:
5730 /* For most binary operations, just propagate into the operation and
5731 change the mode if we have an operation of that mode. */
5733 op0 = gen_lowpart_for_combine (op_mode, force_to_mode (XEXP (x, 0),
5734 mode, mask, reg));
5735 op1 = gen_lowpart_for_combine (op_mode, force_to_mode (XEXP (x, 1),
5736 mode, mask, reg));
5738 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
5739 x = gen_binary (code, op_mode, op0, op1);
5740 break;
5742 case ASHIFT:
5743 case LSHIFT:
5744 /* For left shifts, do the same, but just for the first operand.
5745 However, we cannot do anything with shifts where we cannot
5746 guarantee that the counts are smaller than the size of the mode
5747 because such a count will have a different meaning in a
5748 wider mode. */
5750 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
5751 && INTVAL (XEXP (x, 1)) >= 0
5752 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5753 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5754 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
5755 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
5756 break;
5758 /* If the shift count is a constant and we can do arithmetic in
5759 the mode of the shift, refine which bits we need. Otherwise, use the
5760 conservative form of the mask. */
5761 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5762 && INTVAL (XEXP (x, 1)) >= 0
5763 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
5764 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5765 mask >>= INTVAL (XEXP (x, 1));
5766 else
5767 mask = fuller_mask;
5769 op0 = gen_lowpart_for_combine (op_mode,
5770 force_to_mode (XEXP (x, 0), op_mode,
5771 mask, reg));
5773 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5774 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
5775 break;
5777 case LSHIFTRT:
5778 /* Here we can only do something if the shift count is a constant,
5779 this shift constant is valid for the host, and we can do arithmetic
5780 in OP_MODE. */
5782 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5783 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5784 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5786 rtx inner = XEXP (x, 0);
5788 /* Select the mask of the bits we need for the shift operand. */
5789 mask <<= INTVAL (XEXP (x, 1));
5791 /* We can only change the mode of the shift if we can do arithmetic
5792 in the mode of the shift and MASK is no wider than the width of
5793 OP_MODE. */
5794 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
5795 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
5796 op_mode = GET_MODE (x);
5798 inner = force_to_mode (inner, op_mode, mask, reg);
5800 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
5801 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
5804 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
5805 shift and AND produces only copies of the sign bit (C2 is one less
5806 than a power of two), we can do this with just a shift. */
5808 if (GET_CODE (x) == LSHIFTRT
5809 && GET_CODE (XEXP (x, 1)) == CONST_INT
5810 && ((INTVAL (XEXP (x, 1))
5811 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
5812 >= GET_MODE_BITSIZE (GET_MODE (x)))
5813 && exact_log2 (mask + 1) >= 0
5814 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5815 >= exact_log2 (mask + 1)))
5816 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5817 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
5818 - exact_log2 (mask + 1)));
5819 break;
5821 case ASHIFTRT:
5822 /* If we are just looking for the sign bit, we don't need this shift at
5823 all, even if it has a variable count. */
5824 if (mask == ((HOST_WIDE_INT) 1
5825 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))
5826 return force_to_mode (XEXP (x, 0), mode, mask, reg);
5828 /* If this is a shift by a constant, get a mask that contains those bits
5829 that are not copies of the sign bit. We then have two cases: If
5830 MASK only includes those bits, this can be a logical shift, which may
5831 allow simplifications. If MASK is a single-bit field not within
5832 those bits, we are requesting a copy of the sign bit and hence can
5833 shift the sign bit to the appropriate location. */
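/* E.g. for (ashiftrt:SI X (const_int 24)), bits 8 and above of the result
   are copies of the sign bit. With MASK == 0xff this can become
   (lshiftrt X (const_int 24)); with MASK == 0x100, a single bit inside
   the sign-copy region, we want bit 31 of X in bit 8, i.e.
   (lshiftrt X (const_int 23)). */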
5835 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
5836 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5838 int i = -1;
5840 nonzero = GET_MODE_MASK (GET_MODE (x));
5841 nonzero >>= INTVAL (XEXP (x, 1));
5843 if ((mask & ~ nonzero) == 0
5844 || (i = exact_log2 (mask)) >= 0)
5846 x = simplify_shift_const
5847 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5848 i < 0 ? INTVAL (XEXP (x, 1))
5849 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
5851 if (GET_CODE (x) != ASHIFTRT)
5852 return force_to_mode (x, mode, mask, reg);
5856 /* If MASK is 1, convert this to an LSHIFTRT. This can be done
5857 even if the shift count isn't a constant. */
5858 if (mask == 1)
5859 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
5861 /* If this is a sign-extension operation that just affects bits
5862 we don't care about, remove it. */
5864 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5865 && INTVAL (XEXP (x, 1)) >= 0
5866 && (INTVAL (XEXP (x, 1))
5867 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
5868 && GET_CODE (XEXP (x, 0)) == ASHIFT
5869 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5870 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5871 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, reg);
5873 break;
5875 case ROTATE:
5876 case ROTATERT:
5877 /* If the shift count is constant and we can do computations
5878 in the mode of X, compute where the bits we care about are.
5879 Otherwise, we can't do anything. Don't change the mode of
5880 the shift or propagate MODE into the shift, though. */
5881 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5882 && INTVAL (XEXP (x, 1)) >= 0)
5884 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
5885 GET_MODE (x), GEN_INT (mask),
5886 XEXP (x, 1));
5887 if (temp)
5888 SUBST (XEXP (x, 0),
5889 force_to_mode (XEXP (x, 0), GET_MODE (x),
5890 INTVAL (temp), reg));
5892 break;
5894 case NEG:
5895 /* We need any bits less significant than the most significant bit in
5896 MASK since carries from those bits will affect the bits we are
5897 interested in. */
5898 mask = fuller_mask;
5899 goto unop;
5901 case NOT:
5902 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
5903 same as the XOR case above. Ensure that the constant we form is not
5904 wider than the mode of X. */
5906 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5907 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5908 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5909 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
5910 < GET_MODE_BITSIZE (GET_MODE (x)))
5911 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5913 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
5914 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
5915 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
5917 return force_to_mode (x, mode, mask, reg);
5920 unop:
5921 op0 = gen_lowpart_for_combine (op_mode, force_to_mode (XEXP (x, 0), mode,
5922 mask, reg));
5923 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5924 x = gen_unary (code, op_mode, op0);
5925 break;
5927 case NE:
5928 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
5929 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
5930 in CONST. */
5931 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
5932 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
5933 return force_to_mode (XEXP (x, 0), mode, mask, reg);
5935 break;
5937 case IF_THEN_ELSE:
5938 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5939 written in a narrower mode. We play it safe and do not do so. */
5941 SUBST (XEXP (x, 1),
5942 gen_lowpart_for_combine (GET_MODE (x),
5943 force_to_mode (XEXP (x, 1), mode,
5944 mask, reg)));
5945 SUBST (XEXP (x, 2),
5946 gen_lowpart_for_combine (GET_MODE (x),
5947 force_to_mode (XEXP (x, 2), mode,
5948 mask, reg)));
5949 break;
5952 /* Ensure we return a value of the proper mode. */
5953 return gen_lowpart_for_combine (mode, x);
5956 /* Return the value of expression X given the fact that condition COND
5957 is known to be true when applied to REG as its first operand and VAL
5958 as its second. X is known to not be shared and so can be modified in
5959 place.
5961 We only handle the simplest cases, and specifically those cases that
5962 arise with IF_THEN_ELSE expressions. */
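/* For example, if COND is GE, REG is R and VAL is (const_int 0), then
   within X the expression (abs R) simplifies to R, and (smax R
   (const_int 0)) simplifies to R as well, since both equal R when
   R >= 0. */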
5964 static rtx
5965 known_cond (x, cond, reg, val)
5966 rtx x;
5967 enum rtx_code cond;
5968 rtx reg, val;
5970 enum rtx_code code = GET_CODE (x);
5971 rtx new, temp;
5972 char *fmt;
5973 int i, j;
5975 if (side_effects_p (x))
5976 return x;
5978 if (cond == EQ && rtx_equal_p (x, reg))
5979 return val;
5981 /* If X is (abs REG) and we know something about REG's relationship
5982 with zero, we may be able to simplify this. */
5984 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5985 switch (cond)
5987 case GE: case GT: case EQ:
5988 return XEXP (x, 0);
5989 case LT: case LE:
5990 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5993 /* The only other cases we handle are MIN, MAX, and comparisons if the
5994 operands are the same as REG and VAL. */
5996 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5998 if (rtx_equal_p (XEXP (x, 0), val))
5999 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6001 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6003 if (GET_RTX_CLASS (code) == '<')
6004 return (comparison_dominates_p (cond, code) ? const_true_rtx
6005 : (comparison_dominates_p (cond,
6006 reverse_condition (code))
6007 ? const0_rtx : x));
6009 else if (code == SMAX || code == SMIN
6010 || code == UMIN || code == UMAX)
6012 int unsignedp = (code == UMIN || code == UMAX);
6014 if (code == SMAX || code == UMAX)
6015 cond = reverse_condition (cond);
6017 switch (cond)
6019 case GE: case GT:
6020 return unsignedp ? x : XEXP (x, 1);
6021 case LE: case LT:
6022 return unsignedp ? x : XEXP (x, 0);
6023 case GEU: case GTU:
6024 return unsignedp ? XEXP (x, 1) : x;
6025 case LEU: case LTU:
6026 return unsignedp ? XEXP (x, 0) : x;
6032 fmt = GET_RTX_FORMAT (code);
6033 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6035 if (fmt[i] == 'e')
6036 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6037 else if (fmt[i] == 'E')
6038 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6039 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6040 cond, reg, val));
6043 return x;
6046 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
6047 Return that assignment if so.
6049 We only handle the most common cases. */
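/* For example, (set R (ior (and R (const_int -256)) (const_int 66)))
   stores 66 into the low byte of R: C1 == -256 gives POS == 0 and
   LEN == 8, so the result is a set of (zero_extract R (const_int 8)
   (const_int 0)) (or of a STRICT_LOW_PART, if the machine has one)
   from (const_int 66). */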
6051 static rtx
6052 make_field_assignment (x)
6053 rtx x;
6055 rtx dest = SET_DEST (x);
6056 rtx src = SET_SRC (x);
6057 rtx ourdest;
6058 rtx assign;
6059 HOST_WIDE_INT c1;
6060 int pos, len;
6061 rtx other;
6062 enum machine_mode mode;
6064 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6065 a clear of a one-bit field. We will have changed it to
6066 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6067 for a SUBREG. */
6069 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6070 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6071 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
6072 && (rtx_equal_p (dest, XEXP (src, 1))
6073 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6074 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6076 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6077 1, 1, 1, 0);
6078 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6081 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6082 && subreg_lowpart_p (XEXP (src, 0))
6083 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6084 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6085 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6086 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
6087 && (rtx_equal_p (dest, XEXP (src, 1))
6088 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6089 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6091 assign = make_extraction (VOIDmode, dest, 0,
6092 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6093 1, 1, 1, 0);
6094 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6097 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
6098 one-bit field. */
6099 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6100 && XEXP (XEXP (src, 0), 0) == const1_rtx
6101 && (rtx_equal_p (dest, XEXP (src, 1))
6102 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6103 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6105 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6106 1, 1, 1, 0);
6107 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
6110 /* The other case we handle is assignments into a constant-position
6111 field. They look like (ior (and DEST C1) OTHER). If C1 represents
6112 a mask that has all one bits except for a group of zero bits and
6113 OTHER is known to have zeros where C1 has ones, this is such an
6114 assignment. Compute the position and length from C1. Shift OTHER
6115 to the appropriate position, force it to the required mode, and
6116 make the extraction. Check for the AND in both operands. */
6118 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6119 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6120 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6121 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
6122 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
6123 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6124 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6125 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6126 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6127 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6128 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6129 dest)))
6130 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
6131 else
6132 return x;
6134 pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
6135 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
6136 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
6137 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
6138 return x;
6140 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
6142 /* The mode to use for the source is the mode of the assignment, or of
6143 what is inside a possible STRICT_LOW_PART. */
6144 mode = (GET_CODE (assign) == STRICT_LOW_PART
6145 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
6147 /* Shift OTHER right POS places and make it the source, restricting it
6148 to the proper length and mode. */
6150 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6151 GET_MODE (src), other, pos),
6152 mode,
6153 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6154 ? GET_MODE_MASK (mode)
6155 : ((HOST_WIDE_INT) 1 << len) - 1,
6156 dest);
6158 return gen_rtx_combine (SET, VOIDmode, assign, src);
6161 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6162 if so. */
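/* More generally, (ior (and A C) (and B C)) becomes (and (ior A B) C),
   and (plus (mult A C) (mult B C)) becomes (mult (plus A B) C), whenever
   the switch below says the inner code distributes over the outer one. */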
6164 static rtx
6165 apply_distributive_law (x)
6166 rtx x;
6168 enum rtx_code code = GET_CODE (x);
6169 rtx lhs, rhs, other;
6170 rtx tem;
6171 enum rtx_code inner_code;
6173 /* Distributivity is not true for floating point.
6174 It can change the value. So don't do it.
6175 -- rms and moshier@world.std.com. */
6176 if (FLOAT_MODE_P (GET_MODE (x)))
6177 return x;
6179 /* The outer operation can only be one of the following: */
6180 if (code != IOR && code != AND && code != XOR
6181 && code != PLUS && code != MINUS)
6182 return x;
6184 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6186 /* If either operand is a primitive we can't do anything, so get out fast. */
6187 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
6188 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
6189 return x;
6191 lhs = expand_compound_operation (lhs);
6192 rhs = expand_compound_operation (rhs);
6193 inner_code = GET_CODE (lhs);
6194 if (inner_code != GET_CODE (rhs))
6195 return x;
6197 /* See if the inner and outer operations distribute. */
6198 switch (inner_code)
6200 case LSHIFTRT:
6201 case ASHIFTRT:
6202 case AND:
6203 case IOR:
6204 /* These all distribute except over PLUS. */
6205 if (code == PLUS || code == MINUS)
6206 return x;
6207 break;
6209 case MULT:
6210 if (code != PLUS && code != MINUS)
6211 return x;
6212 break;
6214 case ASHIFT:
6215 case LSHIFT:
6216 /* These are also multiplies, so they distribute over everything. */
6217 break;
6219 case SUBREG:
6220 /* A non-paradoxical SUBREG distributes over all operations, provided
6221 the inner modes and word numbers are the same, this is an extraction
6222 of a low-order part, we don't convert an fp operation to int or
6223 vice versa, and we would not be converting a single-word
6224 operation into a multi-word operation. The latter test is not
6225 required, but it prevents generating unneeded multi-word operations.
6226 Some of the previous tests are redundant given the latter test, but
6227 are retained because they are required for correctness.
6229 We produce the result slightly differently in this case. */
6231 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6232 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6233 || ! subreg_lowpart_p (lhs)
6234 || (GET_MODE_CLASS (GET_MODE (lhs))
6235 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
6236 || (GET_MODE_SIZE (GET_MODE (lhs))
6237 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
6238 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
6239 return x;
6241 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6242 SUBREG_REG (lhs), SUBREG_REG (rhs));
6243 return gen_lowpart_for_combine (GET_MODE (x), tem);
6245 default:
6246 return x;
6249 /* Set LHS and RHS to the inner operands (A and B in the example
6250 above) and set OTHER to the common operand (C in the example).
6251 There is only one way to do this unless the inner operation is
6252 commutative. */
6253 if (GET_RTX_CLASS (inner_code) == 'c'
6254 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6255 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6256 else if (GET_RTX_CLASS (inner_code) == 'c'
6257 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6258 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6259 else if (GET_RTX_CLASS (inner_code) == 'c'
6260 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6261 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6262 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6263 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6264 else
6265 return x;
6267 /* Form the new inner operation, seeing if it simplifies first. */
6268 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6270 /* There is one exception to the general way of distributing:
6271 (a | b) ^ (a | c) -> (~a) & (b ^ c) */
6272 if (code == XOR && inner_code == IOR)
6274 inner_code = AND;
6275 other = gen_unary (NOT, GET_MODE (x), other);
6278 /* We may be able to continue distributing the result, so call
6279 ourselves recursively on the inner operation before forming the
6280 outer operation, which we return. */
6281 return gen_binary (inner_code, GET_MODE (x),
6282 apply_distributive_law (tem), other);
6285 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
6286 in MODE.
6288 Return an equivalent form, if different from X. Otherwise, return X. If
6289 X is zero, we are to always construct the equivalent form. */
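/* For example, with VAROP == (lshiftrt:SI X (const_int 28)) only the low
   four bits can be nonzero. A CONSTOP of 15 therefore masks nothing and
   we return the shift itself, while a CONSTOP of 3 yields
   (and (lshiftrt:SI X (const_int 28)) (const_int 3)). */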
6291 static rtx
6292 simplify_and_const_int (x, mode, varop, constop)
6293 rtx x;
6294 enum machine_mode mode;
6295 rtx varop;
6296 unsigned HOST_WIDE_INT constop;
6298 register enum machine_mode tmode;
6299 register rtx temp;
6300 unsigned HOST_WIDE_INT nonzero;
6301 int i;
6303 /* Simplify VAROP knowing that we will be only looking at some of the
6304 bits in it. */
6305 varop = force_to_mode (varop, mode, constop, NULL_RTX);
6307 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
6308 CONST_INT, we are done. */
6309 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
6310 return varop;
6312 /* See what bits may be nonzero in VAROP. Unlike the general case of
6313 a call to nonzero_bits, here we don't care about bits outside
6314 MODE. */
6316 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6318 /* Turn off all bits in the constant that are known to already be zero.
6319 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6320 which is tested below. */
6322 constop &= nonzero;
6324 /* If we don't have any bits left, return zero. */
6325 if (constop == 0)
6326 return const0_rtx;
6328 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
6329 a power of two, we can replace this with an ASHIFT. */
6330 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
6331 && (i = exact_log2 (constop)) >= 0)
6332 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
6334 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
6335 or XOR, then try to apply the distributive law. This may eliminate
6336 operations if either branch can be simplified because of the AND.
6337 It may also make some cases more complex, but those cases probably
6338 won't match a pattern either with or without this. */
6340 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
6341 return
6342 gen_lowpart_for_combine
6343 (mode,
6344 apply_distributive_law
6345 (gen_binary (GET_CODE (varop), GET_MODE (varop),
6346 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6347 XEXP (varop, 0), constop),
6348 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6349 XEXP (varop, 1), constop))));
6351 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6352 if we already had one (just check for the simplest cases). */
6353 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6354 && GET_MODE (XEXP (x, 0)) == mode
6355 && SUBREG_REG (XEXP (x, 0)) == varop)
6356 varop = XEXP (x, 0);
6357 else
6358 varop = gen_lowpart_for_combine (mode, varop);
6360 /* If we can't make the SUBREG, try to return what we were given. */
6361 if (GET_CODE (varop) == CLOBBER)
6362 return x ? x : varop;
6364 /* If we are only masking insignificant bits, return VAROP. */
6365 if (constop == nonzero)
6366 x = varop;
6368 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6369 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6370 x = gen_binary (AND, mode, varop, GEN_INT (constop));
6372 else
6374 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6375 || INTVAL (XEXP (x, 1)) != constop)
6376 SUBST (XEXP (x, 1), GEN_INT (constop));
6378 SUBST (XEXP (x, 0), varop);
6381 return x;
6384 /* Given an expression, X, compute which bits in X can be non-zero.
6385 We don't care about bits outside of those defined in MODE.
6387 For most X this is simply GET_MODE_MASK (MODE), but if X is
6388 a shift, AND, or zero_extract, we can do better. */
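/* For example, nonzero_bits of (and X (const_int 12)) is at most 12, and
   nonzero_bits of (lshiftrt:SI X (const_int 28)) is at most 15: no bit
   outside the low four can ever be set in the latter value. */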
6390 static unsigned HOST_WIDE_INT
6391 nonzero_bits (x, mode)
6392 rtx x;
6393 enum machine_mode mode;
6395 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6396 unsigned HOST_WIDE_INT inner_nz;
6397 enum rtx_code code;
6398 int mode_width = GET_MODE_BITSIZE (mode);
6399 rtx tem;
6401 /* If X is wider than MODE, use its mode instead. */
6402 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6404 mode = GET_MODE (x);
6405 nonzero = GET_MODE_MASK (mode);
6406 mode_width = GET_MODE_BITSIZE (mode);
6409 if (mode_width > HOST_BITS_PER_WIDE_INT)
6410 /* Our only callers in this case look for single bit values. So
6411 just return the mode mask. Those tests will then be false. */
6412 return nonzero;
6414 #ifndef WORD_REGISTER_OPERATIONS
6415 /* If MODE is wider than X, but both are a single word for both the host
6416 and target machines, we can compute this from which bits of the
6417 object might be nonzero in its own mode, taking into account the fact
6418 that on many CISC machines, accessing an object in a wider mode
6419 causes the high-order bits to become undefined. So they are
6420 not known to be zero. */
6422 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
6423 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
6424 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6425 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
6427 nonzero &= nonzero_bits (x, GET_MODE (x));
6428 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
6429 return nonzero;
6431 #endif
6433 code = GET_CODE (x);
6434 switch (code)
6436 case REG:
6437 #ifdef STACK_BOUNDARY
6438 /* If this is the stack pointer, we may know something about its
6439 alignment. If PUSH_ROUNDING is defined, it is possible for the
6440 stack to be momentarily aligned only to that amount, so we pick
6441 the least alignment. */
6443 if (x == stack_pointer_rtx)
6445 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6447 #ifdef PUSH_ROUNDING
6448 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6449 #endif
6451 return nonzero & ~ (sp_alignment - 1);
6453 #endif
6455 /* If X is a register whose nonzero bits value is current, use it.
6456 Otherwise, if X is a register whose value we can find, use that
6457 value. Otherwise, use the previously-computed global nonzero bits
6458 for this register. */
6460 if (reg_last_set_value[REGNO (x)] != 0
6461 && reg_last_set_mode[REGNO (x)] == mode
6462 && (reg_n_sets[REGNO (x)] == 1
6463 || reg_last_set_label[REGNO (x)] == label_tick)
6464 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6465 return reg_last_set_nonzero_bits[REGNO (x)];
6467 tem = get_last_value (x);
6469 if (tem)
6471 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6472 /* If X is narrower than MODE and TEM is a non-negative
6473 constant that would appear negative in the mode of X,
6474 sign-extend it for use in reg_nonzero_bits because some
6475 machines (maybe most) will actually do the sign-extension
6476 and this is the conservative approach.
6478 ??? For 2.5, try to tighten up the MD files in this regard
6479 instead of this kludge. */
6481 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6482 && GET_CODE (tem) == CONST_INT
6483 && INTVAL (tem) > 0
6484 && 0 != (INTVAL (tem)
6485 & ((HOST_WIDE_INT) 1
6486 << GET_MODE_BITSIZE (GET_MODE (x)))))
6487 tem = GEN_INT (INTVAL (tem)
6488 | ((HOST_WIDE_INT) (-1)
6489 << GET_MODE_BITSIZE (GET_MODE (x))));
6490 #endif
6491 return nonzero_bits (tem, mode);
6493 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6494 return reg_nonzero_bits[REGNO (x)] & nonzero;
6495 else
6496 return nonzero;
6498 case CONST_INT:
6499 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6500 /* If X is negative in MODE, sign-extend the value. */
6501 if (INTVAL (x) > 0
6502 && 0 != (INTVAL (x)
6503 & ((HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (GET_MODE (x)))))
6504 return (INTVAL (x)
6505 | ((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (GET_MODE (x))));
6506 #endif
6508 return INTVAL (x);
6510 case MEM:
6511 #ifdef LOAD_EXTEND_OP
6512 /* In many, if not most, RISC machines, reading a byte from memory
6513 zeros the rest of the register. Noticing that fact saves a lot
6514 of extra zero-extends. */
6515 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
6516 nonzero &= GET_MODE_MASK (GET_MODE (x));
6517 #endif
6518 break;
6520 case EQ: case NE:
6521 case GT: case GTU:
6522 case LT: case LTU:
6523 case GE: case GEU:
6524 case LE: case LEU:
6526 /* If this produces an integer result, we know which bits are set.
6527 Code here used to clear bits outside the mode of X, but that is
6528 now done above. */
6530 if (GET_MODE_CLASS (mode) == MODE_INT
6531 && mode_width <= HOST_BITS_PER_WIDE_INT)
6532 nonzero = STORE_FLAG_VALUE;
6533 break;
6535 case NEG:
6536 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6537 == GET_MODE_BITSIZE (GET_MODE (x)))
6538 nonzero = 1;
6540 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
6541 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6542 break;
6544 case ABS:
6545 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6546 == GET_MODE_BITSIZE (GET_MODE (x)))
6547 nonzero = 1;
6548 break;
6550 case TRUNCATE:
6551 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6552 break;
6554 case ZERO_EXTEND:
6555 nonzero &= nonzero_bits (XEXP (x, 0), mode);
6556 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6557 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6558 break;
6560 case SIGN_EXTEND:
6561 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6562 Otherwise, show that all the bits in the outer mode but not in the
6563 inner mode may be non-zero. */
6564 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6565 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6567 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6568 if (inner_nz &
6569 (((HOST_WIDE_INT) 1
6570 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6571 inner_nz |= (GET_MODE_MASK (mode)
6572 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6575 nonzero &= inner_nz;
6576 break;
6578 case AND:
6579 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6580 & nonzero_bits (XEXP (x, 1), mode));
6581 break;
6583 case XOR: case IOR:
6584 case UMIN: case UMAX: case SMIN: case SMAX:
6585 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6586 | nonzero_bits (XEXP (x, 1), mode));
6587 break;
6589 case PLUS: case MINUS:
6590 case MULT:
6591 case DIV: case UDIV:
6592 case MOD: case UMOD:
6593 /* We can apply the rules of arithmetic to compute the number of
6594 high- and low-order zero bits of these operations. We start by
6595 computing the width (position of the highest-order non-zero bit)
6596 and the number of low-order zero bits for each value. */
6598 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6599 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6600 int width0 = floor_log2 (nz0) + 1;
6601 int width1 = floor_log2 (nz1) + 1;
6602 int low0 = floor_log2 (nz0 & -nz0);
6603 int low1 = floor_log2 (nz1 & -nz1);
6604 int op0_maybe_minusp = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6605 int op1_maybe_minusp = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6606 int result_width = mode_width;
6607 int result_low = 0;
6609 switch (code)
6611 case PLUS:
6612 result_width = MAX (width0, width1) + 1;
6613 result_low = MIN (low0, low1);
6614 break;
6615 case MINUS:
6616 result_low = MIN (low0, low1);
6617 break;
6618 case MULT:
6619 result_width = width0 + width1;
6620 result_low = low0 + low1;
6621 break;
6622 case DIV:
6623 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6624 result_width = width0;
6625 break;
6626 case UDIV:
6627 result_width = width0;
6628 break;
6629 case MOD:
6630 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6631 result_width = MIN (width0, width1);
6632 result_low = MIN (low0, low1);
6633 break;
6634 case UMOD:
6635 result_width = MIN (width0, width1);
6636 result_low = MIN (low0, low1);
6637 break;
6640 if (result_width < mode_width)
6641 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6643 if (result_low > 0)
6644 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6646 break;
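/* A worked instance of the rules above: if nz0 == 0x0c (bits 2-3) and
   nz1 == 0x30 (bits 4-5), then width0 == 4, width1 == 6, low0 == 2,
   and low1 == 4.  For PLUS this gives result_width == 7 and
   result_low == 2, so the result is masked down to 0x7c; the largest
   possible sum, 0x0c + 0x30 == 0x3c, indeed fits under that mask.  */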
6648 case ZERO_EXTRACT:
6649 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6650 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6651 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6652 break;
6654 case SUBREG:
6655 /* If this is a SUBREG formed for a promoted variable that has
6656 been zero-extended, we know that at least the high-order bits
6657 are zero, though others might be too. */
6659 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6660 nonzero = (GET_MODE_MASK (GET_MODE (x))
6661 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
6663 /* If the inner mode is a single word for both the host and target
6664 machines, we can compute this from which bits of the inner
6665 object might be nonzero. */
6666 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6667 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6668 <= HOST_BITS_PER_WIDE_INT))
6670 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
6672 #ifndef WORD_REGISTER_OPERATIONS
6673 /* On many CISC machines, accessing an object in a wider mode
6674 causes the high-order bits to become undefined. So they are
6675 not known to be zero. */
6676 if (GET_MODE_SIZE (GET_MODE (x))
6677 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6678 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6679 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6680 #endif
6682 break;
6684 case ASHIFTRT:
6685 case LSHIFTRT:
6686 case ASHIFT:
6687 case LSHIFT:
6688 case ROTATE:
6689 /* The nonzero bits are in two classes: any bits within MODE
6690 that aren't in GET_MODE (x) are always significant. The rest of the
6691 nonzero bits are those that are significant in the operand of
6692 the shift when shifted the appropriate number of bits. This
6693 shows that high-order bits are cleared by the right shift and
6694 low-order bits by left shifts. */
6695 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6696 && INTVAL (XEXP (x, 1)) >= 0
6697 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6699 enum machine_mode inner_mode = GET_MODE (x);
6700 int width = GET_MODE_BITSIZE (inner_mode);
6701 int count = INTVAL (XEXP (x, 1));
6702 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6703 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6704 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
6705 unsigned HOST_WIDE_INT outer = 0;
6707 if (mode_width > width)
6708 outer = (op_nonzero & nonzero & ~ mode_mask);
6710 if (code == LSHIFTRT)
6711 inner >>= count;
6712 else if (code == ASHIFTRT)
6714 inner >>= count;
6716 /* If the sign bit may have been nonzero before the shift, we
6717 need to mark all the places it could have been copied to
6718 by the shift as possibly nonzero. */
6719 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6720 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
6722 else if (code == LSHIFT || code == ASHIFT)
6723 inner <<= count;
6724 else
6725 inner = ((inner << (count % width)
6726 | (inner >> (width - (count % width)))) & mode_mask);
6728 nonzero &= (outer | inner);
6730 break;
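/* To illustrate the ASHIFTRT handling above in an 8-bit mode: if the
   operand's nonzero bits are 0x80 and the count is 2, the plain shift
   gives inner == 0x20.  Since that still includes bit (8 - 1 - 2), the
   sign bit may have been copied into the positions above it, so the
   top two bits are set as well, yielding inner == 0xe0; and indeed
   (ashiftrt -128 2) is -32, which is 0xe0.  */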
6732 case FFS:
6733 /* This is at most the number of bits in the mode. */
6734 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6735 break;
6737 case IF_THEN_ELSE:
6738 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
6739 | nonzero_bits (XEXP (x, 2), mode));
6740 break;
6743 return nonzero;
6746 /* Return the number of bits at the high-order end of X that are known to
6747 be equal to the sign bit. X will be used in mode MODE; if MODE is
6748 VOIDmode, X will be used in its own mode. The returned value will always
6749 be between 1 and the number of bits in MODE. */
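/* For example, (sign_extend:SI (reg:QI R)) is guaranteed at least
   32 - 8 + 1 == 25 sign bit copies in SImode, since the extension
   replicates the QImode sign bit through the 24 high-order bits.  */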
6751 static int
6752 num_sign_bit_copies (x, mode)
6753 rtx x;
6754 enum machine_mode mode;
6756 enum rtx_code code = GET_CODE (x);
6757 int bitwidth;
6758 int num0, num1, result;
6759 unsigned HOST_WIDE_INT nonzero;
6760 rtx tem;
6762 /* If we weren't given a mode, use the mode of X. If the mode is still
6763 VOIDmode, we don't know anything. */
6765 if (mode == VOIDmode)
6766 mode = GET_MODE (x);
6768 if (mode == VOIDmode)
6769 return 1;
6771 bitwidth = GET_MODE_BITSIZE (mode);
6773 /* For a smaller object, just ignore the high bits. */
6774 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
6775 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
6776 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
6778 switch (code)
6780 case REG:
6782 if (reg_last_set_value[REGNO (x)] != 0
6783 && reg_last_set_mode[REGNO (x)] == mode
6784 && (reg_n_sets[REGNO (x)] == 1
6785 || reg_last_set_label[REGNO (x)] == label_tick)
6786 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6787 return reg_last_set_sign_bit_copies[REGNO (x)];
6789 tem = get_last_value (x);
6790 if (tem != 0)
6791 return num_sign_bit_copies (tem, mode);
6793 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6794 return reg_sign_bit_copies[REGNO (x)];
6795 break;
6797 case MEM:
6798 #ifdef LOAD_EXTEND_OP
6799 /* Some RISC machines sign-extend all loads smaller than a word. */
6800 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
6801 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6802 #endif
6803 break;
6805 case CONST_INT:
6806 /* If the constant is negative, take its 1's complement and remask.
6807 Then see how many zero bits we have. */
6808 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
6809 if (bitwidth <= HOST_BITS_PER_WIDE_INT
6810 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6811 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
6813 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
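/* Worked example in an 8-bit mode: for the constant -4 (11111100),
   NONZERO is first 0xfc; the sign bit is set, so it is complemented
   and remasked to 0x03.  floor_log2 (3) == 1, giving 8 - 1 - 1 == 6,
   and bits 7 through 2 are indeed all copies of the sign bit.  */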
6815 case SUBREG:
6816 /* If this is a SUBREG for a promoted object that is sign-extended
6817 and we are looking at it in a wider mode, we know that at least the
6818 high-order bits are known to be sign bit copies. */
6820 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6821 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
6822 num_sign_bit_copies (SUBREG_REG (x), mode));
6824 /* For a smaller object, just ignore the high bits. */
6825 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6827 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6828 return MAX (1, (num0
6829 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6830 - bitwidth)));
6833 #ifdef WORD_REGISTER_OPERATIONS
6834 /* For paradoxical SUBREGs on machines where all register operations
6835 affect the entire register, just look inside. Note that we are
6836 passing MODE to the recursive call, so the number of sign bit copies
6837 will remain relative to that mode, not the inner mode. */
6839 if (GET_MODE_SIZE (GET_MODE (x))
6840 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6841 return num_sign_bit_copies (SUBREG_REG (x), mode);
6842 #endif
6843 break;
6845 case SIGN_EXTRACT:
6846 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6847 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6848 break;
6850 case SIGN_EXTEND:
6851 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6852 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6854 case TRUNCATE:
6855 /* For a smaller object, just ignore the high bits. */
6856 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6857 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6858 - bitwidth)));
6860 case NOT:
6861 return num_sign_bit_copies (XEXP (x, 0), mode);
6863 case ROTATE: case ROTATERT:
6864 /* If we are rotating left by a number of bits less than the number
6865 of sign bit copies, we can just subtract that amount from the
6866 number. */
6867 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6868 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6870 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6871 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6872 : bitwidth - INTVAL (XEXP (x, 1))));
6874 break;
6876 case NEG:
6877 /* In general, this subtracts one sign bit copy. But if the value
6878 is known to be positive, the number of sign bit copies is the
6879 same as that of the input. Finally, if the input has just one bit
6880 that might be nonzero, all the bits are copies of the sign bit. */
6881 nonzero = nonzero_bits (XEXP (x, 0), mode);
6882 if (nonzero == 1)
6883 return bitwidth;
6885 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6886 if (num0 > 1
6887 && bitwidth <= HOST_BITS_PER_WIDE_INT
6888 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
6889 num0--;
6891 return num0;
6893 case IOR: case AND: case XOR:
6894 case SMIN: case SMAX: case UMIN: case UMAX:
6895 /* Logical operations will preserve the number of sign-bit copies.
6896 MIN and MAX operations always return one of the operands. */
6897 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6898 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6899 return MIN (num0, num1);
6901 case PLUS: case MINUS:
6902 /* For addition and subtraction, we can have a 1-bit carry. However,
6903 if we are subtracting 1 from a positive number, there will not
6904 be such a carry. Furthermore, if the positive number is known to
6905 be 0 or 1, we know the result is either -1 or 0. */
6907 if (code == PLUS && XEXP (x, 1) == constm1_rtx
6908 && bitwidth <= HOST_BITS_PER_WIDE_INT)
6910 nonzero = nonzero_bits (XEXP (x, 0), mode);
6911 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
6912 return (nonzero == 1 || nonzero == 0 ? bitwidth
6913 : bitwidth - floor_log2 (nonzero) - 1);
6916 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6917 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6918 return MAX (1, MIN (num0, num1) - 1);
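/* For instance, if each operand of a PLUS has three sign bit copies
   in an 8-bit mode, each lies in [-32, 31]; the sum lies in [-64, 62]
   and so still has two sign bit copies, matching MIN (3, 3) - 1.  */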
6920 case MULT:
6921 /* The number of bits of the product is the sum of the number of
6922 bits of both terms. However, unless one of the terms is known
6923 to be positive, we must allow for an additional bit since negating
6924 a negative number can remove one sign bit copy. */
6926 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6927 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6929 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6930 if (result > 0
6931 && bitwidth <= HOST_BITS_PER_WIDE_INT
6932 && ((nonzero_bits (XEXP (x, 0), mode)
6933 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6934 && ((nonzero_bits (XEXP (x, 1), mode)
6935 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
6936 result--;
6938 return MAX (1, result);
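/* E.g., with num0 == 6 and num1 == 7 in an 8-bit mode the operands
   lie in [-4, 3] and [-2, 1], so the product lies in [-6, 8].  The
   formula gives 8 - 2 - 1 == 5 copies, reduced to 4 because both
   operands may be negative; 8 == 00001000 has exactly 4 copies.  */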
6940 case UDIV:
6941 /* The result must be <= the first operand. */
6942 return num_sign_bit_copies (XEXP (x, 0), mode);
6944 case UMOD:
6945 /* The result must be <= the second operand. */
6946 return num_sign_bit_copies (XEXP (x, 1), mode);
6948 case DIV:
6949 /* Similar to unsigned division, except that we have to worry about
6950 the case where the divisor is negative, in which case we have
6951 to add 1. */
6952 result = num_sign_bit_copies (XEXP (x, 0), mode);
6953 if (result > 1
6954 && bitwidth <= HOST_BITS_PER_WIDE_INT
6955 && (nonzero_bits (XEXP (x, 1), mode)
6956 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6957 result --;
6959 return result;
6961 case MOD:
6962 result = num_sign_bit_copies (XEXP (x, 1), mode);
6963 if (result > 1
6964 && bitwidth <= HOST_BITS_PER_WIDE_INT
6965 && (nonzero_bits (XEXP (x, 1), mode)
6966 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6967 result --;
6969 return result;
6971 case ASHIFTRT:
6972 /* Shifts by a constant add to the number of bits equal to the
6973 sign bit. */
6974 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6975 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6976 && INTVAL (XEXP (x, 1)) > 0)
6977 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6979 return num0;
6981 case ASHIFT:
6982 case LSHIFT:
6983 /* Left shifts destroy copies. */
6984 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6985 || INTVAL (XEXP (x, 1)) < 0
6986 || INTVAL (XEXP (x, 1)) >= bitwidth)
6987 return 1;
6989 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6990 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6992 case IF_THEN_ELSE:
6993 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6994 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6995 return MIN (num0, num1);
6997 #if STORE_FLAG_VALUE == -1
6998 case EQ: case NE: case GE: case GT: case LE: case LT:
6999 case GEU: case GTU: case LEU: case LTU:
7000 return bitwidth;
7001 #endif
7004 /* If we haven't been able to figure it out by one of the above rules,
7005 see if some of the high-order bits are known to be zero. If so,
7006 count those bits and return one less than that amount. If we can't
7007 safely compute the mask for this mode, just return 1. */
7009 if (bitwidth > HOST_BITS_PER_WIDE_INT)
7010 return 1;
7012 nonzero = nonzero_bits (x, mode);
7013 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
7014 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
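/* E.g., if nonzero_bits reports 0x1f for a 32-bit quantity, the value
   never exceeds 31, so bits 31 through 5 are zero and the computation
   above returns 32 - floor_log2 (0x1f) - 1 == 27 sign bit copies.  */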
7017 /* Return the number of "extended" bits there are in X, when interpreted
7018 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7019 unsigned quantities, this is the number of high-order zero bits.
7020 For signed quantities, this is the number of copies of the sign bit
7021 minus 1. In both cases, this function returns the number of "spare"
7022 bits. For example, if two quantities for which this function returns
7023 at least 1 are added, the addition is known not to overflow.
7025 This function will always return 0 unless called during combine, which
7026 implies that it must be called from a define_split. */
7028 int
7029 extended_count (x, mode, unsignedp)
7030 rtx x;
7031 enum machine_mode mode;
7032 int unsignedp;
7034 if (nonzero_sign_valid == 0)
7035 return 0;
7037 return (unsignedp
7038 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7039 ? (GET_MODE_BITSIZE (mode) - 1
7040 - floor_log2 (nonzero_bits (x, mode)))
7041 : 0)
7042 : num_sign_bit_copies (x, mode) - 1);
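/* So for (sign_extend:SI (reg:HI R)) with UNSIGNEDP == 0 this returns
   at least 32 - 16 + 1 - 1 == 16 spare bits; a value of at least 1
   here for both addends already shows that their sum cannot overflow
   SImode.  */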
7044 /* This function is called from `simplify_shift_const' to merge two
7045 outer operations. Specifically, we have already found that we need
7046 to perform operation *POP0 with constant *PCONST0 at the outermost
7047 position. We would now like to also perform OP1 with constant CONST1
7048 (with *POP0 being done last).
7050 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7051 the resulting operation. *PCOMP_P is set to 1 if we would need to
7052 complement the innermost operand, otherwise it is unchanged.
7054 MODE is the mode in which the operation will be done. No bits outside
7055 the width of this mode matter. It is assumed that the width of this mode
7056 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
7058 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
7059 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7060 result is simply *PCONST0.
7062 If the resulting operation cannot be expressed as one operation, we
7063 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
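/* For example, if *POP0 is IOR with *PCONST0 == 0xf0 and we merge in
   OP1 == IOR with CONST1 == 0x0f, the result is a single IOR with
   constant 0xff.  Merging OP1 == AND into *POP0 == XOR with equal
   constants uses the identity (a & b) ^ b == (~a) & b, so *POP0
   becomes AND and *PCOMP_P gets set.  */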
7065 static int
7066 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7067 enum rtx_code *pop0;
7068 HOST_WIDE_INT *pconst0;
7069 enum rtx_code op1;
7070 HOST_WIDE_INT const1;
7071 enum machine_mode mode;
7072 int *pcomp_p;
7074 enum rtx_code op0 = *pop0;
7075 HOST_WIDE_INT const0 = *pconst0;
7077 const0 &= GET_MODE_MASK (mode);
7078 const1 &= GET_MODE_MASK (mode);
7080 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7081 if (op0 == AND)
7082 const1 &= const0;
7084 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
7085 if OP0 is SET. */
7087 if (op1 == NIL || op0 == SET)
7088 return 1;
7090 else if (op0 == NIL)
7091 op0 = op1, const0 = const1;
7093 else if (op0 == op1)
7095 switch (op0)
7097 case AND:
7098 const0 &= const1;
7099 break;
7100 case IOR:
7101 const0 |= const1;
7102 break;
7103 case XOR:
7104 const0 ^= const1;
7105 break;
7106 case PLUS:
7107 const0 += const1;
7108 break;
7109 case NEG:
7110 op0 = NIL;
7111 break;
7115 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7116 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7117 return 0;
7119 /* If the two constants aren't the same, we can't do anything. The
7120 remaining six cases can all be done. */
7121 else if (const0 != const1)
7122 return 0;
7124 else
7125 switch (op0)
7127 case IOR:
7128 if (op1 == AND)
7129 /* (a & b) | b == b */
7130 op0 = SET;
7131 else /* op1 == XOR */
7132 /* (a ^ b) | b == a | b */
7133 ;
7134 break;
7136 case XOR:
7137 if (op1 == AND)
7138 /* (a & b) ^ b == (~a) & b */
7139 op0 = AND, *pcomp_p = 1;
7140 else /* op1 == IOR */
7141 /* (a | b) ^ b == a & ~b */
7142 op0 = AND, *pconst0 = ~ const0;
7143 break;
7145 case AND:
7146 if (op1 == IOR)
7147 /* (a | b) & b == b */
7148 op0 = SET;
7149 else /* op1 == XOR */
7150 /* (a ^ b) & b == (~a) & b */
7151 *pcomp_p = 1;
7152 break;
7155 /* Check for NO-OP cases. */
7156 const0 &= GET_MODE_MASK (mode);
7157 if (const0 == 0
7158 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7159 op0 = NIL;
7160 else if (const0 == 0 && op0 == AND)
7161 op0 = SET;
7162 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7163 op0 = NIL;
7165 *pop0 = op0;
7166 *pconst0 = const0;
7168 return 1;
7171 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7172 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7173 that we started with.
7175 The shift is normally computed in the widest mode we find in VAROP, as
7176 long as it isn't a different number of words than RESULT_MODE. Exceptions
7177 are ASHIFTRT and ROTATE, which are always done in their original mode. */
7179 static rtx
7180 simplify_shift_const (x, code, result_mode, varop, count)
7181 rtx x;
7182 enum rtx_code code;
7183 enum machine_mode result_mode;
7184 rtx varop;
7185 int count;
7187 enum rtx_code orig_code = code;
7188 int orig_count = count;
7189 enum machine_mode mode = result_mode;
7190 enum machine_mode shift_mode, tmode;
7191 int mode_words
7192 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7193 /* We form (outer_op (code varop count) (outer_const)). */
7194 enum rtx_code outer_op = NIL;
7195 HOST_WIDE_INT outer_const;
7196 rtx const_rtx;
7197 int complement_p = 0;
7198 rtx new;
7200 /* If we were given an invalid count, don't do anything except exactly
7201 what was requested. */
7203 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7205 if (x)
7206 return x;
7208 return gen_rtx (code, mode, varop, GEN_INT (count));
7211 /* Unless one of the branches of the `if' in this loop does a `continue',
7212 we will `break' the loop after the `if'. */
7214 while (count != 0)
7216 /* If we have an operand of (clobber (const_int 0)), just return that
7217 value. */
7218 if (GET_CODE (varop) == CLOBBER)
7219 return varop;
7221 /* If we discovered we had to complement VAROP, leave. Making a NOT
7222 here would cause an infinite loop. */
7223 if (complement_p)
7224 break;
7226 /* Convert ROTATERT to ROTATE. */
7227 if (code == ROTATERT)
7228 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7230 /* Canonicalize LSHIFT to ASHIFT. */
7231 if (code == LSHIFT)
7232 code = ASHIFT;
7234 /* We need to determine what mode we will do the shift in. If the
7235 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
7236 was originally done in. Otherwise, we can do it in MODE, the widest
7237 mode encountered. */
7238 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7240 /* Handle cases where the count is greater than the size of the mode
7241 minus 1. For ASHIFT, use the size minus one as the count (this can
7242 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7243 take the count modulo the size. For other shifts, the result is
7244 zero.
7246 Since these shifts are being produced by the compiler by combining
7247 multiple operations, each of which is defined, we know what the
7248 result is supposed to be. */
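/* Concretely, in an 8-bit mode (ashiftrt X 10) is treated as
   (ashiftrt X 7), since seven positions already replicate the sign
   bit through the whole result; (rotate X 10) becomes (rotate X 2);
   and a logical shift by 10 replaces VAROP with zero.  */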
7250 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7252 if (code == ASHIFTRT)
7253 count = GET_MODE_BITSIZE (shift_mode) - 1;
7254 else if (code == ROTATE || code == ROTATERT)
7255 count %= GET_MODE_BITSIZE (shift_mode);
7256 else
7258 /* We can't simply return zero because there may be an
7259 outer op. */
7260 varop = const0_rtx;
7261 count = 0;
7262 break;
7266 /* Negative counts are invalid and should not have been made (a
7267 programmer-specified negative count should have been handled
7268 above). */
7269 else if (count < 0)
7270 abort ();
7272 /* An arithmetic right shift of a quantity known to be -1 or 0
7273 is a no-op. */
7274 if (code == ASHIFTRT
7275 && (num_sign_bit_copies (varop, shift_mode)
7276 == GET_MODE_BITSIZE (shift_mode)))
7278 count = 0;
7279 break;
7282 /* If we are doing an arithmetic right shift and discarding all but
7283 the sign bit copies, this is equivalent to doing a shift by the
7284 bitsize minus one. Convert it into that shift because it will often
7285 allow other simplifications. */
7287 if (code == ASHIFTRT
7288 && (count + num_sign_bit_copies (varop, shift_mode)
7289 >= GET_MODE_BITSIZE (shift_mode)))
7290 count = GET_MODE_BITSIZE (shift_mode) - 1;
7292 /* We simplify the tests below and elsewhere by converting
7293 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7294 `make_compound_operation' will convert it to an ASHIFTRT for
7295 those machines (such as Vax) that don't have an LSHIFTRT.
7296 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
7297 && code == ASHIFTRT
7298 && ((nonzero_bits (varop, shift_mode)
7299 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7300 == 0))
7301 code = LSHIFTRT;
7303 switch (GET_CODE (varop))
7305 case SIGN_EXTEND:
7306 case ZERO_EXTEND:
7307 case SIGN_EXTRACT:
7308 case ZERO_EXTRACT:
7309 new = expand_compound_operation (varop);
7310 if (new != varop)
7312 varop = new;
7313 continue;
7315 break;
7317 case MEM:
7318 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7319 minus the width of a smaller mode, we can do this with a
7320 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7321 if ((code == ASHIFTRT || code == LSHIFTRT)
7322 && ! mode_dependent_address_p (XEXP (varop, 0))
7323 && ! MEM_VOLATILE_P (varop)
7324 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7325 MODE_INT, 1)) != BLKmode)
7327 #if BYTES_BIG_ENDIAN
7328 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7329 #else
7330 new = gen_rtx (MEM, tmode,
7331 plus_constant (XEXP (varop, 0),
7332 count / BITS_PER_UNIT));
7333 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7334 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7335 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7336 #endif
7337 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7338 : ZERO_EXTEND, mode, new);
7339 count = 0;
7340 continue;
7342 break;
7344 case USE:
7345 /* Similar to the case above, except that we can only do this if
7346 the resulting mode is the same as that of the underlying
7347 MEM and adjust the address depending on the *bits* endianness
7348 because of the way that bit-field extract insns are defined. */
7349 if ((code == ASHIFTRT || code == LSHIFTRT)
7350 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7351 MODE_INT, 1)) != BLKmode
7352 && tmode == GET_MODE (XEXP (varop, 0)))
7354 #if BITS_BIG_ENDIAN
7355 new = XEXP (varop, 0);
7356 #else
7357 new = copy_rtx (XEXP (varop, 0));
7358 SUBST (XEXP (new, 0),
7359 plus_constant (XEXP (new, 0),
7360 count / BITS_PER_UNIT));
7361 #endif
7363 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7364 : ZERO_EXTEND, mode, new);
7365 count = 0;
7366 continue;
7368 break;
7370 case SUBREG:
7371 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7372 the same number of words as what we've seen so far. Then store
7373 the widest mode in MODE. */
7374 if (subreg_lowpart_p (varop)
7375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7376 > GET_MODE_SIZE (GET_MODE (varop)))
7377 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7378 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7379 == mode_words))
7381 varop = SUBREG_REG (varop);
7382 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7383 mode = GET_MODE (varop);
7384 continue;
7386 break;
7388 case MULT:
7389 /* Some machines use MULT instead of ASHIFT because MULT
7390 is cheaper. But it is still better on those machines to
7391 merge two shifts into one. */
7392 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7393 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7395 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
7396 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7397 continue;
7399 break;
7401 case UDIV:
7402 /* Similar, for when divides are cheaper. */
7403 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7404 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7406 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
7407 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7408 continue;
7410 break;
7412 case ASHIFTRT:
7413 /* If we are extracting just the sign bit of an arithmetic right
7414 shift, that shift is not needed. */
7415 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7417 varop = XEXP (varop, 0);
7418 continue;
7421 /* ... fall through ... */
7423 case LSHIFTRT:
7424 case ASHIFT:
7425 case LSHIFT:
7426 case ROTATE:
7427 /* Here we have two nested shifts. The result is usually the
7428 AND of a new shift with a mask. We compute the result below. */
7429 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7430 && INTVAL (XEXP (varop, 1)) >= 0
7431 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7432 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7433 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7435 enum rtx_code first_code = GET_CODE (varop);
7436 int first_count = INTVAL (XEXP (varop, 1));
7437 unsigned HOST_WIDE_INT mask;
7438 rtx mask_rtx;
7439 rtx inner;
7441 if (first_code == LSHIFT)
7442 first_code = ASHIFT;
7444 /* We have one common special case. We can't do any merging if
7445 the inner code is an ASHIFTRT of a smaller mode. However, if
7446 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7447 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7448 we can convert it to
7449 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7450 This simplifies certain SIGN_EXTEND operations. */
7451 if (code == ASHIFT && first_code == ASHIFTRT
7452 && (GET_MODE_BITSIZE (result_mode)
7453 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7455 /* C3 has the low-order C1 bits zero. */
7457 mask = (GET_MODE_MASK (mode)
7458 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7460 varop = simplify_and_const_int (NULL_RTX, result_mode,
7461 XEXP (varop, 0), mask);
7462 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7463 varop, count);
7464 count = first_count;
7465 code = ASHIFTRT;
7466 continue;
7469 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7470 than C1 high-order bits equal to the sign bit, we can convert
7471 this to either an ASHIFT or an ASHIFTRT depending on the
7472 two counts.
7474 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7476 if (code == ASHIFTRT && first_code == ASHIFT
7477 && GET_MODE (varop) == shift_mode
7478 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7479 > first_count))
7481 count -= first_count;
7482 if (count < 0)
7483 count = - count, code = ASHIFT;
7484 varop = XEXP (varop, 0);
7485 continue;
7488 /* There are some cases we can't do. If CODE is ASHIFTRT,
7489 we can only do this if FIRST_CODE is also ASHIFTRT.
7491 We can't do the case when CODE is ROTATE and FIRST_CODE is
7492 ASHIFTRT.
7494 If the mode of this shift is not the mode of the outer shift,
7495 we can't do this if either shift is ASHIFTRT or ROTATE.
7497 Finally, we can't do any of these if the mode is too wide
7498 unless the codes are the same.
7500 Handle the case where the shift codes are the same
7501 first. */
7503 if (code == first_code)
7505 if (GET_MODE (varop) != result_mode
7506 && (code == ASHIFTRT || code == ROTATE))
7507 break;
7509 count += first_count;
7510 varop = XEXP (varop, 0);
7511 continue;
7514 if (code == ASHIFTRT
7515 || (code == ROTATE && first_code == ASHIFTRT)
7516 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7517 || (GET_MODE (varop) != result_mode
7518 && (first_code == ASHIFTRT || first_code == ROTATE
7519 || code == ROTATE)))
7520 break;
7522 /* To compute the mask to apply after the shift, shift the
7523 nonzero bits of the inner shift the same way the
7524 outer shift will. */
7526 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7528 mask_rtx
7529 = simplify_binary_operation (code, result_mode, mask_rtx,
7530 GEN_INT (count));
7532 /* Give up if we can't compute an outer operation to use. */
7533 if (mask_rtx == 0
7534 || GET_CODE (mask_rtx) != CONST_INT
7535 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7536 INTVAL (mask_rtx),
7537 result_mode, &complement_p))
7538 break;
7540 /* If the shifts are in the same direction, we add the
7541 counts. Otherwise, we subtract them. */
7542 if ((code == ASHIFTRT || code == LSHIFTRT)
7543 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7544 count += first_count;
7545 else
7546 count -= first_count;
7548 /* If COUNT is positive, the new shift is usually CODE,
7549 except for the two exceptions below, in which case it is
7550 FIRST_CODE. If the count is negative, FIRST_CODE should
7551 always be used. */
7552 if (count > 0
7553 && ((first_code == ROTATE && code == ASHIFT)
7554 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7555 code = first_code;
7556 else if (count < 0)
7557 code = first_code, count = - count;
7559 varop = XEXP (varop, 0);
7560 continue;
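/* A worked instance of the merging above, in SImode and assuming
   nothing is known about X: for (lshiftrt:SI (ashift:SI X 3) 2) the
   shifts go in opposite directions, so COUNT becomes 2 - 3 == -1 and
   the code flips to ASHIFT with count 1.  The mask is the nonzero
   bits of the inner shift, 0xfffffff8, shifted right by 2, so the
   whole expression becomes (and:SI (ashift:SI X 1) 0x3ffffffe).  */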
7563 /* If we have (A << B << C) for any shift, we can convert this to
7564 (A << C << B). This wins if A is a constant. Only try this if
7565 B is not a constant. */
7567 else if (GET_CODE (varop) == code
7568 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7569 && 0 != (new
7570 = simplify_binary_operation (code, mode,
7571 XEXP (varop, 0),
7572 GEN_INT (count))))
7574 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7575 count = 0;
7576 continue;
7578 break;
7580 case NOT:
7581 /* Make this fit the case below. */
7582 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7583 GEN_INT (GET_MODE_MASK (mode)));
7584 continue;
7586 case IOR:
7587 case AND:
7588 case XOR:
7589 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7590 with C the size of VAROP - 1 and the shift is logical if
7591 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7592 we have an (le X 0) operation. If we have an arithmetic shift
7593 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7594 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7596 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7597 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7598 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7599 && (code == LSHIFTRT || code == ASHIFTRT)
7600 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7601 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7603 count = 0;
7604 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7605 const0_rtx);
7607 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7608 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7610 continue;
7613 /* If we have (shift (logical)), move the logical to the outside
7614 to allow it to possibly combine with another logical and the
7615 shift to combine with another shift. This also canonicalizes to
7616 what a ZERO_EXTRACT looks like. Also, some machines have
7617 (and (shift)) insns. */
7619 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7620 && (new = simplify_binary_operation (code, result_mode,
7621 XEXP (varop, 1),
7622 GEN_INT (count))) != 0
7623 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7624 INTVAL (new), result_mode, &complement_p))
7626 varop = XEXP (varop, 0);
7627 continue;
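/* For example, (lshiftrt:SI (and:SI X 0xff00) 8) is rewritten here as
   (and:SI (lshiftrt:SI X 8) 0xff): the constant 0xff00 is shifted the
   same way as X and the AND migrates outside the shift, which is also
   the shape a ZERO_EXTRACT canonicalizes to.  */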
7630 /* If we can't do that, try to simplify the shift in each arm of the
7631 logical expression, make a new logical expression, and apply
7632 the inverse distributive law. */
7634 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
7635 XEXP (varop, 0), count);
7636 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
7637 XEXP (varop, 1), count);
7639 varop = gen_binary (GET_CODE (varop), GET_MODE (varop), lhs, rhs);
7640 varop = apply_distributive_law (varop);
7642 count = 0;
7644 break;
7646 case EQ:
7647 /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7648 says that the sign bit can be tested, FOO has mode MODE, C is
7649 GET_MODE_BITSIZE (MODE) - 1, and only the low-order bit of FOO
7650 may be nonzero. */
7651 if (code == LSHIFT
7652 && XEXP (varop, 1) == const0_rtx
7653 && GET_MODE (XEXP (varop, 0)) == result_mode
7654 && count == GET_MODE_BITSIZE (result_mode) - 1
7655 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7656 && ((STORE_FLAG_VALUE
7657 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7658 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7659 && merge_outer_ops (&outer_op, &outer_const, XOR,
7660 (HOST_WIDE_INT) 1, result_mode,
7661 &complement_p))
7663 varop = XEXP (varop, 0);
7664 count = 0;
7665 continue;
7667 break;
7669 case NEG:
7670 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7671 than the number of bits in the mode is equivalent to A. */
7672 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7673 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
7675 varop = XEXP (varop, 0);
7676 count = 0;
7677 continue;
7680 /* NEG commutes with ASHIFT since it is multiplication. Move the
7681 NEG outside to allow shifts to combine. */
7682 if (code == ASHIFT
7683 && merge_outer_ops (&outer_op, &outer_const, NEG,
7684 (HOST_WIDE_INT) 0, result_mode,
7685 &complement_p))
7687 varop = XEXP (varop, 0);
7688 continue;
7690 break;
7692 case PLUS:
7693 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7694 is one less than the number of bits in the mode is
7695 equivalent to (xor A 1). */
7696 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7697 && XEXP (varop, 1) == constm1_rtx
7698 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7699 && merge_outer_ops (&outer_op, &outer_const, XOR,
7700 (HOST_WIDE_INT) 1, result_mode,
7701 &complement_p))
7703 count = 0;
7704 varop = XEXP (varop, 0);
7705 continue;
7708 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7709 that might be nonzero in BAR are those being shifted out and those
7710 bits are known zero in FOO, we can replace the PLUS with FOO.
7711 Similarly in the other operand order. This code occurs when
7712 we are computing the size of a variable-size array. */
7714 if ((code == ASHIFTRT || code == LSHIFTRT)
7715 && count < HOST_BITS_PER_WIDE_INT
7716 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
7717 && (nonzero_bits (XEXP (varop, 1), result_mode)
7718 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
7720 varop = XEXP (varop, 0);
7721 continue;
7723 else if ((code == ASHIFTRT || code == LSHIFTRT)
7724 && count < HOST_BITS_PER_WIDE_INT
7725 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7726 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7727 >> count)
7728 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7729 & nonzero_bits (XEXP (varop, 1),
7730 result_mode)))
7732 varop = XEXP (varop, 1);
7733 continue;
7736 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7737 if (code == ASHIFT
7738 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7739 && (new = simplify_binary_operation (ASHIFT, result_mode,
7740 XEXP (varop, 1),
7741 GEN_INT (count))) != 0
7742 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7743 INTVAL (new), result_mode, &complement_p))
7745 varop = XEXP (varop, 0);
7746 continue;
7748 break;
7750 case MINUS:
7751 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7752 with C the size of VAROP - 1 and the shift is logical if
7753 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7754 we have a (gt X 0) operation. If the shift is arithmetic with
7755 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7756 we have a (neg (gt X 0)) operation. */
7758 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7759 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7760 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7761 && (code == LSHIFTRT || code == ASHIFTRT)
7762 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7763 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7764 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7766 count = 0;
7767 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7768 const0_rtx);
7770 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7771 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7773 continue;
7775 break;
7778 break;
7781 /* We need to determine what mode to do the shift in. If the shift is
7782 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7783 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7784 The code we care about is that of the shift that will actually be done,
7785 not the shift that was originally requested. */
7786 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7788 /* We have now finished analyzing the shift. The result should be
7789 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7790 OUTER_OP is non-NIL, it is an operation that needs to be applied
7791 to the result of the shift. OUTER_CONST is the relevant constant,
7792 but we must turn off all bits turned off in the shift.
7794 If we were passed a value for X, see if we can use any pieces of
7795 it. If not, make a new rtx. */
7797 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7798 && GET_CODE (XEXP (x, 1)) == CONST_INT
7799 && INTVAL (XEXP (x, 1)) == count)
7800 const_rtx = XEXP (x, 1);
7801 else
7802 const_rtx = GEN_INT (count);
7804 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7805 && GET_MODE (XEXP (x, 0)) == shift_mode
7806 && SUBREG_REG (XEXP (x, 0)) == varop)
7807 varop = XEXP (x, 0);
7808 else if (GET_MODE (varop) != shift_mode)
7809 varop = gen_lowpart_for_combine (shift_mode, varop);
7811 /* If we can't make the SUBREG, try to return what we were given. */
7812 if (GET_CODE (varop) == CLOBBER)
7813 return x ? x : varop;
7815 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7816 if (new != 0)
7817 x = new;
7818 else
7820 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7821 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7823 SUBST (XEXP (x, 0), varop);
7824 SUBST (XEXP (x, 1), const_rtx);
7827 /* If we have an outer operation and we just made a shift, it is
7828 possible that we could have simplified the shift were it not
7829 for the outer operation. So try to do the simplification
7830 recursively. */
7832 if (outer_op != NIL && GET_CODE (x) == code
7833 && GET_CODE (XEXP (x, 1)) == CONST_INT)
7834 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
7835 INTVAL (XEXP (x, 1)));
7837 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
7838 turn off all the bits that the shift would have turned off. */
7839 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7840 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7841 GET_MODE_MASK (result_mode) >> orig_count);
7843 /* Do the remainder of the processing in RESULT_MODE. */
7844 x = gen_lowpart_for_combine (result_mode, x);
7846 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7847 operation. */
7848 if (complement_p)
7849 x = gen_unary (NOT, result_mode, x);
7851 if (outer_op != NIL)
7853 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7854 outer_const &= GET_MODE_MASK (result_mode);
7856 if (outer_op == AND)
7857 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7858 else if (outer_op == SET)
7859 /* This means that we have determined that the result is
7860 equivalent to a constant. This should be rare. */
7861 x = GEN_INT (outer_const);
7862 else if (GET_RTX_CLASS (outer_op) == '1')
7863 x = gen_unary (outer_op, result_mode, x);
7864 else
7865 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7868 return x;
7871 /* Like recog, but we receive the address of a pointer to a new pattern.
7872 We try to match the rtx that the pointer points to.
7873 If that fails, we may try to modify or replace the pattern,
7874 storing the replacement into the same pointer object.
7876 Modifications include deletion or addition of CLOBBERs.
7878 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7879 the CLOBBERs are placed.
7881 The value is the final insn code from the pattern ultimately matched,
7882 or -1. */
7884 static int
7885 recog_for_combine (pnewpat, insn, pnotes)
7886 rtx *pnewpat;
7887 rtx insn;
7888 rtx *pnotes;
7890 register rtx pat = *pnewpat;
7891 int insn_code_number;
7892 int num_clobbers_to_add = 0;
7893 int i;
7894 rtx notes = 0;
7896 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
7897 we use to indicate that something didn't match. If we find such a
7898 thing, force rejection. */
7899 if (GET_CODE (pat) == PARALLEL)
7900 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7901 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
7902 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
7903 return -1;
7905 /* Is the result of combination a valid instruction? */
7906 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7908 /* If it isn't, there is the possibility that we previously had an insn
7909 that clobbered some register as a side effect, but the combined
7910 insn doesn't need to do that. So try once more without the clobbers
7911 unless this represents an ASM insn. */
7913 if (insn_code_number < 0 && ! check_asm_operands (pat)
7914 && GET_CODE (pat) == PARALLEL)
7916 int pos;
7918 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7919 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7921 if (i != pos)
7922 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7923 pos++;
7926 SUBST_INT (XVECLEN (pat, 0), pos);
7928 if (pos == 1)
7929 pat = XVECEXP (pat, 0, 0);
7931 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7934 /* If we had any clobbers to add, make a new pattern that contains
7935 them. Then check to make sure that all of them are dead. */
7936 if (num_clobbers_to_add)
7938 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7939 gen_rtvec (GET_CODE (pat) == PARALLEL
7940 ? XVECLEN (pat, 0) + num_clobbers_to_add
7941 : num_clobbers_to_add + 1));
7943 if (GET_CODE (pat) == PARALLEL)
7944 for (i = 0; i < XVECLEN (pat, 0); i++)
7945 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7946 else
7947 XVECEXP (newpat, 0, 0) = pat;
7949 add_clobbers (newpat, insn_code_number);
7951 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7952 i < XVECLEN (newpat, 0); i++)
7954 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7955 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7956 return -1;
7957 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7958 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7960 pat = newpat;
7963 *pnewpat = pat;
7964 *pnotes = notes;
7966 return insn_code_number;
7969 /* Like gen_lowpart but for use by combine. In combine it is not possible
7970 to create any new pseudoregs. However, it is safe to create
7971 invalid memory addresses, because combine will try to recognize
7972 them and all they will do is make the combine attempt fail.
7974 If for some reason this cannot do its job, an rtx
7975 (clobber (const_int 0)) is returned.
7976 An insn containing that will not be recognized. */
7978 #undef gen_lowpart
7980 static rtx
7981 gen_lowpart_for_combine (mode, x)
7982 enum machine_mode mode;
7983 register rtx x;
7985 rtx result;
7987 if (GET_MODE (x) == mode)
7988 return x;
7990 /* We can only support MODE being wider than a word if X is a
7991 constant integer or has a mode the same size. */
7993 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
7994 && ! ((GET_MODE (x) == VOIDmode
7995 && (GET_CODE (x) == CONST_INT
7996 || GET_CODE (x) == CONST_DOUBLE))
7997 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
7998 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8000 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8001 won't know what to do. So we will strip off the SUBREG here and
8002 process normally. */
8003 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8005 x = SUBREG_REG (x);
8006 if (GET_MODE (x) == mode)
8007 return x;
8010 result = gen_lowpart_common (mode, x);
8011 if (result)
8012 return result;
8014 if (GET_CODE (x) == MEM)
8016 register int offset = 0;
8017 rtx new;
8019 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8020 address. */
8021 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8022 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8024 /* If we want to refer to something bigger than the original memref,
8025 generate a perverse subreg instead. That will force a reload
8026 of the original memref X. */
8027 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8028 return gen_rtx (SUBREG, mode, x, 0);
8030 #if WORDS_BIG_ENDIAN
8031 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8032 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8033 #endif
8034 #if BYTES_BIG_ENDIAN
8035 /* Adjust the address so that the address-after-the-data
8036 is unchanged. */
8037 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8038 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8039 #endif
8040 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8041 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8042 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8043 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8044 return new;
8047 /* If X is a comparison operator, rewrite it in a new mode. This
8048 probably won't match, but may allow further simplifications. */
8049 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8050 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8052 /* If we couldn't simplify X any other way, just enclose it in a
8053 SUBREG. Normally, this SUBREG won't match, but some patterns may
8054 include an explicit SUBREG or we may simplify it further in combine. */
8055 else
8057 int word = 0;
8059 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8060 word = ((GET_MODE_SIZE (GET_MODE (x))
8061 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8062 / UNITS_PER_WORD);
8063 return gen_rtx (SUBREG, mode, x, word);
8067 /* Make an rtx expression. This is a subset of gen_rtx and only supports
8068 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8070 If the identical expression was previously in the insn (in the undobuf),
8071 it will be returned. Only if it is not found will a new expression
8072 be made. */
8074 /*VARARGS2*/
8075 static rtx
8076 gen_rtx_combine (va_alist)
8077 va_dcl
8079 va_list p;
8080 enum rtx_code code;
8081 enum machine_mode mode;
8082 int n_args;
8083 rtx args[3];
8084 int i, j;
8085 char *fmt;
8086 rtx rt;
8088 va_start (p);
8089 code = va_arg (p, enum rtx_code);
8090 mode = va_arg (p, enum machine_mode);
8091 n_args = GET_RTX_LENGTH (code);
8092 fmt = GET_RTX_FORMAT (code);
8094 if (n_args == 0 || n_args > 3)
8095 abort ();
8097 /* Get each arg and verify that it is supposed to be an expression. */
8098 for (j = 0; j < n_args; j++)
8100 if (*fmt++ != 'e')
8101 abort ();
8103 args[j] = va_arg (p, rtx);
8106 /* See if this is in undobuf. Be sure we don't use objects that came
8107 from another insn; this could produce circular rtl structures. */
8109 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8110 if (!undobuf.undo[i].is_int
8111 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8112 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
8114 for (j = 0; j < n_args; j++)
8115 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
8116 break;
8118 if (j == n_args)
8119 return undobuf.undo[i].old_contents.r;
8122 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8123 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8124 rt = rtx_alloc (code);
8125 PUT_MODE (rt, mode);
8126 XEXP (rt, 0) = args[0];
8127 if (n_args > 1)
8129 XEXP (rt, 1) = args[1];
8130 if (n_args > 2)
8131 XEXP (rt, 2) = args[2];
8133 return rt;
8136 /* These routines make binary and unary operations by first seeing if they
8137 fold; if not, a new expression is allocated. */
8139 static rtx
8140 gen_binary (code, mode, op0, op1)
8141 enum rtx_code code;
8142 enum machine_mode mode;
8143 rtx op0, op1;
8145 rtx result;
8146 rtx tem;
8148 if (GET_RTX_CLASS (code) == 'c'
8149 && (GET_CODE (op0) == CONST_INT
8150 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8151 tem = op0, op0 = op1, op1 = tem;
8153 if (GET_RTX_CLASS (code) == '<')
8155 enum machine_mode op_mode = GET_MODE (op0);
8156 if (op_mode == VOIDmode)
8157 op_mode = GET_MODE (op1);
8158 result = simplify_relational_operation (code, op_mode, op0, op1);
8160 else
8161 result = simplify_binary_operation (code, mode, op0, op1);
8163 if (result)
8164 return result;
8166 /* Put complex operands first and constants second. */
8167 if (GET_RTX_CLASS (code) == 'c'
8168 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8169 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8170 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8171 || (GET_CODE (op0) == SUBREG
8172 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8173 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8174 return gen_rtx_combine (code, mode, op1, op0);
8176 return gen_rtx_combine (code, mode, op0, op1);
8179 static rtx
8180 gen_unary (code, mode, op0)
8181 enum rtx_code code;
8182 enum machine_mode mode;
8183 rtx op0;
8185 rtx result = simplify_unary_operation (code, mode, op0, mode);
8187 if (result)
8188 return result;
8190 return gen_rtx_combine (code, mode, op0);
8193 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
8194 comparison code that will be tested.
8196 The result is a possibly different comparison code to use. *POP0 and
8197 *POP1 may be updated.
8199 It is possible that we might detect that a comparison is either always
8200 true or always false. However, we do not perform general constant
8201 folding in combine, so this knowledge isn't useful. Such tautologies
8202 should have been detected earlier. Hence we ignore all such cases. */
8204 static enum rtx_code
8205 simplify_comparison (code, pop0, pop1)
8206 enum rtx_code code;
8207 rtx *pop0;
8208 rtx *pop1;
8210 rtx op0 = *pop0;
8211 rtx op1 = *pop1;
8212 rtx tem, tem1;
8213 int i;
8214 enum machine_mode mode, tmode;
8216 /* Try a few ways of applying the same transformation to both operands. */
8217 while (1)
8219 /* If both operands are the same constant shift, see if we can ignore the
8220 shift. We can if the shift is a rotate or if the bits shifted out of
8221 this shift are known to be zero for both inputs and if the type of
8222 comparison is compatible with the shift. */
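/* For example, if X and Y are addresses known to be multiples of 4,
   so the low two bits of each are zero, then
   (eq (lshiftrt X 2) (lshiftrt Y 2)) can be done as (eq X Y): the
   discarded bits are zero in both inputs and equality is compatible
   with a logical right shift.  */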
8223 if (GET_CODE (op0) == GET_CODE (op1)
8224 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8225 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
8226 || ((GET_CODE (op0) == LSHIFTRT
8227 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
8228 && (code != GT && code != LT && code != GE && code != LE))
8229 || (GET_CODE (op0) == ASHIFTRT
8230 && (code != GTU && code != LTU
8231 && code != GEU && code != LEU)))
8232 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8233 && INTVAL (XEXP (op0, 1)) >= 0
8234 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8235 && XEXP (op0, 1) == XEXP (op1, 1))
8237 enum machine_mode mode = GET_MODE (op0);
8238 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8239 int shift_count = INTVAL (XEXP (op0, 1));
8241 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8242 mask &= (mask >> shift_count) << shift_count;
8243 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
8244 mask = (mask & (mask << shift_count)) >> shift_count;
8246 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8247 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
8248 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8249 else
8250 break;
8253 /* If both operands are AND's of a paradoxical SUBREG by constant, the
8254 SUBREGs are of the same mode, and, in both cases, the AND would
8255 be redundant if the comparison was done in the narrower mode,
8256 do the comparison in the narrower mode (e.g., we are AND'ing with 1
8257 and the operand's possibly nonzero bits are 0xffffff01; in that case
8258 if we only care about QImode, we don't need the AND). This case
8259 occurs if the output mode of an scc insn is not SImode and
8260 STORE_FLAG_VALUE == 1 (e.g., the 386). */
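/* Added annotation (illustrative sketch): if A and B are QImode scc
   results known to be 0 or 1, then comparing
       (and:SI (subreg:SI (reg:QI A) 0) (const_int 1))
   with the like expression in B can be done as an unsigned comparison
   of (reg:QI A) with (reg:QI B); the ANDs are redundant in QImode.  */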
8262 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8263 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8264 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8265 && GET_CODE (XEXP (op0, 0)) == SUBREG
8266 && GET_CODE (XEXP (op1, 0)) == SUBREG
8267 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
8268 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
8269 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
8270 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
8271 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
8272 <= HOST_BITS_PER_WIDE_INT)
8273 && (nonzero_bits (SUBREG_REG (XEXP (op0, 0)),
8274 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
8275 & ~ INTVAL (XEXP (op0, 1))) == 0
8276 && (nonzero_bits (SUBREG_REG (XEXP (op1, 0)),
8277 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
8278 & ~ INTVAL (XEXP (op1, 1))) == 0)
8280 op0 = SUBREG_REG (XEXP (op0, 0));
8281 op1 = SUBREG_REG (XEXP (op1, 0));
8283 /* the resulting comparison is always unsigned since we masked off
8284 the original sign bit. */
8285 code = unsigned_condition (code);
8287 else
8288 break;
8291 /* If the first operand is a constant, swap the operands and adjust the
8292 comparison code appropriately. */
8293 if (CONSTANT_P (op0))
8295 tem = op0, op0 = op1, op1 = tem;
8296 code = swap_condition (code);
8299 /* We now enter a loop during which we will try to simplify the comparison.
8300 For the most part, we only are concerned with comparisons with zero,
8301 but some things may really be comparisons with zero but not start
8302 out looking that way. */
8304 while (GET_CODE (op1) == CONST_INT)
8306 enum machine_mode mode = GET_MODE (op0);
8307 int mode_width = GET_MODE_BITSIZE (mode);
8308 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8309 int equality_comparison_p;
8310 int sign_bit_comparison_p;
8311 int unsigned_comparison_p;
8312 HOST_WIDE_INT const_op;
8314 /* We only want to handle integral modes. This catches VOIDmode,
8315 CCmode, and the floating-point modes. An exception is that we
8316 can handle VOIDmode if OP0 is a COMPARE or a comparison
8317 operation. */
8319 if (GET_MODE_CLASS (mode) != MODE_INT
8320 && ! (mode == VOIDmode
8321 && (GET_CODE (op0) == COMPARE
8322 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8323 break;
8325 /* Get the constant we are comparing against and turn off all bits
8326 not on in our mode. */
8327 const_op = INTVAL (op1);
8328 if (mode_width <= HOST_BITS_PER_WIDE_INT)
8329 const_op &= mask;
8331 /* If we are comparing against a constant power of two and the value
8332 being compared can only have that single bit nonzero (e.g., it was
8333 `and'ed with that bit), we can replace this with a comparison
8334 with zero. */
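/* Added annotation: for example, if op0 is (and:SI X (const_int 8)),
   its only possible values are 0 and 8, so
   (eq op0 (const_int 8)) becomes (ne op0 (const_int 0)) and
   (ne op0 (const_int 8)) becomes (eq op0 (const_int 0)).  */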
8335 if (const_op
8336 && (code == EQ || code == NE || code == GE || code == GEU
8337 || code == LT || code == LTU)
8338 && mode_width <= HOST_BITS_PER_WIDE_INT
8339 && exact_log2 (const_op) >= 0
8340 && nonzero_bits (op0, mode) == const_op)
8342 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8343 op1 = const0_rtx, const_op = 0;
8346 /* Similarly, if we are comparing a value known to be either -1 or
8347 0 with -1, change it to the opposite comparison against zero. */
8349 if (const_op == -1
8350 && (code == EQ || code == NE || code == GT || code == LE
8351 || code == GEU || code == LTU)
8352 && num_sign_bit_copies (op0, mode) == mode_width)
8354 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8355 op1 = const0_rtx, const_op = 0;
8358 /* Do some canonicalizations based on the comparison code. We prefer
8359 comparisons against zero and then prefer equality comparisons.
8360 If we can reduce the size of a constant, we will do that too. */
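/* Added annotation, worked instances of the canonicalizations below:
   (lt X (const_int 5)) becomes (le X (const_int 4));
   (geu X (const_int 5)) becomes (gtu X (const_int 4));
   (gtu X (const_int 0)) becomes (ne X (const_int 0)); and in SImode
   (ltu X (const_int 0x80000000)) becomes the signed (ge X (const_int 0)).  */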
8362 switch (code)
8364 case LT:
8365 /* < C is equivalent to <= (C - 1) */
8366 if (const_op > 0)
8368 const_op -= 1;
8369 op1 = GEN_INT (const_op);
8370 code = LE;
8371 /* ... fall through to LE case below. */
8373 else
8374 break;
8376 case LE:
8377 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
8378 if (const_op < 0)
8380 const_op += 1;
8381 op1 = GEN_INT (const_op);
8382 code = LT;
8385 /* If we are doing a <= 0 comparison on a value known to have
8386 a zero sign bit, we can replace this with == 0. */
8387 else if (const_op == 0
8388 && mode_width <= HOST_BITS_PER_WIDE_INT
8389 && (nonzero_bits (op0, mode)
8390 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8391 code = EQ;
8392 break;
8394 case GE:
8395 /* >= C is equivalent to > (C - 1). */
8396 if (const_op > 0)
8398 const_op -= 1;
8399 op1 = GEN_INT (const_op);
8400 code = GT;
8401 /* ... fall through to GT below. */
8403 else
8404 break;
8406 case GT:
8407 /* > C is equivalent to >= (C + 1); we do this for C < 0 */
8408 if (const_op < 0)
8410 const_op += 1;
8411 op1 = GEN_INT (const_op);
8412 code = GE;
8415 /* If we are doing a > 0 comparison on a value known to have
8416 a zero sign bit, we can replace this with != 0. */
8417 else if (const_op == 0
8418 && mode_width <= HOST_BITS_PER_WIDE_INT
8419 && (nonzero_bits (op0, mode)
8420 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8421 code = NE;
8422 break;
8424 case LTU:
8425 /* < C is equivalent to <= (C - 1). */
8426 if (const_op > 0)
8428 const_op -= 1;
8429 op1 = GEN_INT (const_op);
8430 code = LEU;
8431 /* ... fall through ... */
8434 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8435 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8437 const_op = 0, op1 = const0_rtx;
8438 code = GE;
8439 break;
8441 else
8442 break;
8444 case LEU:
8445 /* unsigned <= 0 is equivalent to == 0 */
8446 if (const_op == 0)
8447 code = EQ;
8449 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8450 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8452 const_op = 0, op1 = const0_rtx;
8453 code = GE;
8455 break;
8457 case GEU:
8458 /* unsigned >= C is equivalent to > (C - 1). */
8459 if (const_op > 1)
8461 const_op -= 1;
8462 op1 = GEN_INT (const_op);
8463 code = GTU;
8464 /* ... fall through ... */
8467 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8468 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8470 const_op = 0, op1 = const0_rtx;
8471 code = LT;
8473 else
8474 break;
8476 case GTU:
8477 /* unsigned > 0 is equivalent to != 0 */
8478 if (const_op == 0)
8479 code = NE;
8481 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8482 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8484 const_op = 0, op1 = const0_rtx;
8485 code = LT;
8487 break;
8490 /* Compute some predicates to simplify code below. */
8492 equality_comparison_p = (code == EQ || code == NE);
8493 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8494 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8495 || code == GEU);
8497 /* If this is a sign bit comparison and we can do arithmetic in
8498 MODE, say that we will only be needing the sign bit of OP0. */
8499 if (sign_bit_comparison_p
8500 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8501 op0 = force_to_mode (op0, mode,
8502 ((HOST_WIDE_INT) 1
8503 << (GET_MODE_BITSIZE (mode) - 1)),
8504 NULL_RTX);
8506 /* Now try cases based on the opcode of OP0. If none of the cases
8507 does a "continue", we exit this loop immediately after the
8508 switch. */
8510 switch (GET_CODE (op0))
8512 case ZERO_EXTRACT:
8513 /* If we are extracting a single bit from a variable position in
8514 a constant that has only a single bit set and are comparing it
8515 with zero, we can convert this into an equality comparison
8516 between the position and the location of the single bit. We can't
8517 do this if the bits are big-endian and we don't have an extzv, since
8518 we then can't know what mode to use for the endianness adjustment. */
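/* Added annotation: for example, with little-endian bit numbering,
       (eq (zero_extract:SI (const_int 4) (const_int 1) POS) (const_int 0))
   extracts a bit that is nonzero exactly when POS == 2 (exact_log2 of 4),
   so the test becomes (ne POS (const_int 2)).  */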
8520 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8521 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8522 && XEXP (op0, 1) == const1_rtx
8523 && equality_comparison_p && const_op == 0
8524 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8526 #if BITS_BIG_ENDIAN
8527 i = (GET_MODE_BITSIZE
8528 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8529 #endif
8531 op0 = XEXP (op0, 2);
8532 op1 = GEN_INT (i);
8533 const_op = i;
8535 /* Result is nonzero iff shift count is equal to I. */
8536 code = reverse_condition (code);
8537 continue;
8539 #endif
8541 /* ... fall through ... */
8543 case SIGN_EXTRACT:
8544 tem = expand_compound_operation (op0);
8545 if (tem != op0)
8547 op0 = tem;
8548 continue;
8550 break;
8552 case NOT:
8553 /* If testing for equality, we can take the NOT of the constant. */
8554 if (equality_comparison_p
8555 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8557 op0 = XEXP (op0, 0);
8558 op1 = tem;
8559 continue;
8562 /* If just looking at the sign bit, reverse the sense of the
8563 comparison. */
8564 if (sign_bit_comparison_p)
8566 op0 = XEXP (op0, 0);
8567 code = (code == GE ? LT : GE);
8568 continue;
8570 break;
8572 case NEG:
8573 /* If testing for equality, we can take the NEG of the constant. */
8574 if (equality_comparison_p
8575 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8577 op0 = XEXP (op0, 0);
8578 op1 = tem;
8579 continue;
8582 /* The remaining cases only apply to comparisons with zero. */
8583 if (const_op != 0)
8584 break;
8586 /* When X is ABS or is known positive,
8587 (neg X) is < 0 if and only if X != 0. */
8589 if (sign_bit_comparison_p
8590 && (GET_CODE (XEXP (op0, 0)) == ABS
8591 || (mode_width <= HOST_BITS_PER_WIDE_INT
8592 && (nonzero_bits (XEXP (op0, 0), mode)
8593 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
8595 op0 = XEXP (op0, 0);
8596 code = (code == LT ? NE : EQ);
8597 continue;
8600 /* If we have NEG of something whose two high-order bits are the
8601 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8602 if (num_sign_bit_copies (op0, mode) >= 2)
8604 op0 = XEXP (op0, 0);
8605 code = swap_condition (code);
8606 continue;
8608 break;
8610 case ROTATE:
8611 /* If we are testing equality and our count is a constant, we
8612 can perform the inverse operation on our RHS. */
8613 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8614 && (tem = simplify_binary_operation (ROTATERT, mode,
8615 op1, XEXP (op0, 1))) != 0)
8617 op0 = XEXP (op0, 0);
8618 op1 = tem;
8619 continue;
8622 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8623 a particular bit. Convert it to an AND of a constant of that
8624 bit. This will be converted into a ZERO_EXTRACT. */
8625 if (const_op == 0 && sign_bit_comparison_p
8626 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8627 && mode_width <= HOST_BITS_PER_WIDE_INT)
8629 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8630 ((HOST_WIDE_INT) 1
8631 << (mode_width - 1
8632 - INTVAL (XEXP (op0, 1)))));
8633 code = (code == LT ? NE : EQ);
8634 continue;
8637 /* ... fall through ... */
8639 case ABS:
8640 /* ABS is ignorable inside an equality comparison with zero. */
8641 if (const_op == 0 && equality_comparison_p)
8643 op0 = XEXP (op0, 0);
8644 continue;
8646 break;
8649 case SIGN_EXTEND:
8650 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8651 to (compare FOO CONST) if CONST fits in FOO's mode and we
8652 are either testing inequality or have an unsigned comparison
8653 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8654 if (! unsigned_comparison_p
8655 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8656 <= HOST_BITS_PER_WIDE_INT)
8657 && ((unsigned HOST_WIDE_INT) const_op
8658 < (((HOST_WIDE_INT) 1
8659 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
8661 op0 = XEXP (op0, 0);
8662 continue;
8664 break;
8666 case SUBREG:
8667 /* Check for the case where we are comparing A - C1 with C2,
8668 both constants are smaller than 1/2 the maximum positive
8669 value in MODE, and the comparison is equality or unsigned.
8670 In that case, if A is either zero-extended to MODE or has
8671 sufficient sign bits so that the high-order bit in MODE
8672 is a copy of the sign in the inner mode, we can prove that it is
8673 safe to do the operation in the wider mode. This simplifies
8674 many range checks. */
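/* Added annotation (illustrative sketch with hypothetical modes):
       (leu (subreg:SI (plus:DI A (const_int -10)) 0) (const_int 5))
   accepts exactly A in [10, 15] when A is known to be zero-extended
   in DImode; under the conditions below the SUBREG is dropped and the
   comparison proceeds on the DImode PLUS itself.  */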
8676 if (mode_width <= HOST_BITS_PER_WIDE_INT
8677 && subreg_lowpart_p (op0)
8678 && GET_CODE (SUBREG_REG (op0)) == PLUS
8679 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8680 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8681 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8682 < GET_MODE_MASK (mode) / 2)
8683 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
8684 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
8685 GET_MODE (SUBREG_REG (op0)))
8686 & ~ GET_MODE_MASK (mode))
8687 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8688 GET_MODE (SUBREG_REG (op0)))
8689 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8690 - GET_MODE_BITSIZE (mode)))))
8692 op0 = SUBREG_REG (op0);
8693 continue;
8696 /* If the inner mode is narrower and we are extracting the low part,
8697 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8698 if (subreg_lowpart_p (op0)
8699 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8700 /* Fall through */ ;
8701 else
8702 break;
8704 /* ... fall through ... */
8706 case ZERO_EXTEND:
8707 if ((unsigned_comparison_p || equality_comparison_p)
8708 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8709 <= HOST_BITS_PER_WIDE_INT)
8710 && ((unsigned HOST_WIDE_INT) const_op
8711 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8713 op0 = XEXP (op0, 0);
8714 continue;
8716 break;
8718 case PLUS:
8719 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
8720 this for equality comparisons due to pathological cases involving
8721 overflows. */
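/* Added annotation: for example, (eq (plus:SI X (const_int 3))
   (const_int 7)) becomes (eq X (const_int 4)) by folding 7 - 3.  The
   same rewrite would be unsafe for LT or GT, where the addition may
   have wrapped around.  */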
8722 if (equality_comparison_p
8723 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8724 op1, XEXP (op0, 1))))
8726 op0 = XEXP (op0, 0);
8727 op1 = tem;
8728 continue;
8731 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
8732 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8733 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8735 op0 = XEXP (XEXP (op0, 0), 0);
8736 code = (code == LT ? EQ : NE);
8737 continue;
8739 break;
8741 case MINUS:
8742 /* (eq (minus A B) C) -> (eq A (plus B C)) or
8743 (eq B (minus A C)), whichever simplifies. We can only do
8744 this for equality comparisons due to pathological cases involving
8745 overflows. */
8746 if (equality_comparison_p
8747 && 0 != (tem = simplify_binary_operation (PLUS, mode,
8748 XEXP (op0, 1), op1)))
8750 op0 = XEXP (op0, 0);
8751 op1 = tem;
8752 continue;
8755 if (equality_comparison_p
8756 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8757 XEXP (op0, 0), op1)))
8759 op0 = XEXP (op0, 1);
8760 op1 = tem;
8761 continue;
8764 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8765 of bits in X minus 1, is one iff X > 0. */
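/* Added annotation: with mode_width == 32, (ashiftrt:SI X (const_int 31))
   is 0 when X >= 0 and -1 when X < 0, so the MINUS is -X for X >= 0
   and -1 - X (which is non-negative) for X < 0; its sign bit is
   therefore set exactly when X > 0.  */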
8766 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8767 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8768 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8769 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8771 op0 = XEXP (op0, 1);
8772 code = (code == GE ? LE : GT);
8773 continue;
8775 break;
8777 case XOR:
8778 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8779 if C is zero or B is a constant. */
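/* Added annotation: for example, (eq (xor:SI A (const_int 5))
   (const_int 9)) becomes (eq A (const_int 12)), since 5 ^ 9 == 12 and
   XOR by a constant is its own inverse.  */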
8780 if (equality_comparison_p
8781 && 0 != (tem = simplify_binary_operation (XOR, mode,
8782 XEXP (op0, 1), op1)))
8784 op0 = XEXP (op0, 0);
8785 op1 = tem;
8786 continue;
8788 break;
8790 case EQ: case NE:
8791 case LT: case LTU: case LE: case LEU:
8792 case GT: case GTU: case GE: case GEU:
8793 /* We can't do anything if OP0 is a condition code value, rather
8794 than an actual data value. */
8795 if (const_op != 0
8796 #ifdef HAVE_cc0
8797 || XEXP (op0, 0) == cc0_rtx
8798 #endif
8799 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8800 break;
8802 /* Get the two operands being compared. */
8803 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8804 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8805 else
8806 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8808 /* Check for the cases where we simply want the result of the
8809 earlier test or the opposite of that result. */
8810 if (code == NE
8811 || (code == EQ && reversible_comparison_p (op0))
8812 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8813 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8814 && (STORE_FLAG_VALUE
8815 & (((HOST_WIDE_INT) 1
8816 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
8817 && (code == LT
8818 || (code == GE && reversible_comparison_p (op0)))))
8820 code = (code == LT || code == NE
8821 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8822 op0 = tem, op1 = tem1;
8823 continue;
8825 break;
8827 case IOR:
8828 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8829 iff X <= 0. */
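/* Added annotation: when X == 0 the PLUS yields -1, whose sign bit is
   set; when X < 0 the sign bit of X itself is set; and when X > 0 both
   X and X - 1 are non-negative, so the IOR has a clear sign bit.
   Hence the GE/LT test on the IOR becomes a GT/LE test on X.  */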
8830 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8831 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8832 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8834 op0 = XEXP (op0, 1);
8835 code = (code == GE ? GT : LE);
8836 continue;
8838 break;
8840 case AND:
8841 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8842 will be converted to a ZERO_EXTRACT later. */
8843 if (const_op == 0 && equality_comparison_p
8844 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8845 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8846 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8848 op0 = simplify_and_const_int
8849 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8850 XEXP (op0, 1),
8851 XEXP (XEXP (op0, 0), 1)),
8852 (HOST_WIDE_INT) 1);
8853 continue;
8856 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8857 zero and X is a comparison and C1 and C2 describe only bits set
8858 in STORE_FLAG_VALUE, we can compare with X. */
8859 if (const_op == 0 && equality_comparison_p
8860 && mode_width <= HOST_BITS_PER_WIDE_INT
8861 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8862 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8863 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8864 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
8865 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8867 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8868 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8869 if ((~ STORE_FLAG_VALUE & mask) == 0
8870 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8871 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8872 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8874 op0 = XEXP (XEXP (op0, 0), 0);
8875 continue;
8879 /* If we are doing an equality comparison of an AND of a bit equal
8880 to the sign bit, replace this with a LT or GE comparison of
8881 the underlying value. */
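/* Added annotation: in SImode, (eq (and:SI X (const_int 0x80000000))
   (const_int 0)) tests that the sign bit of X is clear, which is
   (ge X (const_int 0)); the NE form likewise becomes
   (lt X (const_int 0)).  */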
8882 if (equality_comparison_p
8883 && const_op == 0
8884 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8885 && mode_width <= HOST_BITS_PER_WIDE_INT
8886 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8887 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
8889 op0 = XEXP (op0, 0);
8890 code = (code == EQ ? GE : LT);
8891 continue;
8894 /* If this AND operation is really a ZERO_EXTEND from a narrower
8895 mode, the constant fits within that mode, and this is either an
8896 equality or unsigned comparison, try to do this comparison in
8897 the narrower mode. */
8898 if ((equality_comparison_p || unsigned_comparison_p)
8899 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8900 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8901 & GET_MODE_MASK (mode))
8902 + 1)) >= 0
8903 && const_op >> i == 0
8904 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8906 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8907 continue;
8909 break;
8911 case ASHIFT:
8912 case LSHIFT:
8913 /* If we have (compare (xshift FOO N) (const_int C)) and
8914 the high order N bits of FOO (N+1 if an inequality comparison)
8915 are known to be zero, we can do this by comparing FOO with C
8916 shifted right N bits so long as the low-order N bits of C are
8917 zero. */
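/* Added annotation: for example, if the two high-order bits of X are
   known to be zero, (eq (ashift:SI X (const_int 2)) (const_int 20))
   becomes (eq X (const_int 5)); the low two bits of 20 are zero, so
   shifting the constant right loses nothing.  */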
8918 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8919 && INTVAL (XEXP (op0, 1)) >= 0
8920 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
8921 < HOST_BITS_PER_WIDE_INT)
8922 && ((const_op
8923 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
8924 && mode_width <= HOST_BITS_PER_WIDE_INT
8925 && (nonzero_bits (XEXP (op0, 0), mode)
8926 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8927 + ! equality_comparison_p))) == 0)
8929 const_op >>= INTVAL (XEXP (op0, 1));
8930 op1 = GEN_INT (const_op);
8931 op0 = XEXP (op0, 0);
8932 continue;
8935 /* If we are doing a sign bit comparison, it means we are testing
8936 a particular bit. Convert it to the appropriate AND. */
8937 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8938 && mode_width <= HOST_BITS_PER_WIDE_INT)
8940 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8941 ((HOST_WIDE_INT) 1
8942 << (mode_width - 1
8943 - INTVAL (XEXP (op0, 1)))));
8944 code = (code == LT ? NE : EQ);
8945 continue;
8948 /* If this is an equality comparison with zero and we are shifting
8949 the low bit to the sign bit, we can convert this to an AND of the
8950 low-order bit. */
8951 if (const_op == 0 && equality_comparison_p
8952 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8953 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8955 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8956 (HOST_WIDE_INT) 1);
8957 continue;
8959 break;
8961 case ASHIFTRT:
8962 /* If this is an equality comparison with zero, we can do this
8963 as a logical shift, which might be much simpler. */
8964 if (equality_comparison_p && const_op == 0
8965 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8967 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8968 XEXP (op0, 0),
8969 INTVAL (XEXP (op0, 1)));
8970 continue;
8973 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8974 do the comparison in a narrower mode. */
8975 if (! unsigned_comparison_p
8976 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8977 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8978 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8979 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
8980 MODE_INT, 1)) != BLKmode
8981 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8982 || ((unsigned HOST_WIDE_INT) - const_op
8983 <= GET_MODE_MASK (tmode))))
8985 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8986 continue;
8989 /* ... fall through ... */
8990 case LSHIFTRT:
8991 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
8992 the low order N bits of FOO are known to be zero, we can do this
8993 by comparing FOO with C shifted left N bits so long as no
8994 overflow occurs. */
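/* Added annotation: for example, if the low two bits of X are known
   to be zero, (eq (lshiftrt:SI X (const_int 2)) (const_int 5)) becomes
   (eq X (const_int 20)); the floor_log2 test ensures the shifted
   constant 20 still fits in the mode.  */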
8995 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8996 && INTVAL (XEXP (op0, 1)) >= 0
8997 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8998 && mode_width <= HOST_BITS_PER_WIDE_INT
8999 && (nonzero_bits (XEXP (op0, 0), mode)
9000 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
9001 && (const_op == 0
9002 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
9003 < mode_width)))
9005 const_op <<= INTVAL (XEXP (op0, 1));
9006 op1 = GEN_INT (const_op);
9007 op0 = XEXP (op0, 0);
9008 continue;
9011 /* If we are using this shift to extract just the sign bit, we
9012 can replace this with an LT or GE comparison. */
9013 if (const_op == 0
9014 && (equality_comparison_p || sign_bit_comparison_p)
9015 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9016 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9018 op0 = XEXP (op0, 0);
9019 code = (code == NE || code == GT ? LT : GE);
9020 continue;
9022 break;
9025 break;
9028 /* Now make any compound operations involved in this comparison. Then,
9029 check for an outermost SUBREG on OP0 that isn't doing anything or is
9030 paradoxical. The latter case can only occur when it is known that the
9031 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9032 We can never remove a SUBREG for a non-equality comparison because the
9033 sign bit is in a different place in the underlying object. */
9035 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9036 op1 = make_compound_operation (op1, SET);
9038 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9039 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9040 && (code == NE || code == EQ)
9041 && ((GET_MODE_SIZE (GET_MODE (op0))
9042 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9044 op0 = SUBREG_REG (op0);
9045 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9048 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9049 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9050 && (code == NE || code == EQ)
9051 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9052 <= HOST_BITS_PER_WIDE_INT)
9053 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
9054 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9055 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9056 op1),
9057 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
9058 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9059 op0 = SUBREG_REG (op0), op1 = tem;
9061 /* We now do the opposite procedure: Some machines don't have compare
9062 insns in all modes. If OP0's mode is an integer mode smaller than a
9063 word and we can't do a compare in that mode, see if there is a larger
9064 mode for which we can do the compare. There are a number of cases in
9065 which we can use the wider mode. */
9067 mode = GET_MODE (op0);
9068 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9069 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9070 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9071 for (tmode = GET_MODE_WIDER_MODE (mode);
9072 (tmode != VOIDmode
9073 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
9074 tmode = GET_MODE_WIDER_MODE (tmode))
9075 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
9077 /* If the only nonzero bits in OP0 and OP1 are those in the
9078 narrower mode and this is an equality or unsigned comparison,
9079 we can use the wider mode. Similarly for sign-extended
9080 values and equality or signed comparisons. */
9081 if (((code == EQ || code == NE
9082 || code == GEU || code == GTU || code == LEU || code == LTU)
9083 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
9084 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
9085 || ((code == EQ || code == NE
9086 || code == GE || code == GT || code == LE || code == LT)
9087 && (num_sign_bit_copies (op0, tmode)
9088 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
9089 && (num_sign_bit_copies (op1, tmode)
9090 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
9092 op0 = gen_lowpart_for_combine (tmode, op0);
9093 op1 = gen_lowpart_for_combine (tmode, op1);
9094 break;
9097 /* If this is a test for negative, we can make an explicit
9098 test of the sign bit. */
9100 if (op1 == const0_rtx && (code == LT || code == GE)
9101 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9103 op0 = gen_binary (AND, tmode,
9104 gen_lowpart_for_combine (tmode, op0),
9105 GEN_INT ((HOST_WIDE_INT) 1
9106 << (GET_MODE_BITSIZE (mode) - 1)));
9107 code = (code == LT) ? NE : EQ;
9108 break;
9112 *pop0 = op0;
9113 *pop1 = op1;
9115 return code;
9118 /* Return 1 if we know that X, a comparison operation, is not operating
9119 on a floating-point value or is EQ or NE, meaning that we can safely
9120 reverse it. */
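/* Added annotation: the IEEE restriction is about NaNs.  With IEEE
   floats, (lt A B) and (ge A B) are not opposites, since both are
   false when either operand is a NaN; only EQ/NE and integer
   comparisons can be reversed safely.  */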
9122 static int
9123 reversible_comparison_p (x)
9124 rtx x;
9126 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
9127 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
9128 return 1;
9130 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
9132 case MODE_INT:
9133 case MODE_PARTIAL_INT:
9134 case MODE_COMPLEX_INT:
9135 return 1;
9137 case MODE_CC:
9138 x = get_last_value (XEXP (x, 0));
9139 return (x && GET_CODE (x) == COMPARE
9140 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
9143 return 0;
9146 /* Utility function for the following routine. Called when X is part of a value
9147 being stored into reg_last_set_value. Sets reg_last_set_table_tick
9148 for each register mentioned. Similar to mention_regs in cse.c */
9150 static void
9151 update_table_tick (x)
9152 rtx x;
9154 register enum rtx_code code = GET_CODE (x);
9155 register char *fmt = GET_RTX_FORMAT (code);
9156 register int i;
9158 if (code == REG)
9160 int regno = REGNO (x);
9161 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9162 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9164 for (i = regno; i < endregno; i++)
9165 reg_last_set_table_tick[i] = label_tick;
9167 return;
9170 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9171 /* Note that we can't have an "E" in values stored; see
9172 get_last_value_validate. */
9173 if (fmt[i] == 'e')
9174 update_table_tick (XEXP (x, i));
9177 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
9178 are saying that the register is clobbered and we no longer know its
9179 value. If INSN is zero, don't update reg_last_set; this is only permitted
9180 with VALUE also zero and is used to invalidate the register. */
9182 static void
9183 record_value_for_reg (reg, insn, value)
9184 rtx reg;
9185 rtx insn;
9186 rtx value;
9188 int regno = REGNO (reg);
9189 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9190 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
9191 int i;
9193 /* If VALUE contains REG and we have a previous value for REG, substitute
9194 the previous value. */
9195 if (value && insn && reg_overlap_mentioned_p (reg, value))
9197 rtx tem;
9199 /* Set things up so get_last_value is allowed to see anything set up to
9200 our insn. */
9201 subst_low_cuid = INSN_CUID (insn);
9202 tem = get_last_value (reg);
9204 if (tem)
9205 value = replace_rtx (copy_rtx (value), reg, tem);
9208 /* For each register modified, show we don't know its value, that
9209 we don't know about its bitwise content, that its value has been
9210 updated, and that we don't know the location of the death of the
9211 register. */
9212 for (i = regno; i < endregno; i++)
9214 if (insn)
9215 reg_last_set[i] = insn;
9216 reg_last_set_value[i] = 0;
9217 reg_last_set_mode[i] = 0;
9218 reg_last_set_nonzero_bits[i] = 0;
9219 reg_last_set_sign_bit_copies[i] = 0;
9220 reg_last_death[i] = 0;
9223 /* Mark registers that are being referenced in this value. */
9224 if (value)
9225 update_table_tick (value);
9227 /* Now update the status of each register being set.
9228 If someone is using this register in this block, set this register
9229 to invalid since we will get confused between the two lives in this
9230 basic block. This makes using this register always invalid. In cse, we
9231 scan the table to invalidate all entries using this register, but this
9232 is too much work for us. */
9234 for (i = regno; i < endregno; i++)
9236 reg_last_set_label[i] = label_tick;
9237 if (value && reg_last_set_table_tick[i] == label_tick)
9238 reg_last_set_invalid[i] = 1;
9239 else
9240 reg_last_set_invalid[i] = 0;
9243 /* The value being assigned might refer to X (like in "x++;"). In that
9244 case, we must replace it with (clobber (const_int 0)) to prevent
9245 infinite loops. */
9246 if (value && ! get_last_value_validate (&value,
9247 reg_last_set_label[regno], 0))
9249 value = copy_rtx (value);
9250 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9251 value = 0;
9254 /* For the main register being modified, update the value, the mode, the
9255 nonzero bits, and the number of sign bit copies. */
9257 reg_last_set_value[regno] = value;
9259 if (value)
9261 subst_low_cuid = INSN_CUID (insn);
9262 reg_last_set_mode[regno] = GET_MODE (reg);
9263 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9264 reg_last_set_sign_bit_copies[regno]
9265 = num_sign_bit_copies (value, GET_MODE (reg));
9269 /* Used for communication between the following two routines. */
9270 static rtx record_dead_insn;
9272 /* Called via note_stores from record_dead_and_set_regs to handle one
9273 SET or CLOBBER in an insn. */
9275 static void
9276 record_dead_and_set_regs_1 (dest, setter)
9277 rtx dest, setter;
9279 if (GET_CODE (dest) == REG)
9281 /* If we are setting the whole register, we know its value. Otherwise
9282 show that we don't know the value. We can handle SUBREG in
9283 some cases. */
9284 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9285 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9286 else if (GET_CODE (setter) == SET
9287 && GET_CODE (SET_DEST (setter)) == SUBREG
9288 && SUBREG_REG (SET_DEST (setter)) == dest
9289 && subreg_lowpart_p (SET_DEST (setter)))
9290 record_value_for_reg (dest, record_dead_insn,
9291 gen_lowpart_for_combine (GET_MODE (dest),
9292 SET_SRC (setter)));
9293 else
9294 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
9296 else if (GET_CODE (dest) == MEM
9297 /* Ignore pushes, they clobber nothing. */
9298 && ! push_operand (dest, GET_MODE (dest)))
9299 mem_last_set = INSN_CUID (record_dead_insn);
9302 /* Update the records of when each REG was most recently set or killed
9303 for the things done by INSN. This is the last thing done in processing
9304 INSN in the combiner loop.
9306 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
9307 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
9308 and also the similar information mem_last_set (which insn most recently
9309 modified memory) and last_call_cuid (which insn was the most recent
9310 subroutine call). */
9312 static void
9313 record_dead_and_set_regs (insn)
9314 rtx insn;
9316 register rtx link;
9317 int i;
9319 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9321 if (REG_NOTE_KIND (link) == REG_DEAD
9322 && GET_CODE (XEXP (link, 0)) == REG)
9324 int regno = REGNO (XEXP (link, 0));
9325 int endregno
9326 = regno + (regno < FIRST_PSEUDO_REGISTER
9327 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9328 : 1);
9330 for (i = regno; i < endregno; i++)
9331 reg_last_death[i] = insn;
9333 else if (REG_NOTE_KIND (link) == REG_INC)
9334 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
9337 if (GET_CODE (insn) == CALL_INSN)
9339 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9340 if (call_used_regs[i])
9342 reg_last_set_value[i] = 0;
9343 reg_last_set_mode[i] = 0;
9344 reg_last_set_nonzero_bits[i] = 0;
9345 reg_last_set_sign_bit_copies[i] = 0;
9346 reg_last_death[i] = 0;
9349 last_call_cuid = mem_last_set = INSN_CUID (insn);
9352 record_dead_insn = insn;
9353 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9356 /* Utility routine for the following function. Verify that all the registers
9357 mentioned in *LOC are valid when *LOC was part of a value set when
9358 label_tick == TICK. Return 0 if some are not.
9360 If REPLACE is non-zero, replace the invalid reference with
9361 (clobber (const_int 0)) and return 1. This replacement is useful because
9362 we often can get useful information about the form of a value (e.g., if
9363 it was produced by a shift that always produces -1 or 0) even though
9364 we don't know exactly what registers it was produced from. */
9366 static int
9367 get_last_value_validate (loc, tick, replace)
9368 rtx *loc;
9369 int tick;
9370 int replace;
9372 rtx x = *loc;
9373 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9374 int len = GET_RTX_LENGTH (GET_CODE (x));
9375 int i;
9377 if (GET_CODE (x) == REG)
9379 int regno = REGNO (x);
9380 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9381 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9382 int j;
9384 for (j = regno; j < endregno; j++)
9385 if (reg_last_set_invalid[j]
9386 /* If this is a pseudo-register that was only set once, it is
9387 always valid. */
9388 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9389 && reg_last_set_label[j] > tick))
9391 if (replace)
9392 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9393 return replace;
9396 return 1;
9399 for (i = 0; i < len; i++)
9400 if ((fmt[i] == 'e'
9401 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9402 /* Don't bother with these. They shouldn't occur anyway. */
9403 || fmt[i] == 'E')
9404 return 0;
9406 /* If we haven't found a reason for it to be invalid, it is valid. */
9407 return 1;
9410 /* Get the last value assigned to X, if known. Some registers
9411 in the value may be replaced with (clobber (const_int 0)) if their value
9412 is no longer known reliably. */
9414 static rtx
9415 get_last_value (x)
9416 rtx x;
9418 int regno;
9419 rtx value;
9421 /* If this is a non-paradoxical SUBREG, get the value of its operand and
9422 then convert it to the desired mode. If this is a paradoxical SUBREG,
9423 we cannot predict what values the "extra" bits might have. */
9424 if (GET_CODE (x) == SUBREG
9425 && subreg_lowpart_p (x)
9426 && (GET_MODE_SIZE (GET_MODE (x))
9427 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
9428 && (value = get_last_value (SUBREG_REG (x))) != 0)
9429 return gen_lowpart_for_combine (GET_MODE (x), value);
9431 if (GET_CODE (x) != REG)
9432 return 0;
9434 regno = REGNO (x);
9435 value = reg_last_set_value[regno];
9437 /* If we don't have a value or if it isn't for this basic block, return 0. */
9439 if (value == 0
9440 || (reg_n_sets[regno] != 1
9441 && reg_last_set_label[regno] != label_tick))
9442 return 0;
9444 /* If the value was set in a later insn than the ones we are processing,
9445 we can't use it even if the register was only set once, but make a quick
9446 check to see if the previous insn set it to something. This is commonly
9447 the case when the same pseudo is used by repeated insns. */
9449 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
9451 rtx insn, set;
9453 for (insn = prev_nonnote_insn (subst_insn);
9454 insn && INSN_CUID (insn) >= subst_low_cuid;
9455 insn = prev_nonnote_insn (insn))
9458 if (insn
9459 && (set = single_set (insn)) != 0
9460 && rtx_equal_p (SET_DEST (set), x))
9462 value = SET_SRC (set);
9464 /* Make sure that VALUE doesn't reference X. Replace any
9465 explicit references with a CLOBBER. If there are any remaining
9466 references (rare), don't use the value. */
9468 if (reg_mentioned_p (x, value))
9469 value = replace_rtx (copy_rtx (value), x,
9470 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9472 if (reg_overlap_mentioned_p (x, value))
9473 return 0;
9475 else
9476 return 0;
9479 /* If the value has all its registers valid, return it. */
9480 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9481 return value;
9483 /* Otherwise, make a copy and replace any invalid register with
9484 (clobber (const_int 0)). If that fails for some reason, return 0. */
9486 value = copy_rtx (value);
9487 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9488 return value;
9490 return 0;
9493 /* Return nonzero if expression X refers to a REG or to memory
9494 that is set in an instruction more recent than FROM_CUID. */
9496 static int
9497 use_crosses_set_p (x, from_cuid)
9498 register rtx x;
9499 int from_cuid;
9501 register char *fmt;
9502 register int i;
9503 register enum rtx_code code = GET_CODE (x);
9505 if (code == REG)
9507 register int regno = REGNO (x);
9508 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
9509 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9511 #ifdef PUSH_ROUNDING
9512 /* Don't allow uses of the stack pointer to be moved,
9513 because we don't know whether the move crosses a push insn. */
9514 if (regno == STACK_POINTER_REGNUM)
9515 return 1;
9516 #endif
9517 for (;regno < endreg; regno++)
9518 if (reg_last_set[regno]
9519 && INSN_CUID (reg_last_set[regno]) > from_cuid)
9520 return 1;
9521 return 0;
9524 if (code == MEM && mem_last_set > from_cuid)
9525 return 1;
9527 fmt = GET_RTX_FORMAT (code);
9529 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9531 if (fmt[i] == 'E')
9533 register int j;
9534 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9535 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9536 return 1;
9538 else if (fmt[i] == 'e'
9539 && use_crosses_set_p (XEXP (x, i), from_cuid))
9540 return 1;
9542 return 0;
9545 /* Define three variables used for communication between the following
9546 routines. */
9548 static int reg_dead_regno, reg_dead_endregno;
9549 static int reg_dead_flag;
9551 /* Function called via note_stores from reg_dead_at_p.
9553 If DEST is within [reg_dead_regno, reg_dead_endregno), set
9554 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
9556 static void
9557 reg_dead_at_p_1 (dest, x)
9558 rtx dest;
9559 rtx x;
9561 int regno, endregno;
9563 if (GET_CODE (dest) != REG)
9564 return;
9566 regno = REGNO (dest);
9567 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9568 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9570 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9571 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9574 /* Return non-zero if REG is known to be dead at INSN.
9576 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9577 referencing REG, it is dead. If we hit a SET referencing REG, it is
9578 live. Otherwise, see if it is live or dead at the start of the basic
9579 block we are in. */
9581 static int
9582 reg_dead_at_p (reg, insn)
9583 rtx reg;
9584 rtx insn;
9586 int block, i;
9588 /* Set variables for reg_dead_at_p_1. */
9589 reg_dead_regno = REGNO (reg);
9590 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9591 ? HARD_REGNO_NREGS (reg_dead_regno,
9592 GET_MODE (reg))
9593 : 1);
9595 reg_dead_flag = 0;
9597 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9598 beginning of function. */
9599 for (; insn && GET_CODE (insn) != CODE_LABEL;
9600 insn = prev_nonnote_insn (insn))
9602 note_stores (PATTERN (insn), reg_dead_at_p_1);
9603 if (reg_dead_flag)
9604 return reg_dead_flag == 1 ? 1 : 0;
9606 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
9607 return 1;
9610 /* Get the basic block number that we were in. */
9611 if (insn == 0)
9612 block = 0;
9613 else
9615 for (block = 0; block < n_basic_blocks; block++)
9616 if (insn == basic_block_head[block])
9617 break;
9619 if (block == n_basic_blocks)
9620 return 0;
9623 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
9624 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
9625 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
9626 return 0;
9628 return 1;
9631 /* Remove register number REGNO from the dead registers list of INSN.
9633 Return the note used to record the death, if there was one. */
9636 remove_death (regno, insn)
9637 int regno;
9638 rtx insn;
9640 register rtx note = find_regno_note (insn, REG_DEAD, regno);
9642 if (note)
9644 reg_n_deaths[regno]--;
9645 remove_note (insn, note);
9648 return note;
9651 /* For each register (hardware or pseudo) used within expression X, if its
9652 death is in an instruction with cuid between FROM_CUID (inclusive) and
9653 TO_INSN (exclusive), put a REG_DEAD note for that register in the
9654 list headed by PNOTES.
9656 This is done when X is being merged by combination into TO_INSN. These
9657 notes will then be distributed as needed. */
9659 static void
9660 move_deaths (x, from_cuid, to_insn, pnotes)
9661 rtx x;
9662 int from_cuid;
9663 rtx to_insn;
9664 rtx *pnotes;
9666 register char *fmt;
9667 register int len, i;
9668 register enum rtx_code code = GET_CODE (x);
9670 if (code == REG)
9672 register int regno = REGNO (x);
9673 register rtx where_dead = reg_last_death[regno];
9675 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9676 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9678 rtx note = remove_death (regno, where_dead);
9680 /* It is possible for the call above to return 0. This can occur
9681 when reg_last_death points to I2 or I1 that we combined with.
9682 In that case make a new note.
9684 We must also check for the case where X is a hard register
9685 and NOTE is a death note for a range of hard registers
9686 including X. In that case, we must put REG_DEAD notes for
9687 the remaining registers in place of NOTE. */
9689 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
9690 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
9691 != GET_MODE_SIZE (GET_MODE (x))))
9693 int deadregno = REGNO (XEXP (note, 0));
9694 int deadend
9695 = (deadregno + HARD_REGNO_NREGS (deadregno,
9696 GET_MODE (XEXP (note, 0))));
9697 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9698 int i;
9700 for (i = deadregno; i < deadend; i++)
9701 if (i < regno || i >= ourend)
9702 REG_NOTES (where_dead)
9703 = gen_rtx (EXPR_LIST, REG_DEAD,
9704 gen_rtx (REG, word_mode, i),
9705 REG_NOTES (where_dead));
9708 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
9710 XEXP (note, 1) = *pnotes;
9711 *pnotes = note;
9713 else
9714 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
9716 reg_n_deaths[regno]++;
9719 return;
9722 else if (GET_CODE (x) == SET)
9724 rtx dest = SET_DEST (x);
9726 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9728 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9729 that accesses one word of a multi-word item, some
9730 piece of every register in the expression is used by
9731 this insn, so remove any old death. */
9733 if (GET_CODE (dest) == ZERO_EXTRACT
9734 || GET_CODE (dest) == STRICT_LOW_PART
9735 || (GET_CODE (dest) == SUBREG
9736 && (((GET_MODE_SIZE (GET_MODE (dest))
9737 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9738 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9739 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
9741 move_deaths (dest, from_cuid, to_insn, pnotes);
9742 return;
9745 /* If this is some other SUBREG, we know it replaces the entire
9746 value, so use that as the destination. */
9747 if (GET_CODE (dest) == SUBREG)
9748 dest = SUBREG_REG (dest);
9750 /* If this is a MEM, adjust deaths of anything used in the address.
9751 For a REG (the only other possibility), the entire value is
9752 being replaced so the old value is not used in this insn. */
9754 if (GET_CODE (dest) == MEM)
9755 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9756 return;
9759 else if (GET_CODE (x) == CLOBBER)
9760 return;
9762 len = GET_RTX_LENGTH (code);
9763 fmt = GET_RTX_FORMAT (code);
9765 for (i = 0; i < len; i++)
9767 if (fmt[i] == 'E')
9769 register int j;
9770 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9771 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9773 else if (fmt[i] == 'e')
9774 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9778 /* Return 1 if X is the target of a bit-field assignment in BODY, the
9779 pattern of an insn. X must be a REG. */
9781 static int
9782 reg_bitfield_target_p (x, body)
9783 rtx x;
9784 rtx body;
9786 int i;
9788 if (GET_CODE (body) == SET)
9790 rtx dest = SET_DEST (body);
9791 rtx target;
9792 int regno, tregno, endregno, endtregno;
9794 if (GET_CODE (dest) == ZERO_EXTRACT)
9795 target = XEXP (dest, 0);
9796 else if (GET_CODE (dest) == STRICT_LOW_PART)
9797 target = SUBREG_REG (XEXP (dest, 0));
9798 else
9799 return 0;
9801 if (GET_CODE (target) == SUBREG)
9802 target = SUBREG_REG (target);
9804 if (GET_CODE (target) != REG)
9805 return 0;
9807 tregno = REGNO (target), regno = REGNO (x);
9808 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9809 return target == x;
9811 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9812 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9814 return endregno > tregno && regno < endtregno;
9817 else if (GET_CODE (body) == PARALLEL)
9818 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
9819 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
9820 return 1;
9822 return 0;
9825 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9826 as appropriate. I3 and I2 are the insns resulting from combining the
9827 insns that included FROM_INSN (I2 may be zero).
9829 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9830 not need REG_DEAD notes because they are being substituted for. This
9831 saves searching in the most common cases.
9833 Each note in the list is either ignored or placed on some insns, depending
9834 on the type of note. */
9836 static void
9837 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9838 rtx notes;
9839 rtx from_insn;
9840 rtx i3, i2;
9841 rtx elim_i2, elim_i1;
9843 rtx note, next_note;
9844 rtx tem;
9846 for (note = notes; note; note = next_note)
9848 rtx place = 0, place2 = 0;
9850 /* If this NOTE references a pseudo register, ensure it references
9851 the latest copy of that register. */
9852 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9853 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9854 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9856 next_note = XEXP (note, 1);
9857 switch (REG_NOTE_KIND (note))
9859 case REG_UNUSED:
9860 /* If this register is set or clobbered in I3, put the note there
9861 unless there is one already. */
9862 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9864 if (! (GET_CODE (XEXP (note, 0)) == REG
9865 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9866 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9867 place = i3;
9869 /* Otherwise, if this register is used by I3, then this register
9870 now dies here, so we must put a REG_DEAD note here unless there
9871 is one already. */
9872 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9873 && ! (GET_CODE (XEXP (note, 0)) == REG
9874 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9875 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9877 PUT_REG_NOTE_KIND (note, REG_DEAD);
9878 place = i3;
9880 break;
9882 case REG_EQUAL:
9883 case REG_EQUIV:
9884 case REG_NONNEG:
9885 /* These notes say something about results of an insn. We can
9886 only support them if they used to be on I3 in which case they
9887 remain on I3. Otherwise they are ignored.
9889 If the note refers to an expression that is not a constant, we
9890 must also ignore the note since we cannot tell whether the
9891 equivalence is still true. It might be possible to do
9892 slightly better than this (we only have a problem if I2DEST
9893 or I1DEST is present in the expression), but it doesn't
9894 seem worth the trouble. */
9896 if (from_insn == i3
9897 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
9898 place = i3;
9899 break;
9901 case REG_INC:
9902 case REG_NO_CONFLICT:
9903 case REG_LABEL:
9904 /* These notes say something about how a register is used. They must
9905 be present on any use of the register in I2 or I3. */
9906 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9907 place = i3;
9909 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9911 if (place)
9912 place2 = i2;
9913 else
9914 place = i2;
9916 break;
9918 case REG_WAS_0:
9919 /* It is too much trouble to try to see if this note is still
9920 correct in all situations. It is better to simply delete it. */
9921 break;
9923 case REG_RETVAL:
9924 /* If the insn previously containing this note still exists,
9925 put it back where it was. Otherwise move it to the previous
9926 insn. Adjust the corresponding REG_LIBCALL note. */
9927 if (GET_CODE (from_insn) != NOTE)
9928 place = from_insn;
9929 else
9931 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
9932 place = prev_real_insn (from_insn);
9933 if (tem && place)
9934 XEXP (tem, 0) = place;
9936 break;
9938 case REG_LIBCALL:
9939 /* This is handled similarly to REG_RETVAL. */
9940 if (GET_CODE (from_insn) != NOTE)
9941 place = from_insn;
9942 else
9944 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
9945 place = next_real_insn (from_insn);
9946 if (tem && place)
9947 XEXP (tem, 0) = place;
9949 break;
9951 case REG_DEAD:
9952 /* If the register is used as an input in I3, it dies there.
9953 Similarly for I2, if it is non-zero and adjacent to I3.
9955 If the register is not used as an input in either I3 or I2
9956 and it is not one of the registers we were supposed to eliminate,
9957 there are two possibilities. We might have a non-adjacent I2
9958 or we might have somehow eliminated an additional register
9959 from a computation. For example, we might have had A & B where
9960 we discover that B will always be zero. In this case we will
9961 eliminate the reference to A.
9963 In both cases, we must search to see if we can find a previous
9964 use of A and put the death note there. */
9966 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9967 place = i3;
9968 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9969 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9970 place = i2;
9972 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9973 break;
9975 /* If the register is used in both I2 and I3 and it dies in I3,
9976 we might have added another reference to it. If reg_n_refs
9977 was 2, bump it to 3. This has to be correct since the
9978 register must have been set somewhere. The reason this is
9979 done is because local-alloc.c treats 2 references as a
9980 special case. */
9982 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9983 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
9984 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9985 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9987 if (place == 0)
9988 for (tem = prev_nonnote_insn (i3);
9989 tem && (GET_CODE (tem) == INSN
9990 || GET_CODE (tem) == CALL_INSN);
9991 tem = prev_nonnote_insn (tem))
9993 /* If the register is being set at TEM, see if that is all
9994 TEM is doing. If so, delete TEM. Otherwise, make this
9995 into a REG_UNUSED note instead. */
9996 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9998 rtx set = single_set (tem);
10000 /* Verify that it was the set, and not a clobber that
10001 modified the register. */
10003 if (set != 0 && ! side_effects_p (SET_SRC (set))
10004 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
10006 /* Move the notes and links of TEM elsewhere.
10007 This might delete other dead insns recursively.
10008 First set the pattern to something that won't use
10009 any register. */
10011 PATTERN (tem) = pc_rtx;
10013 distribute_notes (REG_NOTES (tem), tem, tem,
10014 NULL_RTX, NULL_RTX, NULL_RTX);
10015 distribute_links (LOG_LINKS (tem));
10017 PUT_CODE (tem, NOTE);
10018 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
10019 NOTE_SOURCE_FILE (tem) = 0;
10021 else
10023 PUT_REG_NOTE_KIND (note, REG_UNUSED);
10025 /* If there isn't already a REG_UNUSED note, put one
10026 here. */
10027 if (! find_regno_note (tem, REG_UNUSED,
10028 REGNO (XEXP (note, 0))))
10029 place = tem;
10030 break;
10033 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
10035 place = tem;
10036 break;
10040 /* If the register is set or already dead at PLACE, we needn't do
10041 anything with this note if it is still a REG_DEAD note.
10043 Note that we cannot use just `dead_or_set_p' here since we can
10044 convert an assignment to a register into a bit-field assignment.
10045 Therefore, we must also omit the note if the register is the
10046 target of a bitfield assignment. */
10048 if (place && REG_NOTE_KIND (note) == REG_DEAD)
10050 int regno = REGNO (XEXP (note, 0));
10052 if (dead_or_set_p (place, XEXP (note, 0))
10053 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
10055 /* Unless the register previously died in PLACE, clear
10056 reg_last_death. [I no longer understand why this is
10057 being done.] */
10058 if (reg_last_death[regno] != place)
10059 reg_last_death[regno] = 0;
10060 place = 0;
10062 else
10063 reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must add a USE for that piece before
		 PLACE and put the appropriate REG_DEAD note on it.

		 An alternative would be to put a REG_UNUSED for the pieces
		 on the insn that set the register, but that can't be done if
		 it is not in the same block.  It is simpler, though less
		 efficient, to add the USE insns.  */
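
	      /* For instance (hard register numbers and modes are
		 hypothetical): if a DImode value dying here occupies hard
		 regs 2 and 3 but PLACE mentions only reg 2, we emit
		     (insn (use (reg:SI 3)))
		 just before PLACE, carrying a REG_DEAD note for
		 (reg:SI 3), so the death of the upper word is still
		 recorded.  */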

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  int i;

		  for (i = regno; i < endregno; i++)
		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
		      {
			rtx piece = gen_rtx (REG, word_mode, i);
			rtx p;

			/* See if we already placed a USE note for this
			   register in front of PLACE.  */
			for (p = place;
			     GET_CODE (PREV_INSN (p)) == INSN
			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
			     p = PREV_INSN (p))
			  if (rtx_equal_p (piece,
					   XEXP (PATTERN (PREV_INSN (p)), 0)))
			    {
			      p = 0;
			      break;
			    }

			if (p)
			  {
			    rtx use_insn
			      = emit_insn_before (gen_rtx (USE, VOIDmode,
							   piece),
						  p);
			    REG_NOTES (use_insn)
			      = gen_rtx (EXPR_LIST, REG_DEAD, piece,
					 REG_NOTES (use_insn));
			  }

			all_used = 0;
		      }

		  /* Check for the case where the register dying partially
		     overlaps the register set by this insn.  */
		  if (all_used)
		    for (i = regno; i < endregno; i++)
		      if (dead_or_set_regno_p (place, i))
			{
			  all_used = 0;
			  break;
			}

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 still used and that are not already dead or set.  */

		      for (i = regno; i < endregno; i++)
			{
			  rtx piece = gen_rtx (REG, word_mode, i);

			  if (reg_referenced_p (piece, PATTERN (place))
			      && ! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
							 piece,
							 REG_NOTES (place));
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	reg_n_deaths[REGNO (XEXP (note, 0))]--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (note, 0))]++;

	  REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
					XEXP (note, 0), REG_NOTES (place2));
	}
    }
}

/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */
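
/* A hypothetical example: suppose a link on I2 pointed at an earlier insn
   setting (reg 100), and combination has made I3 rather than I2 the first
   user of (reg 100).  Scanning forward from the setting insn, the loop
   below finds I3 and moves the link there.  */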

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only way that could happen is if
	 we replace I3, I2, and I1 by I3 and I2.  But in that case the
	 destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;
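
      /* For example, a link whose target is a PARALLEL of two SETs is
	 dropped here: single_set ordinarily returns 0 for such a pattern,
	 and we cannot tell which of the two destinations the link was
	 tracking.  */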

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */
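
      /* That is, the loop below stops at the first insn whose pattern
	 mentions the register: if that insn actually uses the register,
	 it becomes the link's new home; if it merely sets or clobbers
	 it, the link is dropped.  The scan never crosses into the next
	 basic block.  */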

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block == n_basic_blocks - 1
		     || basic_block_head[this_basic_block + 1] != insn));
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	    && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;
	    }
	}
    }
}

void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}
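
/* A sample of what the call above writes into the dump file (the numbers
   are illustrative only):

   ;; Combiner statistics: 120 attempts, 40 substitutions (2 requiring new space),
   ;; 37 successes.  */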

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}