/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information isn't
   completely updated (however this is only a local issue since it is
   regenerated before the next pass that uses it):

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
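
/* An illustrative (and purely hypothetical) two-insn combination, assuming
   a target whose machine description recognizes the combined pattern:

	A: (set (reg 116) (neg:SI (reg 115)))
	B: (set (reg 117) (mult:SI (reg 116) (reg 118)))

   With B's LOG_LINKS pointing at A, try_combine substitutes A's source for
   the use of (reg 116) in B, giving

	B': (set (reg 117) (mult:SI (neg:SI (reg 115)) (reg 118)))

   If B' is recognized by the target, A is deleted and B is replaced by B';
   otherwise all the substitutions are undone.  */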
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "target.h"
#include "optabs.h"
#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file.  */
#include "output.h"
#include "params.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#include "cgraph.h"
/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* combine_instructions may try to replace the right hand side of the
   second instruction with the value of an associated REG_EQUAL note
   before throwing it at try_combine.  That is problematic when there
   is a REG_DEAD note for a register used in the old right hand side
   and can cause distribute_notes to do wrong things.  This is the
   second instruction if it has been so modified, null otherwise.  */

static rtx i2mod;

/* When I2MOD is nonnull, this is a copy of the old right hand side.  */

static rtx i2mod_old_rhs;

/* When I2MOD is nonnull, this is a copy of the new right hand side.  */

static rtx i2mod_new_rhs;
typedef struct reg_stat_struct {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
         to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value             the last value assigned
     last_set_label             records the value of label_tick when the
                                register was assigned
     last_set_table_tick        records the value of label_tick when a
                                value using the register is assigned
     last_set_invalid           set to nonzero when it is not valid
                                to use the value of this register in some
                                register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two parameters are out of date).

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int last_set_label;
  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */
  unsigned HOST_WIDE_INT last_set_nonzero_bits;
  char last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode) last_set_mode : 8;

  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.
     (A short illustrative example follows this structure definition.)  */

  unsigned char sign_bit_copies;

  unsigned HOST_WIDE_INT nonzero_bits;

  /* Record the value of the label_tick when the last truncation
     happened.  The field truncated_to_mode is only valid if
     truncation_label == label_tick.  */

  int truncation_label;

  /* Record the last truncation seen for this register.  If truncation
     is not a nop to this mode we might be able to save an explicit
     truncation if we know that value already contains a truncated
     value.  */

  ENUM_BITFIELD(machine_mode) truncated_to_mode : 8;
} reg_stat_type;
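
/* An illustrative (hypothetical) example of the sign_bit_copies and
   nonzero_bits fields above: if an SImode pseudo is only ever set by
   zero-extending QImode loads, its nonzero_bits is 0xff, i.e. all bits
   above the low byte are known to be zero, and a later
   (and:SI (reg n) (const_int 255)) applied to it is redundant, exactly
   as described for `and' in the comment above.  */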
DEF_VEC_O(reg_stat_type);
DEF_VEC_ALLOC_O(reg_stat_type,heap);

static VEC(reg_stat_type,heap) *reg_stat;

/* Record the luid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the luid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_luid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest LUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this LUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_luid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;
/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;
/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;
static bool optimize_this_for_speed_p;

/* Length of the currently allocated uid_insn_cost array.  */

static int max_uid_known;

/* The following array records the insn_rtx_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* The following array records the LOG_LINKS for every insn in the
   instruction stream as an INSN_LIST rtx.  */

static rtx *uid_log_links;

#define INSN_COST(INSN)   (uid_insn_cost[INSN_UID (INSN)])
#define LOG_LINKS(INSN)   (uid_log_links[INSN_UID (INSN)])

/* Incremented for each basic block.  */

static int label_tick;

/* Reset to label_tick for each extended basic block in scanning order.  */

static int label_tick_ebb_start;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while computing them and after combine has
   completed.  This former test prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE };

struct undo
{
  struct undo *next;
  enum undo_kind kind;
  union { rtx r; int i; enum machine_mode m; } old_contents;
  union { rtx *r; int *i; } where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;
/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static rtx reg_nonzero_bits_for_combine (const_rtx, enum machine_mode, const_rtx,
                                         enum machine_mode,
                                         unsigned HOST_WIDE_INT,
                                         unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int, unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (rtx);
static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx, bool);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static const_rtx expand_field_assignment (const_rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
                            rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
                              unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_to_mode (rtx, enum machine_mode,
                          unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
                                     unsigned HOST_WIDE_INT);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
                                   unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
                            HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
                                 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_promoted_subreg (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (const_rtx);
static int use_crosses_set_p (const_rtx, int);
static void reg_dead_at_p_1 (rtx, const_rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static void record_promoted_value (rtx, rtx);
static int unmentioned_reg_p_1 (rtx *, void *);
static bool unmentioned_reg_p (rtx, rtx);
static int record_truncated_value (rtx *, void *);
static void record_truncated_values (rtx *, void *);
static bool reg_truncated_to_mode (enum machine_mode, const_rtx);
static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);
/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

#undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
#define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
/* Try to split PATTERN found in INSN.  This returns NULL_RTX if
   PATTERN can not be split.  Otherwise, it returns an insn sequence.
   This is a wrapper around split_insns which ensures that the
   reg_stat vector is made larger if the splitter creates a new
   register.  */

static rtx
combine_split_insns (rtx pattern, rtx insn)
{
  rtx ret;
  unsigned int nregs;

  ret = split_insns (pattern, insn);
  nregs = max_reg_num ();
  if (nregs > VEC_length (reg_stat_type, reg_stat))
    VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
  return ret;
}
/* This is used by find_single_use to locate an rtx in LOC that
   contains exactly one use of DEST, which is typically either a REG
   or CC0.  It returns a pointer to the innermost rtx expression
   containing DEST.  Appearances of DEST that are being used to
   totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = NULL;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == NULL)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return NULL;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == NULL)
                result = this_result;
              else if (this_result)
                return NULL;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

static rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  basic_block bb;
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (!REG_P (dest))
    return 0;

  bb = BLOCK_FOR_INSN (insn);
  for (next = NEXT_INSN (insn);
       next && BLOCK_FOR_INSN (next) == bb;
       next = NEXT_INSN (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
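
/* For illustration (a hypothetical example): if insn 10 sets (reg 100),
   insn 12 is the only later insn in the block mentioning it, e.g.

	(set (reg 101) (plus:SI (reg 100) (const_int 1)))

   and insn 12 has a REG_DEAD note for (reg 100) plus a LOG_LINKS entry
   pointing at insn 10, then find_single_use returns a pointer to the PLUS
   (the innermost expression containing the use) and sets *PLOC to
   insn 12.  */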
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && CONST_INT_P (newval))
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
         that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
                  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
         CONST_INT is not valid, because after the replacement, the
         original mode would be gone.  Unfortunately, we can't tell
         when do_SUBST is called to replace the operand thereof, so we
         perform this test on oldval instead, checking whether an
         invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
                    && CONST_INT_P (SUBREG_REG (oldval))));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
                    && CONST_INT_P (XEXP (oldval, 0))));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_RTX;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)  do_SUBST(&(INTO), (NEWVAL))
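
/* Illustrative (hypothetical) use of the macro above: a simplification
   might perform

	SUBST (SET_SRC (pat), new_src);

   recording the old SET_SRC in undobuf first.  If the rewritten pattern
   is later rejected, undo_all walks undobuf.undos and stores each
   old_contents back through its saved `where' pointer, restoring the
   original rtl.  */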
/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_INT;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))
/* Similar to SUBST, but just substitute the mode.  This is used when
   changing the mode of a pseudo-register, so that any other
   references to the entry in the regno_reg_rtx array will change as
   well.  */

static void
do_SUBST_MODE (rtx *into, enum machine_mode newval)
{
  struct undo *buf;
  enum machine_mode oldval = GET_MODE (*into);

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_MODE;
  buf->where.r = into;
  buf->old_contents.m = oldval;
  adjust_reg_mode (*into, newval);

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_MODE(INTO, NEWVAL)  do_SUBST_MODE(&(INTO), (NEWVAL))
/* Subroutine of try_combine.  Determine whether the combine replacement
   patterns NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to
   insn_rtx_cost than the original instruction sequence I1, I2, I3 and
   undobuf.other_insn.  Note that I1 and/or NEWI2PAT may be NULL_RTX.
   NEWOTHERPAT and undobuf.other_insn may also both be NULL_RTX.  This
   function returns false if the costs of all instructions can be
   estimated and the replacements are more expensive than the original
   sequence.  */
static bool
combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat,
                       rtx newotherpat)
{
  int i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Lookup the original insn_rtx_costs.  */
  i2_cost = INSN_COST (i2);
  i3_cost = INSN_COST (i3);

  if (i1)
    {
      i1_cost = INSN_COST (i1);
      old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
                 ? i1_cost + i2_cost + i3_cost : 0;
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = 0;
    }

  /* Calculate the replacement insn_rtx_costs.  */
  new_i3_cost = insn_rtx_cost (newpat, optimize_this_for_speed_p);
  if (newi2pat)
    {
      new_i2_cost = insn_rtx_cost (newi2pat, optimize_this_for_speed_p);
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
                 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = INSN_COST (undobuf.other_insn);
      new_other_cost = insn_rtx_cost (newotherpat, optimize_this_for_speed_p);
      if (old_other_cost > 0 && new_other_cost > 0)
        {
          old_cost += old_other_cost;
          new_cost += new_other_cost;
        }
      else
        old_cost = 0;
    }

  /* Disallow this recombination if both new_cost and old_cost are
     greater than zero, and new_cost is greater than old cost.  */
  if (old_cost > 0
      && new_cost > old_cost)
    {
      if (dump_file)
        {
          if (i1)
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d, %d and %d\n",
                       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d + %d = %d\n",
                       i1_cost, i2_cost, i3_cost, old_cost);
            }
          else
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d and %d\n",
                       INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d = %d\n",
                       i2_cost, i3_cost, old_cost);
            }

          if (newi2pat)
            {
              fprintf (dump_file, "replacement costs %d + %d = %d\n",
                       new_i2_cost, new_i3_cost, new_cost);
            }
          else
            fprintf (dump_file, "replacement cost %d\n", new_cost);
        }

      return false;
    }

  /* Update the uid_insn_cost array with the replacement costs.  */
  INSN_COST (i2) = new_i2_cost;
  INSN_COST (i3) = new_i3_cost;
  if (i1)
    INSN_COST (i1) = 0;

  return true;
}
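
/* A worked (hypothetical) example of the cost test above: with
   INSN_COST (i2) == 4 and INSN_COST (i3) == 4 and no I1, old_cost is 8.
   If the replacement pattern has insn_rtx_cost 6 and there is no
   NEWI2PAT, new_cost is 6; since 6 > 8 is false, the combination is
   allowed.  A zero old_cost means some original cost was unknown, in
   which case the combination is never rejected on cost grounds.  */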
/* Delete any insns that copy a register to itself.  */

static void
delete_noop_moves (void)
{
  rtx insn, next;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
        {
          next = NEXT_INSN (insn);
          if (INSN_P (insn) && noop_move_p (insn))
            {
              if (dump_file)
                fprintf (dump_file, "deleting noop move %d\n",
                         INSN_UID (insn));

              delete_insn_and_edges (insn);
            }
        }
    }
}
/* Fill in log links field for all insns.  */

static void
create_log_links (void)
{
  basic_block bb;
  rtx *next_use, insn;
  df_ref *def_vec, *use_vec;

  next_use = XCNEWVEC (rtx, max_reg_num ());
  /* Pass through each block from the end, recording the uses of each
     register and establishing log links when def is encountered.
     Note that we do not clear next_use array in order to save time,
     so we have to test whether the use is in the same basic block as def.

     There are a few cases below when we do not consider the definition or
     usage -- these are taken from the original flow.c code.  Don't ask me
     why it is done this way; I don't know and if it works, I don't want
     to know.  */
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          if (!NONDEBUG_INSN_P (insn))
            continue;

          /* Log links are created only once.  */
          gcc_assert (!LOG_LINKS (insn));

          for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
            {
              df_ref def = *def_vec;
              int regno = DF_REF_REGNO (def);
              rtx use_insn;

              if (!next_use[regno])
                continue;

              /* Do not consider if it is pre/post modification in MEM.  */
              if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
                continue;

              /* Do not make the log link for frame pointer.  */
              if ((regno == FRAME_POINTER_REGNUM
                   && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
                  || (regno == HARD_FRAME_POINTER_REGNUM
                      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
                  || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
                  )
                continue;

              use_insn = next_use[regno];
              if (BLOCK_FOR_INSN (use_insn) == bb)
                {
                  /* flow.c claimed:

                     We don't build a LOG_LINK for hard registers contained
                     in ASM_OPERANDs.  If these registers get replaced,
                     we might wind up changing the semantics of the insn,
                     even if reload can make what appear to be valid
                     assignments later.  */
                  if (regno >= FIRST_PSEUDO_REGISTER
                      || asm_noperands (PATTERN (use_insn)) < 0)
                    {
                      /* Don't add duplicate links between instructions.  */
                      rtx links;
                      for (links = LOG_LINKS (use_insn); links;
                           links = XEXP (links, 1))
                        if (insn == XEXP (links, 0))
                          break;

                      if (!links)
                        LOG_LINKS (use_insn) =
                          alloc_INSN_LIST (insn, LOG_LINKS (use_insn));
                    }
                }
              next_use[regno] = NULL_RTX;
            }

          for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
            {
              df_ref use = *use_vec;
              int regno = DF_REF_REGNO (use);

              /* Do not consider the usage of the stack pointer
                 by function call.  */
              if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
                continue;

              next_use[regno] = insn;
            }
        }
    }

  free (next_use);
}
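
/* For illustration (a hypothetical block):

	insn 10: (set (reg 100) (const_int 5))
	insn 11: (set (reg 101) (plus:SI (reg 100) (reg 102)))

   Scanning backward, insn 11 records itself in next_use[100]; when the
   def of (reg 100) in insn 10 is then encountered, LOG_LINKS (insn 11)
   gets an entry for insn 10, the most recent def reaching that use.  */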
/* Clear LOG_LINKS fields of insns.  */

static void
clear_log_links (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      free_INSN_LIST_list (&LOG_LINKS (insn));
}
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
static int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  rtx links, nextlinks;
  rtx first;
  basic_block last_bb;

  int new_direct_jump_p = 0;

  for (first = f; first && !INSN_P (first); )
    first = NEXT_INSN (first);
  if (!first)
    return 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  rtl_hooks = combine_rtl_hooks;

  VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);

  init_recog_no_volatile ();

  /* Allocate array for insn info.  */
  max_uid_known = get_max_uid ();
  uid_log_links = XCNEWVEC (rtx, max_uid_known + 1);
  uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;
  label_tick = label_tick_ebb_start = 1;

  /* Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  setup_incoming_promotions (first);
  /* Allow the entry block and the first block to fall into the same EBB.
     Conceptually the incoming promotions are assigned to the entry block.  */
  last_bb = ENTRY_BLOCK_PTR;

  create_log_links ();
  FOR_EACH_BB (this_basic_block)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      FOR_BB_INSNS (this_basic_block, insn)
        if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
          {
            subst_low_luid = DF_INSN_LUID (insn);
            subst_insn = insn;

            note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
                         insn);
            record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
            for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
              if (REG_NOTE_KIND (links) == REG_INC)
                set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
                                                  insn);
#endif

            /* Record the current insn_rtx_cost of this instruction.  */
            if (NONJUMP_INSN_P (insn))
              INSN_COST (insn) = insn_rtx_cost (PATTERN (insn),
                                                optimize_this_for_speed_p);
            if (dump_file)
              fprintf (dump_file, "insn_cost %d: %d\n",
                       INSN_UID (insn), INSN_COST (insn));
          }
    }

  nonzero_sign_valid = 1;
  /* Now scan all the insns in forward order.  */
  label_tick = label_tick_ebb_start = 1;
  init_reg_last ();
  setup_incoming_promotions (first);
  last_bb = ENTRY_BLOCK_PTR;

  FOR_EACH_BB (this_basic_block)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      rtl_profile_for_bb (this_basic_block);
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
           insn = next ? next : NEXT_INSN (insn))
        {
          next = 0;
          if (NONDEBUG_INSN_P (insn))
            {
              /* See if we know about function return values before this
                 insn based upon SUBREG flags.  */
              check_promoted_subreg (insn, PATTERN (insn));

              /* See if we can find hardregs and subreg of pseudos in
                 narrower modes.  This could help turning TRUNCATEs
                 into SUBREGs.  */
              note_uses (&PATTERN (insn), record_truncated_values, NULL);

              /* Try this insn with each insn it links back to.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if ((next = try_combine (insn, XEXP (links, 0),
                                         NULL_RTX, &new_direct_jump_p)) != 0)
                  goto retry;

              /* Try each sequence of three linked insns ending with this one.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx link = XEXP (links, 0);

                  /* If the linked insn has been replaced by a note, then there
                     is no point in pursuing this chain any further.  */
                  if (NOTE_P (link))
                    continue;

                  for (nextlinks = LOG_LINKS (link);
                       nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, link,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

#ifdef HAVE_cc0
              /* Try to combine a jump insn that uses CC0
                 with a preceding insn that sets CC0, and maybe with its
                 logical predecessor as well.
                 This is how we make decrement-and-branch insns.
                 We need this special code because data flow connections
                 via CC0 do not get entered in LOG_LINKS.  */

              if (JUMP_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev)))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Do the same for an insn that explicitly references CC0.  */
              if (NONJUMP_INSN_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev))
                  && GET_CODE (PATTERN (insn)) == SET
                  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Finally, see if any of the insns that this insn links to
                 explicitly references CC0.  If so, try this insn, that insn,
                 and its predecessor if it sets CC0.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if (NONJUMP_INSN_P (XEXP (links, 0))
                    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
                    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
                    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
                    && NONJUMP_INSN_P (prev)
                    && sets_cc0_p (PATTERN (prev))
                    && (next = try_combine (insn, XEXP (links, 0),
                                            prev, &new_direct_jump_p)) != 0)
                  goto retry;
#endif

              /* Try combining an insn with two different insns whose results it
                 uses.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                for (nextlinks = XEXP (links, 1); nextlinks;
                     nextlinks = XEXP (nextlinks, 1))
                  if ((next = try_combine (insn, XEXP (links, 0),
                                           XEXP (nextlinks, 0),
                                           &new_direct_jump_p)) != 0)
                    goto retry;

              /* Try this insn with each REG_EQUAL note it links back to.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx set, note;
                  rtx temp = XEXP (links, 0);
                  if ((set = single_set (temp)) != 0
                      && (note = find_reg_equal_equiv_note (temp)) != 0
                      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
                      /* Avoid using a register that may already have been
                         marked dead by an earlier instruction.  */
                      && ! unmentioned_reg_p (note, SET_SRC (set))
                      && (GET_MODE (note) == VOIDmode
                          ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
                          : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
                    {
                      /* Temporarily replace the set's source with the
                         contents of the REG_EQUAL note.  The insn will
                         be deleted or recognized by try_combine.  */
                      rtx orig = SET_SRC (set);
                      SET_SRC (set) = note;
                      i2mod = temp;
                      i2mod_old_rhs = copy_rtx (orig);
                      i2mod_new_rhs = copy_rtx (note);
                      next = try_combine (insn, i2mod, NULL_RTX,
                                          &new_direct_jump_p);
                      i2mod = NULL_RTX;
                      if (next)
                        goto retry;
                      SET_SRC (set) = orig;
                    }
                }

              if (!NOTE_P (insn))
                record_dead_and_set_regs (insn);

            retry:
              ;
            }
        }
    }
  default_rtl_profile ();
  clear_log_links ();
  clear_bb_flags ();
  new_direct_jump_p |= purge_all_dead_edges ();
  delete_noop_moves ();

  /* Clean up.  */
  free (uid_log_links);
  free (uid_insn_cost);
  VEC_free (reg_stat_type, heap, reg_stat);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
        next = undo->next;
        free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
/* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */

static void
init_reg_last (void)
{
  unsigned int i;
  reg_stat_type *p;

  for (i = 0; VEC_iterate (reg_stat_type, reg_stat, i, p); ++i)
    memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
}
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (rtx first)
{
  tree arg;
  bool strictly_local = false;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    {
      rtx x, reg = DECL_INCOMING_RTL (arg);
      int uns1, uns3;
      enum machine_mode mode1, mode2, mode3, mode4;

      /* Only continue if the incoming argument is in a register.  */
      if (!REG_P (reg))
        continue;

      /* Determine, if possible, whether all call sites of the current
         function lie within the current compilation unit.  (This does
         take into account the exporting of a function via taking its
         address, and so forth.)  */
      strictly_local = cgraph_local_info (current_function_decl)->local;

      /* The mode and signedness of the argument before any promotions happen
         (equal to the mode of the pseudo holding it at that stage).  */
      mode1 = TYPE_MODE (TREE_TYPE (arg));
      uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));

      /* The mode and signedness of the argument after any source language and
         TARGET_PROMOTE_PROTOTYPES-driven promotions.  */
      mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
      uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));

      /* The mode and signedness of the argument as it is actually passed,
         after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions.  */
      mode3 = promote_function_mode (DECL_ARG_TYPE (arg), mode2, &uns3,
                                     TREE_TYPE (cfun->decl), 0);

      /* The mode of the register in which the argument is being passed.  */
      mode4 = GET_MODE (reg);

      /* Eliminate sign extensions in the callee when:
         (a) A mode promotion has occurred;  */
      if (mode1 == mode3)
        continue;
      /* (b) The mode of the register is the same as the mode of
             the argument as it is passed; */
      if (mode3 != mode4)
        continue;
      /* (c) There's no language level extension;  */
      if (mode1 == mode2)
        ;
      /* (c.1) All callers are from the current compilation unit.  If that's
         the case we don't have to rely on an ABI, we only have to know
         what we're generating right now, and we know that we will do the
         mode1 to mode2 promotion with the given sign.  */
      else if (!strictly_local)
        continue;
      /* (c.2) The combination of the two promotions is useful.  This is
         true when the signs match, or if the first promotion is unsigned.
         In the latter case, (sign_extend (zero_extend x)) is the same as
         (zero_extend (zero_extend x)), so make sure to force UNS3 true.  */
      else if (uns1)
        uns3 = true;
      else if (uns3)
        continue;
      /* Record that the value was promoted from mode1 to mode3,
         so that any sign extension at the head of the current
         function may be eliminated.  */
      x = gen_rtx_CLOBBER (mode1, const0_rtx);
      x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
      record_value_for_reg (reg, first, x);
    }
}
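
/* An illustrative (hypothetical) case for the above: on a target that
   promotes arguments to SImode, an unsigned char argument can have
   mode1 == QImode and mode3 == mode4 == SImode, so we would record

	(zero_extend:SI (clobber:QI (const_int 0)))

   for the incoming register, allowing a redundant zero extension at the
   head of the function to be eliminated.  */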
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
{
  rtx insn = (rtx) data;
  unsigned int num;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
         say what its contents were.  */
      && ! REGNO_REG_SET_P
           (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));

      if (set == 0 || GET_CODE (set) == CLOBBER)
        {
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          rsp->sign_bit_copies = 1;
          return;
        }

      /* If this register is being initialized using itself, and the
         register is uninitialized in this basic block, and there are
         no LOG_LINKS which set the register, then part of the
         register is uninitialized.  In that case we can't assume
         anything about the number of nonzero bits.

         ??? We could do better if we checked this in
         reg_{nonzero_bits,num_sign_bit_copies}_for_combine.  Then we
         could avoid making assumptions about the insn which initially
         sets the register, while still using the information in other
         insns.  We would have to be careful to check every insn
         involved in the combination.  */

      if (insn
          && reg_referenced_p (x, PATTERN (insn))
          && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
                               REGNO (x)))
        {
          rtx link;

          for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
            {
              if (dead_or_set_p (XEXP (link, 0), x))
                break;
            }
          if (!link)
            {
              rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
              rsp->sign_bit_copies = 1;
              return;
            }
        }

      /* If this is a complex assignment, see if we can convert it into a
         simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
         set what we know about X.  */

      if (SET_DEST (set) == x
          || (GET_CODE (SET_DEST (set)) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
              && SUBREG_REG (SET_DEST (set)) == x))
        {
          rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
          /* If X is narrower than a word and SRC is a non-negative
             constant that would appear negative in the mode of X,
             sign-extend it for use in reg_stat[].nonzero_bits because some
             machines (maybe most) will actually do the sign-extension
             and this is the conservative approach.

             ??? For 2.5, try to tighten up the MD files in this regard
             instead of this kludge.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
              && CONST_INT_P (src)
              && INTVAL (src) > 0
              && 0 != (INTVAL (src)
                       & ((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
            src = GEN_INT (INTVAL (src)
                           | ((HOST_WIDE_INT) (-1)
                              << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

          /* Don't call nonzero_bits if it cannot change anything.  */
          if (rsp->nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
            rsp->nonzero_bits |= nonzero_bits (src, nonzero_bits_mode);
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
          if (rsp->sign_bit_copies == 0
              || rsp->sign_bit_copies > num)
            rsp->sign_bit_copies = num;
        }
      else
        {
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          rsp->sign_bit_copies = 1;
        }
    }
}
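
/* For illustration (a hypothetical recorded set):

	(set (reg:SI 100) (and:SI (reg:SI 99) (const_int 255)))

   ORs at most 0xff into reg_stat[100].nonzero_bits, since the AND
   clears all higher bits; a CLOBBER of (reg 100), by contrast, resets
   the entry to the full mode mask.  */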
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
               rtx *pdest, rtx *psrc)
{
  int i;
  const_rtx set = 0;
  rtx src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
                              && next_active_insn (succ) == i3)
                           : next_active_insn (insn) == i3);
  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */
  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);

          switch (GET_CODE (elt))
            {
            /* This is important to combine floating point insns
               for the SH4 port.  */
            case USE:
              /* Combining an isolated USE doesn't make sense.
                 We depend here on combinable_i3pat to reject them.  */
              /* The code below this loop only verifies that the inputs of
                 the SET in INSN do not change.  We call reg_set_between_p
                 to verify that the REG in the USE does not change between
                 I3 and INSN.
                 If the USE in INSN was for a pseudo register, the matching
                 insn pattern will likely match any register; combining this
                 with any other USE would only be safe if we knew that the
                 used registers have identical values, or if there was
                 something to tell them apart, e.g. different modes.  For
                 now, we forgo such complicated tests and simply disallow
                 combining of USES of pseudo registers with any other USE.  */
              if (REG_P (XEXP (elt, 0))
                  && GET_CODE (PATTERN (i3)) == PARALLEL)
                {
                  rtx i3pat = PATTERN (i3);
                  int i = XVECLEN (i3pat, 0) - 1;
                  unsigned int regno = REGNO (XEXP (elt, 0));

                  do
                    {
                      rtx i3elt = XVECEXP (i3pat, 0, i);

                      if (GET_CODE (i3elt) == USE
                          && REG_P (XEXP (i3elt, 0))
                          && (REGNO (XEXP (i3elt, 0)) == regno
                              ? reg_set_between_p (XEXP (elt, 0),
                                                   PREV_INSN (insn), i3)
                              : regno >= FIRST_PSEUDO_REGISTER))
                        return 0;
                    }
                  while (--i >= 0);
                }
              break;

            /* We can ignore CLOBBERs.  */
            case CLOBBER:
              break;

            case SET:
              /* Ignore SETs whose result isn't used but not those that
                 have side-effects.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
                  && insn_nothrow_p (insn)
                  && !side_effects_p (elt))
                break;

              /* If we have already found a SET, this is a second one and
                 so we cannot combine with this insn.  */
              if (set)
                return 0;

              set = elt;
              break;

            default:
              /* Anything else means we can't combine.  */
              return 0;
            }
        }

      if (set == 0
          /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
             so don't do anything with it.  */
          || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
        return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);
  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
         a REG_EQUAL note.  This may be part of a LIBCALL sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (CALL_P (i3)
          && (find_reg_fusage (i3, USE, dest)
              || (REG_P (dest)
                  && REGNO (dest) < FIRST_PSEUDO_REGISTER
                  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't substitute into a non-local goto, this confuses CFG.  */
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
          && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
         does not use any registers whose values alter in between.  However,
         if the insns are adjacent, a use can't cross a set even though we
         think it might (this can happen for a sequence of insns each setting
         the same destination; last_set of that register might point to
         a NOTE).  If INSN has a REG_EQUIV note, the register is always
         equivalent to the memory so the substitution is valid even if there
         are intervening stores.  Also, don't move a volatile asm or
         UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
          && (((!MEM_P (src)
                || ! find_reg_note (insn, REG_EQUIV, src))
               && use_crosses_set_p (src, DF_INSN_LUID (insn)))
              || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
              || GET_CODE (src) == UNSPEC_VOLATILE))
      /* Don't combine across a CALL_INSN, because that would possibly
         change whether the life span of some REGs crosses calls or not,
         and it is a pain to update that information.
         Exception: if source is a constant, moving it later can't hurt.
         Accept that as a special case.  */
      || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (REG_P (dest))
    {
      /* If register alignment is being enforced for multi-word items in all
         cases except for parameters, it is possible to have a register copy
         insn referencing a hard register that is not allowed to contain the
         mode being copied and which would not be valid as an operand of most
         insns.  Eliminate this problem by not combining with such an insn.

         Also, on some machines we don't want to extend the life of a hard
         register.  */

      if (REG_P (src)
          && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
               && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
              /* Don't extend the life of a hard register unless it is
                 user variable (if we have few registers) or it can't
                 fit into the desired register (meaning something special
                 is going on).
                 Also avoid substituting a return register into I3, because
                 reload can't handle a conflict with constraints of other
                 inputs.  */
              || (REGNO (src) < FIRST_PSEUDO_REGISTER
                  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
        return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
        {
          /* Don't substitute for a register intended as a clobberable
             operand.  */
          rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
          if (rtx_equal_p (reg, dest))
            return 0;

          /* If the clobber represents an earlyclobber operand, we must not
             substitute an expression containing the clobbered register.
             As we do not analyze the constraint strings here, we have to
             make the conservative assumption.  However, if the register is
             a fixed hard reg, the clobber cannot represent any operand;
             we leave it up to the machine description to either accept or
             reject use-and-clobber patterns.  */
          if (!REG_P (reg)
              || REGNO (reg) >= FIRST_PSEUDO_REGISTER
              || !fixed_regs[REGNO (reg)])
            if (reg_overlap_mentioned_p (reg, src))
              return 0;
        }
1763 /* If INSN contains anything volatile, or is an `asm' (whether volatile
1764 or not), reject, unless nothing volatile comes between it and I3. */
1766 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1768 /* Make sure succ doesn't contain a volatile reference. */
1769 if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1770 return 0;
1772 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1773 if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
1774 return 0;
1777 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1778 to be an explicit register variable, and was chosen for a reason. */
1780 if (GET_CODE (src) == ASM_OPERANDS
1781 && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1782 return 0;
1784 /* If there are any volatile insns between INSN and I3, reject, because
1785 they might affect machine state. */
1787 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1788 if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
1789 return 0;
1791 /* If INSN contains an autoincrement or autodecrement, make sure that
1792 register is not used between there and I3, and not already used in
1793 I3 either. Neither must it be used in PRED or SUCC, if they exist.
1794 Also insist that I3 not be a jump; if it were one
1795 and the incremented register were spilled, we would lose. */
1797 #ifdef AUTO_INC_DEC
1798 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1799 if (REG_NOTE_KIND (link) == REG_INC
1800 && (JUMP_P (i3)
1801 || reg_used_between_p (XEXP (link, 0), insn, i3)
1802 || (pred != NULL_RTX
1803 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
1804 || (succ != NULL_RTX
1805 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
1806 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1807 return 0;
1808 #endif
1810 #ifdef HAVE_cc0
1811 /* Don't combine an insn that follows a CC0-setting insn.
1812 An insn that uses CC0 must not be separated from the one that sets it.
1813 We do, however, allow I2 to follow a CC0-setting insn if that insn
1814 is passed as I1; in that case it will be deleted also.
1815 We also allow combining in this case if all the insns are adjacent
1816 because that would leave the two CC0 insns adjacent as well.
1817 It would be more logical to test whether CC0 occurs inside I1 or I2,
1818 but that would be much slower, and this ought to be equivalent. */
1820 p = prev_nonnote_insn (insn);
1821 if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
1822 && ! all_adjacent)
1823 return 0;
1824 #endif
1826 /* If we get here, we have passed all the tests and the combination is
1827 to be allowed. */
1829 *pdest = dest;
1830 *psrc = src;
1832 return 1;
1835 /* LOC is the location within I3 that contains its pattern or the component
1836 of a PARALLEL of the pattern. We validate that it is valid for combining.
1838 One problem is that if I3 modifies its output, as opposed to replacing it
1839 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1840 so would produce an insn that is not equivalent to the original insns.
1842 Consider:
1844 (set (reg:DI 101) (reg:DI 100))
1845 (set (subreg:SI (reg:DI 101) 0) <foo>)
1847 This is NOT equivalent to:
1849 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1850 (set (reg:DI 101) (reg:DI 100))])
1852 Not only does this modify 100 (in which case it might still be valid
1853 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1855 We can also run into a problem if I2 sets a register that I1
1856 uses and I1 gets directly substituted into I3 (not via I2). In that
1857 case, we would be getting the wrong value of I2DEST into I3, so we
1858 must reject the combination. This case occurs when I2 and I1 both
1859 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1860 If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
1861 of a SET must prevent combination from occurring.
1863 Before doing the above check, we first try to expand a field assignment
1864 into a set of logical operations.
1866 If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
1867 we place a register that is both set and used within I3. If more than one
1868 such register is detected, we fail.
1870 Return 1 if the combination is valid, zero otherwise. */
1872 static int
1873 combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
1874 int i1_not_in_src, rtx *pi3dest_killed)
1876 rtx x = *loc;
1878 if (GET_CODE (x) == SET)
1880 rtx set = x;
1881 rtx dest = SET_DEST (set);
1882 rtx src = SET_SRC (set);
1883 rtx inner_dest = dest;
1884 rtx subdest;
1886 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1887 || GET_CODE (inner_dest) == SUBREG
1888 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1889 inner_dest = XEXP (inner_dest, 0);
1891 /* Check for the case where I3 modifies its output, as discussed
1892 above. We don't want to prevent pseudos from being combined
1893 into the address of a MEM, so only prevent the combination if
1894 i1 or i2 set the same MEM. */
1895 if ((inner_dest != dest &&
1896 (!MEM_P (inner_dest)
1897 || rtx_equal_p (i2dest, inner_dest)
1898 || (i1dest && rtx_equal_p (i1dest, inner_dest)))
1899 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1900 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1902 /* This is the same test done in can_combine_p except we can't test
1903 all_adjacent; we don't have to, since this instruction will stay
1904 in place, thus we are not considering increasing the lifetime of
1905 INNER_DEST.
1907 Also, if this insn sets a function argument, combining it with
1908 something that might need a spill could clobber a previous
1909 function argument; the all_adjacent test in can_combine_p also
1910 checks this; here, we do a more specific test for this case. */
1912 || (REG_P (inner_dest)
1913 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1914 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1915 GET_MODE (inner_dest))))
1916 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1917 return 0;
1919 /* If DEST is used in I3, it is being killed in this insn, so
1920 record that for later. We have to consider paradoxical
1921 subregs here, since they kill the whole register, but we
1922 ignore partial subregs, STRICT_LOW_PART, etc.
1923 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1924 STACK_POINTER_REGNUM, since these are always considered to be
1925 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
1926 subdest = dest;
1927 if (GET_CODE (subdest) == SUBREG
1928 && (GET_MODE_SIZE (GET_MODE (subdest))
1929 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
1930 subdest = SUBREG_REG (subdest);
1931 if (pi3dest_killed
1932 && REG_P (subdest)
1933 && reg_referenced_p (subdest, PATTERN (i3))
1934 && REGNO (subdest) != FRAME_POINTER_REGNUM
1935 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1936 && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
1937 #endif
1938 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1939 && (REGNO (subdest) != ARG_POINTER_REGNUM
1940 || ! fixed_regs [REGNO (subdest)])
1941 #endif
1942 && REGNO (subdest) != STACK_POINTER_REGNUM)
1944 if (*pi3dest_killed)
1945 return 0;
1947 *pi3dest_killed = subdest;
1951 else if (GET_CODE (x) == PARALLEL)
1953 int i;
1955 for (i = 0; i < XVECLEN (x, 0); i++)
1956 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1957 i1_not_in_src, pi3dest_killed))
1958 return 0;
1961 return 1;
1964 /* Return 1 if X is an arithmetic expression that contains a multiplication
1965 or a division. We don't count multiplications by powers of two here. */
1967 static int
1968 contains_muldiv (rtx x)
1970 switch (GET_CODE (x))
1972 case MOD: case DIV: case UMOD: case UDIV:
1973 return 1;
1975 case MULT:
1976 return ! (CONST_INT_P (XEXP (x, 1))
1977 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1978 default:
1979 if (BINARY_P (x))
1980 return contains_muldiv (XEXP (x, 0))
1981 || contains_muldiv (XEXP (x, 1));
1983 if (UNARY_P (x))
1984 return contains_muldiv (XEXP (x, 0));
1986 return 0;
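/* For illustration (made-up RTL, not from any particular target):
   contains_muldiv returns 1 for (mult:SI (reg:SI 100) (const_int 3))
   and for anything containing a DIV, MOD, UDIV or UMOD, but 0 for
   (mult:SI (reg:SI 100) (const_int 8)), since 8 is a power of two
   and will be synthesized as a shift, and 0 for a plain
   (plus:SI (reg:SI 100) (reg:SI 101)).  */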
1990 /* Determine whether INSN can be used in a combination. Return nonzero if
1991 not. This is used in try_combine to detect early some cases where we
1992 can't perform combinations. */
1994 static int
1995 cant_combine_insn_p (rtx insn)
1997 rtx set;
1998 rtx src, dest;
2000 /* If this isn't really an insn, we can't do anything.
2001 This can occur when flow deletes an insn that it has merged into an
2002 auto-increment address. */
2003 if (! INSN_P (insn))
2004 return 1;
2006 /* Never combine loads and stores involving hard regs that are likely
2007 to be spilled. The register allocator can usually handle such
2008 reg-reg moves by tying. If we allow the combiner to make
2009 substitutions of likely-spilled regs, reload might die.
2010 As an exception, we allow combinations involving fixed regs; these are
2011 not available to the register allocator so there's no risk involved. */
2013 set = single_set (insn);
2014 if (! set)
2015 return 0;
2016 src = SET_SRC (set);
2017 dest = SET_DEST (set);
2018 if (GET_CODE (src) == SUBREG)
2019 src = SUBREG_REG (src);
2020 if (GET_CODE (dest) == SUBREG)
2021 dest = SUBREG_REG (dest);
2022 if (REG_P (src) && REG_P (dest)
2023 && ((REGNO (src) < FIRST_PSEUDO_REGISTER
2024 && ! fixed_regs[REGNO (src)]
2025 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
2026 || (REGNO (dest) < FIRST_PSEUDO_REGISTER
2027 && ! fixed_regs[REGNO (dest)]
2028 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
2029 return 1;
2031 return 0;
2034 struct likely_spilled_retval_info
2036 unsigned regno, nregs;
2037 unsigned mask;
2040 /* Called via note_stores by likely_spilled_retval_p. Remove from info->mask
2041 hard registers that are known to be written to / clobbered in full. */
2042 static void
2043 likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
2045 struct likely_spilled_retval_info *const info =
2046 (struct likely_spilled_retval_info *) data;
2047 unsigned regno, nregs;
2048 unsigned new_mask;
2050 if (!REG_P (XEXP (set, 0)))
2051 return;
2052 regno = REGNO (x);
2053 if (regno >= info->regno + info->nregs)
2054 return;
2055 nregs = hard_regno_nregs[regno][GET_MODE (x)];
2056 if (regno + nregs <= info->regno)
2057 return;
2058 new_mask = (2U << (nregs - 1)) - 1;
2059 if (regno < info->regno)
2060 new_mask >>= info->regno - regno;
2061 else
2062 new_mask <<= regno - info->regno;
2063 info->mask &= ~new_mask;
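/* A worked example with invented numbers: if INFO describes regs
   10..13 (regno == 10, nregs == 4, mask == 0xf) and X is a
   two-register store starting at reg 11, then new_mask == 3 is
   shifted left by 11 - 10 to give 0x6, and info->mask becomes
   0xf & ~0x6 == 0x9: regs 11 and 12 are now known to be written
   in full.  */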
2066 /* Return nonzero iff part of the return value is live during INSN, and
2067 it is likely spilled. This can happen when more than one insn is needed
2068 to copy the return value, e.g. when we consider combining into the
2069 second copy insn for a complex value. */
2071 static int
2072 likely_spilled_retval_p (rtx insn)
2074 rtx use = BB_END (this_basic_block);
2075 rtx reg, p;
2076 unsigned regno, nregs;
2077 /* We assume here that no machine mode needs more than
2078 32 hard registers when the value overlaps with a register
2079 for which FUNCTION_VALUE_REGNO_P is true. */
2080 unsigned mask;
2081 struct likely_spilled_retval_info info;
2083 if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
2084 return 0;
2085 reg = XEXP (PATTERN (use), 0);
2086 if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
2087 return 0;
2088 regno = REGNO (reg);
2089 nregs = hard_regno_nregs[regno][GET_MODE (reg)];
2090 if (nregs == 1)
2091 return 0;
2092 mask = (2U << (nregs - 1)) - 1;
2094 /* Disregard parts of the return value that are set later. */
2095 info.regno = regno;
2096 info.nregs = nregs;
2097 info.mask = mask;
2098 for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
2099 if (INSN_P (p))
2100 note_stores (PATTERN (p), likely_spilled_retval_1, &info);
2101 mask = info.mask;
2103 /* Check if any of the (probably) live return value registers is
2104 likely spilled. */
2105 nregs--;
2106 do
2107 {
2108 if ((mask & 1 << nregs)
2109 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
2110 return 1;
2111 } while (nregs--);
2112 return 0;
2115 /* Adjust INSN after we made a change to its destination.
2117 Changing the destination can invalidate notes that say something about
2118 the results of the insn and a LOG_LINK pointing to the insn. */
2120 static void
2121 adjust_for_new_dest (rtx insn)
2123 /* For notes, be conservative and simply remove them. */
2124 remove_reg_equal_equiv_notes (insn);
2126 /* The new insn will have a destination that was previously the destination
2127 of an insn just above it. Call distribute_links to make a LOG_LINK from
2128 the next use of that destination. */
2129 distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
2131 df_insn_rescan (insn);
2134 /* Return TRUE if combine can reuse reg X in mode MODE.
2135 ADDED_SETS is nonzero if the original set is still required. */
2136 static bool
2137 can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
2139 unsigned int regno;
2141 if (!REG_P (x))
2142 return false;
2144 regno = REGNO (x);
2145 /* Allow hard registers if the new mode is legal, and occupies no more
2146 registers than the old mode. */
2147 if (regno < FIRST_PSEUDO_REGISTER)
2148 return (HARD_REGNO_MODE_OK (regno, mode)
2149 && (hard_regno_nregs[regno][GET_MODE (x)]
2150 >= hard_regno_nregs[regno][mode]));
2152 /* Or a pseudo that is only used once. */
2153 return (REG_N_SETS (regno) == 1 && !added_sets
2154 && !REG_USERVAR_P (x));
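/* Sketch of the intent (register numbers are illustrative only): a
   hard reg such as (reg:CC 17) may be reused in SImode provided
   HARD_REGNO_MODE_OK (17, SImode) holds and SImode needs no more
   hard registers than CCmode did; a pseudo like (reg:SI 100) may
   change mode only if it is set exactly once, its original set is
   not being kept around, and it is not a user variable.  */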
2158 /* Check whether X, the destination of a set, refers to part of
2159 the register specified by REG. */
2161 static bool
2162 reg_subword_p (rtx x, rtx reg)
2164 /* Check that reg is an integer mode register. */
2165 if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
2166 return false;
2168 if (GET_CODE (x) == STRICT_LOW_PART
2169 || GET_CODE (x) == ZERO_EXTRACT)
2170 x = XEXP (x, 0);
2172 return GET_CODE (x) == SUBREG
2173 && SUBREG_REG (x) == reg
2174 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
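/* E.g., with REG being (reg:DI 100) (an invented pseudo), this
   returns true for (subreg:SI (reg:DI 100) 0) and for
   (strict_low_part (subreg:SI (reg:DI 100) 0)), but false for
   (reg:DI 100) itself, which refers to the whole register rather
   than a part of it.  */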
2177 #ifdef AUTO_INC_DEC
2178 /* Replace auto-increment addressing modes with explicit operations to
2179 access the same addresses without modifying the corresponding
2180 registers. If AFTER holds, SRC is meant to be reused after the
2181 side effect, otherwise it is to be reused before that. */
2183 static rtx
2184 cleanup_auto_inc_dec (rtx src, bool after, enum machine_mode mem_mode)
2186 rtx x = src;
2187 const RTX_CODE code = GET_CODE (x);
2188 int i;
2189 const char *fmt;
2191 switch (code)
2193 case REG:
2194 case CONST_INT:
2195 case CONST_DOUBLE:
2196 case CONST_FIXED:
2197 case CONST_VECTOR:
2198 case SYMBOL_REF:
2199 case CODE_LABEL:
2200 case PC:
2201 case CC0:
2202 case SCRATCH:
2203 /* SCRATCH must be shared because each one represents a distinct value. */
2204 return x;
2205 case CLOBBER:
2206 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2207 return x;
2208 break;
2210 case CONST:
2211 if (shared_const_p (x))
2212 return x;
2213 break;
2215 case MEM:
2216 mem_mode = GET_MODE (x);
2217 break;
2219 case PRE_INC:
2220 case PRE_DEC:
2221 case POST_INC:
2222 case POST_DEC:
2223 gcc_assert (mem_mode != VOIDmode && mem_mode != BLKmode);
2224 if (after == (code == PRE_INC || code == PRE_DEC))
2225 x = cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode);
2226 else
2227 x = gen_rtx_PLUS (GET_MODE (x),
2228 cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode),
2229 GEN_INT ((code == PRE_INC || code == POST_INC)
2230 ? GET_MODE_SIZE (mem_mode)
2231 : -GET_MODE_SIZE (mem_mode)));
2232 return x;
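/* For instance (assuming a 4-byte SImode access), the address inside
   (mem:SI (post_inc:SI (reg:SI 100))) is rewritten as (reg:SI 100)
   when SRC is to be reused before the side effect (AFTER is false),
   but as (plus:SI (reg:SI 100) (const_int 4)) when it is reused
   after it; a pre_inc is handled the other way around.  */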
2234 case PRE_MODIFY:
2235 case POST_MODIFY:
2236 if (after == (code == PRE_MODIFY))
2237 x = XEXP (x, 0);
2238 else
2239 x = XEXP (x, 1);
2240 return cleanup_auto_inc_dec (x, after, mem_mode);
2242 default:
2243 break;
2246 /* Copy the various flags, fields, and other information. We assume
2247 that all fields need copying, and then clear the fields that should
2248 not be copied. That is the sensible default behavior, and forces
2249 us to explicitly document why we are *not* copying a flag. */
2250 x = shallow_copy_rtx (x);
2252 /* We do not copy the USED flag, which is used as a mark bit during
2253 walks over the RTL. */
2254 RTX_FLAG (x, used) = 0;
2256 /* We do not copy FRAME_RELATED for INSNs. */
2257 if (INSN_P (x))
2258 RTX_FLAG (x, frame_related) = 0;
2260 fmt = GET_RTX_FORMAT (code);
2261 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2262 if (fmt[i] == 'e')
2263 XEXP (x, i) = cleanup_auto_inc_dec (XEXP (x, i), after, mem_mode);
2264 else if (fmt[i] == 'E' || fmt[i] == 'V')
2266 int j;
2267 XVEC (x, i) = rtvec_alloc (XVECLEN (x, i));
2268 for (j = 0; j < XVECLEN (x, i); j++)
2269 XVECEXP (x, i, j)
2270 = cleanup_auto_inc_dec (XVECEXP (src, i, j), after, mem_mode);
2273 return x;
2276 /* Auxiliary data structure for propagate_for_debug_stmt. */
2278 struct rtx_subst_pair
2280 rtx to;
2281 bool adjusted;
2282 bool after;
2285 /* DATA points to an rtx_subst_pair. Return the value that should be
2286 substituted. */
2288 static rtx
2289 propagate_for_debug_subst (rtx from, const_rtx old_rtx, void *data)
2291 struct rtx_subst_pair *pair = (struct rtx_subst_pair *)data;
2293 if (!rtx_equal_p (from, old_rtx))
2294 return NULL_RTX;
2295 if (!pair->adjusted)
2297 pair->adjusted = true;
2298 pair->to = cleanup_auto_inc_dec (pair->to, pair->after, VOIDmode);
2299 return pair->to;
2301 return copy_rtx (pair->to);
2303 #endif
2305 /* Replace occurrences of DEST with SRC in DEBUG_INSNs between INSN
2306 and LAST. If MOVE holds, debug insns must also be moved past
2307 LAST. */
2309 static void
2310 propagate_for_debug (rtx insn, rtx last, rtx dest, rtx src, bool move)
2312 rtx next, move_pos = move ? last : NULL_RTX, loc;
2314 #ifdef AUTO_INC_DEC
2315 struct rtx_subst_pair p;
2316 p.to = src;
2317 p.adjusted = false;
2318 p.after = move;
2319 #endif
2321 next = NEXT_INSN (insn);
2322 while (next != last)
2324 insn = next;
2325 next = NEXT_INSN (insn);
2326 if (DEBUG_INSN_P (insn))
2328 #ifdef AUTO_INC_DEC
2329 loc = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
2330 dest, propagate_for_debug_subst, &p);
2331 #else
2332 loc = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn), dest, src);
2333 #endif
2334 if (loc == INSN_VAR_LOCATION_LOC (insn))
2335 continue;
2336 INSN_VAR_LOCATION_LOC (insn) = loc;
2337 if (move_pos)
2339 remove_insn (insn);
2340 PREV_INSN (insn) = NEXT_INSN (insn) = NULL_RTX;
2341 move_pos = emit_debug_insn_after (insn, move_pos);
2343 else
2344 df_insn_rescan (insn);
2349 /* Delete the unconditional jump INSN and adjust the CFG correspondingly.
2350 Note that the INSN should be deleted *after* removing dead edges, so
2351 that the kept edge is the fallthrough edge for a (set (pc) (pc))
2352 but not for a (set (pc) (label_ref FOO)). */
2354 static void
2355 update_cfg_for_uncondjump (rtx insn)
2357 basic_block bb = BLOCK_FOR_INSN (insn);
2358 bool at_end = (BB_END (bb) == insn);
2360 if (at_end)
2361 purge_dead_edges (bb);
2363 delete_insn (insn);
2364 if (at_end && EDGE_COUNT (bb->succs) == 1)
2365 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2369 /* Try to combine the insns I1 and I2 into I3.
2370 Here I1 and I2 appear earlier than I3.
2371 I1 can be zero; then we combine just I2 into I3.
2373 If we are combining three insns and the resulting insn is not recognized,
2374 try splitting it into two insns. If that happens, I2 and I3 are retained
2375 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
2376 are pseudo-deleted.
2378 Return 0 if the combination does not work. Then nothing is changed.
2379 If we did the combination, return the insn at which combine should
2380 resume scanning.
2382 Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
2383 new direct jump instruction. */
2385 static rtx
2386 try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
2388 /* New patterns for I3 and I2, respectively. */
2389 rtx newpat, newi2pat = 0;
2390 rtvec newpat_vec_with_clobbers = 0;
2391 int substed_i2 = 0, substed_i1 = 0;
2392 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
2393 int added_sets_1, added_sets_2;
2394 /* Total number of SETs to put into I3. */
2395 int total_sets;
2396 /* Nonzero if I2's body now appears in I3. */
2397 int i2_is_used;
2398 /* INSN_CODEs for new I3, new I2, and user of condition code. */
2399 int insn_code_number, i2_code_number = 0, other_code_number = 0;
2400 /* Contains I3 if the destination of I3 is used in its source, which means
2401 that the old life of I3 is being killed. If that usage is placed into
2402 I2 and not in I3, a REG_DEAD note must be made. */
2403 rtx i3dest_killed = 0;
2404 /* SET_DEST and SET_SRC of I2 and I1. */
2405 rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0;
2406 /* Set if I2DEST was reused as a scratch register. */
2407 bool i2scratch = false;
2408 /* PATTERN (I1) and PATTERN (I2), or a copy of it in certain cases. */
2409 rtx i1pat = 0, i2pat = 0;
2410 /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC. */
2411 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
2412 int i2dest_killed = 0, i1dest_killed = 0;
2413 int i1_feeds_i3 = 0;
2414 /* Notes that must be added to REG_NOTES in I3 and I2. */
2415 rtx new_i3_notes, new_i2_notes;
2416 /* Notes that we substituted I3 into I2 instead of the normal case. */
2417 int i3_subst_into_i2 = 0;
2418 /* Notes that I1, I2 or I3 is a MULT operation. */
2419 int have_mult = 0;
2420 int swap_i2i3 = 0;
2421 int changed_i3_dest = 0;
2423 int maxreg;
2424 rtx temp;
2425 rtx link;
2426 rtx other_pat = 0;
2427 rtx new_other_notes;
2428 int i;
2430 /* Exit early if one of the insns involved can't be used for
2431 combinations. */
2432 if (cant_combine_insn_p (i3)
2433 || cant_combine_insn_p (i2)
2434 || (i1 && cant_combine_insn_p (i1))
2435 || likely_spilled_retval_p (i3))
2436 return 0;
2438 combine_attempts++;
2439 undobuf.other_insn = 0;
2441 /* Reset the hard register usage information. */
2442 CLEAR_HARD_REG_SET (newpat_used_regs);
2444 if (dump_file && (dump_flags & TDF_DETAILS))
2446 if (i1)
2447 fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
2448 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2449 else
2450 fprintf (dump_file, "\nTrying %d -> %d:\n",
2451 INSN_UID (i2), INSN_UID (i3));
2454 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
2455 code below, set I1 to be the earlier of the two insns. */
2456 if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
2457 temp = i1, i1 = i2, i2 = temp;
2459 added_links_insn = 0;
2461 /* First check for one important special-case that the code below will
2462 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
2463 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
2464 we may be able to replace that destination with the destination of I3.
2465 This occurs in the common code where we compute both a quotient and
2466 remainder into a structure, in which case we want to do the computation
2467 directly into the structure to avoid register-register copies.
2469 Note that this case handles both multiple sets in I2 and also
2470 cases where I2 has a number of CLOBBER or PARALLELs.
2472 We make very conservative checks below and only try to handle the
2473 most common cases of this. For example, we only handle the case
2474 where I2 and I3 are adjacent to avoid making difficult register
2475 usage tests. */
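/* An illustrative instance of this case (register numbers invented):

   I2: (parallel [(set (reg:SI 100) (div:SI (reg:SI 98) (reg:SI 99)))
                  (set (reg:SI 101) (mod:SI (reg:SI 98) (reg:SI 99)))])
   I3: (set (mem:SI (reg:SI 102)) (reg:SI 100))

   If reg 100 dies in I3, the first SET of I2 can store the quotient
   directly into the memory location, making I3 unnecessary.  */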
2477 if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
2478 && REG_P (SET_SRC (PATTERN (i3)))
2479 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
2480 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
2481 && GET_CODE (PATTERN (i2)) == PARALLEL
2482 && ! side_effects_p (SET_DEST (PATTERN (i3)))
2483 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
2484 below would need to check what is inside (and reg_overlap_mentioned_p
2485 doesn't support those codes anyway). Don't allow those destinations;
2486 the resulting insn isn't likely to be recognized anyway. */
2487 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
2488 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
2489 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
2490 SET_DEST (PATTERN (i3)))
2491 && next_active_insn (i2) == i3)
2493 rtx p2 = PATTERN (i2);
2495 /* Make sure that the destination of I3,
2496 which we are going to substitute into one output of I2,
2497 is not used within another output of I2. We must avoid making this:
2498 (parallel [(set (mem (reg 69)) ...)
2499 (set (reg 69) ...)])
2500 which is not well-defined as to order of actions.
2501 (Besides, reload can't handle output reloads for this.)
2503 The problem can also happen if the dest of I3 is a memory ref,
2504 if another dest in I2 is an indirect memory ref. */
2505 for (i = 0; i < XVECLEN (p2, 0); i++)
2506 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
2507 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
2508 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
2509 SET_DEST (XVECEXP (p2, 0, i))))
2510 break;
2512 if (i == XVECLEN (p2, 0))
2513 for (i = 0; i < XVECLEN (p2, 0); i++)
2514 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
2515 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
2516 && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
2518 combine_merges++;
2520 subst_insn = i3;
2521 subst_low_luid = DF_INSN_LUID (i2);
2523 added_sets_2 = added_sets_1 = 0;
2524 i2src = SET_DEST (PATTERN (i3));
2525 i2dest = SET_SRC (PATTERN (i3));
2526 i2dest_killed = dead_or_set_p (i2, i2dest);
2528 /* Replace the dest in I2 with our dest and make the resulting
2529 insn the new pattern for I3. Then skip to where we
2530 validate the pattern. Everything was set up above. */
2531 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
2532 SET_DEST (PATTERN (i3)));
2534 newpat = p2;
2535 i3_subst_into_i2 = 1;
2536 goto validate_replacement;
2540 /* If I2 is setting a pseudo to a constant and I3 is setting some
2541 sub-part of it to another constant, merge them by making a new
2542 constant. */
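/* For example (values invented; assumes little-endian subword layout):

   I2: (set (reg:SI 100) (const_int 0x12345678))
   I3: (set (strict_low_part (subreg:HI (reg:SI 100) 0))
            (const_int 0xab))

   can be merged into the single constant load

   (set (reg:SI 100) (const_int 0x123400ab))  */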
2543 if (i1 == 0
2544 && (temp = single_set (i2)) != 0
2545 && (CONST_INT_P (SET_SRC (temp))
2546 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
2547 && GET_CODE (PATTERN (i3)) == SET
2548 && (CONST_INT_P (SET_SRC (PATTERN (i3)))
2549 || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
2550 && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
2552 rtx dest = SET_DEST (PATTERN (i3));
2553 int offset = -1;
2554 int width = 0;
2556 if (GET_CODE (dest) == ZERO_EXTRACT)
2558 if (CONST_INT_P (XEXP (dest, 1))
2559 && CONST_INT_P (XEXP (dest, 2)))
2561 width = INTVAL (XEXP (dest, 1));
2562 offset = INTVAL (XEXP (dest, 2));
2563 dest = XEXP (dest, 0);
2564 if (BITS_BIG_ENDIAN)
2565 offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset;
2568 else
2570 if (GET_CODE (dest) == STRICT_LOW_PART)
2571 dest = XEXP (dest, 0);
2572 width = GET_MODE_BITSIZE (GET_MODE (dest));
2573 offset = 0;
2576 if (offset >= 0)
2578 /* If this is the low part, we're done. */
2579 if (subreg_lowpart_p (dest))
2581 /* Handle the case where inner is twice the size of outer. */
2582 else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
2583 == 2 * GET_MODE_BITSIZE (GET_MODE (dest)))
2584 offset += GET_MODE_BITSIZE (GET_MODE (dest));
2585 /* Otherwise give up for now. */
2586 else
2587 offset = -1;
2590 if (offset >= 0
2591 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
2592 <= HOST_BITS_PER_WIDE_INT * 2))
2594 HOST_WIDE_INT mhi, ohi, ihi;
2595 HOST_WIDE_INT mlo, olo, ilo;
2596 rtx inner = SET_SRC (PATTERN (i3));
2597 rtx outer = SET_SRC (temp);
2599 if (CONST_INT_P (outer))
2601 olo = INTVAL (outer);
2602 ohi = olo < 0 ? -1 : 0;
2604 else
2606 olo = CONST_DOUBLE_LOW (outer);
2607 ohi = CONST_DOUBLE_HIGH (outer);
2610 if (CONST_INT_P (inner))
2612 ilo = INTVAL (inner);
2613 ihi = ilo < 0 ? -1 : 0;
2615 else
2617 ilo = CONST_DOUBLE_LOW (inner);
2618 ihi = CONST_DOUBLE_HIGH (inner);
2621 if (width < HOST_BITS_PER_WIDE_INT)
2623 mlo = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
2624 mhi = 0;
2626 else if (width < HOST_BITS_PER_WIDE_INT * 2)
2628 mhi = ((unsigned HOST_WIDE_INT) 1
2629 << (width - HOST_BITS_PER_WIDE_INT)) - 1;
2630 mlo = -1;
2632 else
2634 mlo = -1;
2635 mhi = -1;
2638 ilo &= mlo;
2639 ihi &= mhi;
2641 if (offset >= HOST_BITS_PER_WIDE_INT)
2643 mhi = mlo << (offset - HOST_BITS_PER_WIDE_INT);
2644 mlo = 0;
2645 ihi = ilo << (offset - HOST_BITS_PER_WIDE_INT);
2646 ilo = 0;
2648 else if (offset > 0)
2650 mhi = (mhi << offset) | ((unsigned HOST_WIDE_INT) mlo
2651 >> (HOST_BITS_PER_WIDE_INT - offset));
2652 mlo = mlo << offset;
2653 ihi = (ihi << offset) | ((unsigned HOST_WIDE_INT) ilo
2654 >> (HOST_BITS_PER_WIDE_INT - offset));
2655 ilo = ilo << offset;
2658 olo = (olo & ~mlo) | ilo;
2659 ohi = (ohi & ~mhi) | ihi;
2661 combine_merges++;
2662 subst_insn = i3;
2663 subst_low_luid = DF_INSN_LUID (i2);
2664 added_sets_2 = added_sets_1 = 0;
2665 i2dest = SET_DEST (temp);
2666 i2dest_killed = dead_or_set_p (i2, i2dest);
2668 /* Replace the source in I2 with the new constant and make the
2669 resulting insn the new pattern for I3. Then skip to where we
2670 validate the pattern. Everything was set up above. */
2671 SUBST (SET_SRC (temp),
2672 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp))));
2674 newpat = PATTERN (i2);
2676 /* The dest of I3 has been replaced with the dest of I2. */
2677 changed_i3_dest = 1;
2678 goto validate_replacement;
2682 #ifndef HAVE_cc0
2683 /* If we have no I1 and I2 looks like:
2684 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
2685 (set Y OP)])
2686 make up a dummy I1 that is
2687 (set Y OP)
2688 and change I2 to be
2689 (set (reg:CC X) (compare:CC Y (const_int 0)))
2691 (We can ignore any trailing CLOBBERs.)
2693 This undoes a previous combination and allows us to match a branch-and-
2694 decrement insn. */
2696 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
2697 && XVECLEN (PATTERN (i2), 0) >= 2
2698 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
2699 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
2700 == MODE_CC)
2701 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
2702 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
2703 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
2704 && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
2705 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
2706 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
2708 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
2709 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
2710 break;
2712 if (i == 1)
2714 /* We make I1 with the same INSN_UID as I2. This gives it
2715 the same DF_INSN_LUID for value tracking. Our fake I1 will
2716 never appear in the insn stream so giving it the same INSN_UID
2717 as I2 will not cause a problem. */
2719 i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
2720 BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
2721 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX);
2723 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
2724 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
2725 SET_DEST (PATTERN (i1)));
2728 #endif
2730 /* Verify that I2 and I1 are valid for combining. */
2731 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
2732 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
2734 undo_all ();
2735 return 0;
2738 /* Record whether I2DEST is used in I2SRC and similarly for the other
2739 cases. Knowing this will help in register status updating below. */
2740 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
2741 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
2742 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
2743 i2dest_killed = dead_or_set_p (i2, i2dest);
2744 i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
2746 /* See if I1 directly feeds into I3. It does if I1DEST is not used
2747 in I2SRC. */
2748 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
2750 /* Ensure that I3's pattern can be the destination of combines. */
2751 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
2752 i1 && i2dest_in_i1src && i1_feeds_i3,
2753 &i3dest_killed))
2755 undo_all ();
2756 return 0;
2759 /* See if any of the insns is a MULT operation. Unless one is, we will
2760 reject a combination that is, since it must be slower. Be conservative
2761 here. */
2762 if (GET_CODE (i2src) == MULT
2763 || (i1 != 0 && GET_CODE (i1src) == MULT)
2764 || (GET_CODE (PATTERN (i3)) == SET
2765 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
2766 have_mult = 1;
2768 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
2769 We used to do this EXCEPT in one case: I3 has a post-inc in an
2770 output operand. However, that exception can give rise to insns like
2771 mov r3,(r3)+
2772 which is a famous insn on the PDP-11 where the value of r3 used as the
2773 source was model-dependent. Avoid this sort of thing. */
2775 #if 0
2776 if (!(GET_CODE (PATTERN (i3)) == SET
2777 && REG_P (SET_SRC (PATTERN (i3)))
2778 && MEM_P (SET_DEST (PATTERN (i3)))
2779 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
2780 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
2781 /* It's not the exception. */
2782 #endif
2783 #ifdef AUTO_INC_DEC
2784 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
2785 if (REG_NOTE_KIND (link) == REG_INC
2786 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
2787 || (i1 != 0
2788 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
2790 undo_all ();
2791 return 0;
2793 #endif
2795 /* See if the SETs in I1 or I2 need to be kept around in the merged
2796 instruction: whenever the value set there is still needed past I3.
2797 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
2799 For the SET in I1, we have two cases: If I1 and I2 independently
2800 feed into I3, the set in I1 needs to be kept around if I1DEST dies
2801 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
2802 in I1 needs to be kept around unless I1DEST dies or is set in either
2803 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
2804 I1DEST. If so, we know I1 feeds into I2. */
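/* A small illustration with invented pseudos:

   I1: (set (reg 90) (reg 91))
   I2: (set (reg 92) (plus (reg 90) (reg 93)))
   I3: (set (reg 94) (mult (reg 92) (reg 95)))

   Here I2SRC mentions I1DEST (reg 90), so I1 feeds I2 and its SET
   must be kept unless reg 90 dies or is set in I2 or I3.  If I3
   instead used reg 90 and reg 92 directly, I1 and I2 would feed I3
   independently and only a death or set in I3 itself would matter.  */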
2806 added_sets_2 = ! dead_or_set_p (i3, i2dest);
2808 added_sets_1
2809 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
2810 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
2812 /* If the set in I2 needs to be kept around, we must make a copy of
2813 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
2814 PATTERN (I2), we are only substituting for the original I1DEST, not into
2815 an already-substituted copy. This also prevents making self-referential
2816 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
2817 I2DEST. */
2819 if (added_sets_2)
2821 if (GET_CODE (PATTERN (i2)) == PARALLEL)
2822 i2pat = gen_rtx_SET (VOIDmode, i2dest, copy_rtx (i2src));
2823 else
2824 i2pat = copy_rtx (PATTERN (i2));
2827 if (added_sets_1)
2829 if (GET_CODE (PATTERN (i1)) == PARALLEL)
2830 i1pat = gen_rtx_SET (VOIDmode, i1dest, copy_rtx (i1src));
2831 else
2832 i1pat = copy_rtx (PATTERN (i1));
2835 combine_merges++;
2837 /* Substitute in the latest insn for the regs set by the earlier ones. */
2839 maxreg = max_reg_num ();
2841 subst_insn = i3;
2843 #ifndef HAVE_cc0
2844 /* Many machines that don't use CC0 have insns that can both perform an
2845 arithmetic operation and set the condition code. These operations will
2846 be represented as a PARALLEL with the first element of the vector
2847 being a COMPARE of an arithmetic operation with the constant zero.
2848 The second element of the vector will set some pseudo to the result
2849 of the same arithmetic operation. If we simplify the COMPARE, we won't
2850 match such a pattern and so will generate an extra insn. Here we test
2851 for this case, where both the comparison and the operation result are
2852 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
2853 I2SRC. Later we will make the PARALLEL that contains I2. */
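/* Sketch of that situation (an invented pattern; the CC register
   number is arbitrary):

   I2: (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int -1)))
   I3: (set (reg:CC 24) (compare:CC (reg:SI 100) (const_int 0)))

   Substituting I2SRC into the COMPARE while keeping I2's SET yields

   (parallel [(set (reg:CC 24)
                   (compare:CC (plus:SI (reg:SI 101) (const_int -1))
                               (const_int 0)))
              (set (reg:SI 100)
                   (plus:SI (reg:SI 101) (const_int -1)))])

   which many targets recognize as a single add-and-set-flags insn.  */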
2855 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
2856 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
2857 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
2858 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
2860 #ifdef SELECT_CC_MODE
2861 rtx *cc_use;
2862 enum machine_mode compare_mode;
2863 #endif
2865 newpat = PATTERN (i3);
2866 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
2868 i2_is_used = 1;
2870 #ifdef SELECT_CC_MODE
2871 /* See if a COMPARE with the operand we substituted in should be done
2872 with the mode that is currently being used. If not, do the same
2873 processing we do in `subst' for a SET; namely, if the destination
2874 is used only once, try to replace it with a register of the proper
2875 mode and also replace the COMPARE. */
2876 if (undobuf.other_insn == 0
2877 && (cc_use = find_single_use (SET_DEST (newpat), i3,
2878 &undobuf.other_insn))
2879 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
2880 i2src, const0_rtx))
2881 != GET_MODE (SET_DEST (newpat))))
2883 if (can_change_dest_mode (SET_DEST (newpat), added_sets_2,
2884 compare_mode))
2886 unsigned int regno = REGNO (SET_DEST (newpat));
2887 rtx new_dest;
2889 if (regno < FIRST_PSEUDO_REGISTER)
2890 new_dest = gen_rtx_REG (compare_mode, regno);
2891 else
2893 SUBST_MODE (regno_reg_rtx[regno], compare_mode);
2894 new_dest = regno_reg_rtx[regno];
2897 SUBST (SET_DEST (newpat), new_dest);
2898 SUBST (XEXP (*cc_use, 0), new_dest);
2899 SUBST (SET_SRC (newpat),
2900 gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
2902 else
2903 undobuf.other_insn = 0;
2905 #endif
2907 else
2908 #endif
2910 /* It is possible that the source of I2 or I1 may be performing
2911 an unneeded operation, such as a ZERO_EXTEND of something
2912 that is known to have the high part zero. Handle that case
2913 by letting subst look at the innermost one of them.
2915 Another way to do this would be to have a function that tries
2916 to simplify a single insn instead of merging two or more
2917 insns. We don't do this because of the potential of infinite
2918 loops and because of the potential extra memory required.
2919 However, doing it the way we are is a bit of a kludge and
2920 doesn't catch all cases.
2922 But only do this if -fexpensive-optimizations since it slows
2923 things down and doesn't usually win.
2925 This is not done in the COMPARE case above because the
2926 unmodified I2PAT is used in the PARALLEL and so a pattern
2927 with a modified I2SRC would not match. */
2929 if (flag_expensive_optimizations)
2931 /* Pass pc_rtx so no substitutions are done, just
2932 simplifications. */
2933 if (i1)
2935 subst_low_luid = DF_INSN_LUID (i1);
2936 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
2938 else
2940 subst_low_luid = DF_INSN_LUID (i2);
2941 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
2945 n_occurrences = 0; /* `subst' counts here */
2947 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
2948 need to make a unique copy of I2SRC each time we substitute it
2949 to avoid self-referential rtl. */
2951 subst_low_luid = DF_INSN_LUID (i2);
2952 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
2953 ! i1_feeds_i3 && i1dest_in_i1src);
2954 substed_i2 = 1;
2956 /* Record whether i2's body now appears within i3's body. */
2957 i2_is_used = n_occurrences;
2960 /* If we already got a failure, don't try to do more. Otherwise,
2961 try to substitute in I1 if we have it. */
2963 if (i1 && GET_CODE (newpat) != CLOBBER)
2965 /* Check that an autoincrement side-effect on I1 has not been lost.
2966 This happens if I1DEST is mentioned in I2 and dies there, and
2967 has disappeared from the new pattern. */
2968 if ((FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2969 && !i1_feeds_i3
2970 && dead_or_set_p (i2, i1dest)
2971 && !reg_overlap_mentioned_p (i1dest, newpat))
2972 /* Before we can do this substitution, we must redo the test done
2973 above (see detailed comments there) that ensures that I1DEST
2974 isn't mentioned in any SETs in NEWPAT that are field assignments. */
2975 || !combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, 0, 0))
2977 undo_all ();
2978 return 0;
2981 n_occurrences = 0;
2982 subst_low_luid = DF_INSN_LUID (i1);
2983 newpat = subst (newpat, i1dest, i1src, 0, 0);
2984 substed_i1 = 1;
2987 /* Fail if an autoincrement side-effect has been duplicated. Be careful
2988 to count all the ways that I2SRC and I1SRC can be used. */
2989 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
2990 && i2_is_used + added_sets_2 > 1)
2991 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2992 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
2993 > 1))
2994 /* Fail if we tried to make a new register. */
2995 || max_reg_num () != maxreg
2996 /* Fail if we couldn't do something and have a CLOBBER. */
2997 || GET_CODE (newpat) == CLOBBER
2998 /* Fail if this new pattern is a MULT and we didn't have one before
2999 at the outer level. */
3000 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
3001 && ! have_mult))
3003 undo_all ();
3004 return 0;
3007 /* If the actions of the earlier insns must be kept
3008 in addition to substituting them into the latest one,
3009 we must make a new PARALLEL for the latest insn
3010 to hold the additional SETs. */
3012 if (added_sets_1 || added_sets_2)
3014 combine_extras++;
3016 if (GET_CODE (newpat) == PARALLEL)
3018 rtvec old = XVEC (newpat, 0);
3019 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
3020 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3021 memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
3022 sizeof (old->elem[0]) * old->num_elem);
3024 else
3026 rtx old = newpat;
3027 total_sets = 1 + added_sets_1 + added_sets_2;
3028 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3029 XVECEXP (newpat, 0, 0) = old;
3032 if (added_sets_1)
3033 XVECEXP (newpat, 0, --total_sets) = i1pat;
3035 if (added_sets_2)
3037 /* If there is no I1, use I2's body as is. We used to also not do
3038 the subst call below if I2 was substituted into I3,
3039 but that could lose a simplification. */
3040 if (i1 == 0)
3041 XVECEXP (newpat, 0, --total_sets) = i2pat;
3042 else
3043 /* See comment where i2pat is assigned. */
3044 XVECEXP (newpat, 0, --total_sets)
3045 = subst (i2pat, i1dest, i1src, 0, 0);
3049 validate_replacement:
3051 /* Note which hard regs this insn has as inputs. */
3052 mark_used_regs_combine (newpat);
3054 /* If recog_for_combine fails, it strips existing clobbers. If we'll
3055 consider splitting this pattern, we might need these clobbers. */
3056 if (i1 && GET_CODE (newpat) == PARALLEL
3057 && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
3059 int len = XVECLEN (newpat, 0);
3061 newpat_vec_with_clobbers = rtvec_alloc (len);
3062 for (i = 0; i < len; i++)
3063 RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
3066 /* Is the result of combination a valid instruction? */
3067 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3069 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
3070 the second SET's destination is a register that is unused and isn't
3071 marked as an instruction that might trap in an EH region. In that case,
3072 we just need the first SET. This can occur when simplifying a divmod
3073 insn. We *must* test for this case here because the code below that
3074 splits two independent SETs doesn't handle this case correctly when it
3075 updates the register status.
3077 It's pointless doing this if we originally had two sets, one from
3078 i3, and one from i2. Combining then splitting the parallel results
3079 in the original i2 again plus an invalid insn (which we delete).
3080 The net effect is only to move instructions around, which makes
3081 debug info less accurate.
3083 Also check the case where the first SET's destination is unused.
3084 That would not cause incorrect code, but does cause an unneeded
3085 insn to remain. */
3087 if (insn_code_number < 0
3088 && !(added_sets_2 && i1 == 0)
3089 && GET_CODE (newpat) == PARALLEL
3090 && XVECLEN (newpat, 0) == 2
3091 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3092 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3093 && asm_noperands (newpat) < 0)
3095 rtx set0 = XVECEXP (newpat, 0, 0);
3096 rtx set1 = XVECEXP (newpat, 0, 1);
3098 if (((REG_P (SET_DEST (set1))
3099 && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
3100 || (GET_CODE (SET_DEST (set1)) == SUBREG
3101 && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
3102 && insn_nothrow_p (i3)
3103 && !side_effects_p (SET_SRC (set1)))
3105 newpat = set0;
3106 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3109 else if (((REG_P (SET_DEST (set0))
3110 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
3111 || (GET_CODE (SET_DEST (set0)) == SUBREG
3112 && find_reg_note (i3, REG_UNUSED,
3113 SUBREG_REG (SET_DEST (set0)))))
3114 && insn_nothrow_p (i3)
3115 && !side_effects_p (SET_SRC (set0)))
3117 newpat = set1;
3118 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3120 if (insn_code_number >= 0)
3121 changed_i3_dest = 1;
3125 /* If we were combining three insns and the result is a simple SET
3126 with no ASM_OPERANDS that wasn't recognized, try to split it into two
3127 insns. There are two ways to do this. It can be split using a
3128 machine-specific method (like when you have an addition of a large
3129 constant) or by combine in the function find_split_point. */
3131 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
3132 && asm_noperands (newpat) < 0)
3134 rtx parallel, m_split, *split;
3136 /* See if the MD file can split NEWPAT. If it can't, see if letting it
3137 use I2DEST as a scratch register will help. In the latter case,
3138 convert I2DEST to the mode of the source of NEWPAT if we can. */
3140 m_split = combine_split_insns (newpat, i3);
3142 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
3143 inputs of NEWPAT. */
3145 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
3146 possible to try that as a scratch reg. This would require adding
3147 more code to make it work though. */
3149 if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
3151 enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
3153 /* First try to split using the original register as a
3154 scratch register. */
3155 parallel = gen_rtx_PARALLEL (VOIDmode,
3156 gen_rtvec (2, newpat,
3157 gen_rtx_CLOBBER (VOIDmode,
3158 i2dest)));
3159 m_split = combine_split_insns (parallel, i3);
3161 /* If that didn't work, try changing the mode of I2DEST if
3162 we can. */
3163 if (m_split == 0
3164 && new_mode != GET_MODE (i2dest)
3165 && new_mode != VOIDmode
3166 && can_change_dest_mode (i2dest, added_sets_2, new_mode))
3168 enum machine_mode old_mode = GET_MODE (i2dest);
3169 rtx ni2dest;
3171 if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3172 ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
3173 else
3175 SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
3176 ni2dest = regno_reg_rtx[REGNO (i2dest)];
3179 parallel = (gen_rtx_PARALLEL
3180 (VOIDmode,
3181 gen_rtvec (2, newpat,
3182 gen_rtx_CLOBBER (VOIDmode,
3183 ni2dest))));
3184 m_split = combine_split_insns (parallel, i3);
3186 if (m_split == 0
3187 && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
3189 struct undo *buf;
3191 adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
3192 buf = undobuf.undos;
3193 undobuf.undos = buf->next;
3194 buf->next = undobuf.frees;
3195 undobuf.frees = buf;
3199 i2scratch = m_split != 0;
3202 /* If recog_for_combine has discarded clobbers, try to use them
3203 again for the split. */
3204 if (m_split == 0 && newpat_vec_with_clobbers)
3206 parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
3207 m_split = combine_split_insns (parallel, i3);
3210 if (m_split && NEXT_INSN (m_split) == NULL_RTX)
3212 m_split = PATTERN (m_split);
3213 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
3214 if (insn_code_number >= 0)
3215 newpat = m_split;
3217 else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
3218 && (next_real_insn (i2) == i3
3219 || ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
3221 rtx i2set, i3set;
3222 rtx newi3pat = PATTERN (NEXT_INSN (m_split));
3223 newi2pat = PATTERN (m_split);
3225 i3set = single_set (NEXT_INSN (m_split));
3226 i2set = single_set (m_split);
3228 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3230 /* If I2 or I3 has multiple SETs, we won't know how to track
3231 register status, so don't use these insns. If I2's destination
3232 is used between I2 and I3, we also can't use these insns. */
3234 if (i2_code_number >= 0 && i2set && i3set
3235 && (next_real_insn (i2) == i3
3236 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
3237 insn_code_number = recog_for_combine (&newi3pat, i3,
3238 &new_i3_notes);
3239 if (insn_code_number >= 0)
3240 newpat = newi3pat;
3242 /* It is possible that both insns now set the destination of I3.
3243 If so, we must show an extra use of it. */
3245 if (insn_code_number >= 0)
3247 rtx new_i3_dest = SET_DEST (i3set);
3248 rtx new_i2_dest = SET_DEST (i2set);
3250 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
3251 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
3252 || GET_CODE (new_i3_dest) == SUBREG)
3253 new_i3_dest = XEXP (new_i3_dest, 0);
3255 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
3256 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
3257 || GET_CODE (new_i2_dest) == SUBREG)
3258 new_i2_dest = XEXP (new_i2_dest, 0);
3260 if (REG_P (new_i3_dest)
3261 && REG_P (new_i2_dest)
3262 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
3263 INC_REG_N_SETS (REGNO (new_i2_dest), 1);
3267 /* If we can split it and use I2DEST, go ahead and see if that
3268 helps things be recognized. Verify that none of the registers
3269 are set between I2 and I3. */
3270 if (insn_code_number < 0
3271 && (split = find_split_point (&newpat, i3, false)) != 0
3272 #ifdef HAVE_cc0
3273 && REG_P (i2dest)
3274 #endif
3275 /* We need I2DEST in the proper mode. If it is a hard register
3276 or the only use of a pseudo, we can change its mode.
3277 Make sure we don't change a hard register to have a mode that
3278 isn't valid for it, or change the number of registers. */
3279 && (GET_MODE (*split) == GET_MODE (i2dest)
3280 || GET_MODE (*split) == VOIDmode
3281 || can_change_dest_mode (i2dest, added_sets_2,
3282 GET_MODE (*split)))
3283 && (next_real_insn (i2) == i3
3284 || ! use_crosses_set_p (*split, DF_INSN_LUID (i2)))
3285 /* We can't overwrite I2DEST if its value is still used by
3286 NEWPAT. */
3287 && ! reg_referenced_p (i2dest, newpat))
3289 rtx newdest = i2dest;
3290 enum rtx_code split_code = GET_CODE (*split);
3291 enum machine_mode split_mode = GET_MODE (*split);
3292 bool subst_done = false;
3293 newi2pat = NULL_RTX;
3295 i2scratch = true;
3297 /* Get NEWDEST as a register in the proper mode. We have already
3298 validated that we can do this. */
3299 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
3301 if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3302 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
3303 else
3305 SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
3306 newdest = regno_reg_rtx[REGNO (i2dest)];
3310 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
3311 an ASHIFT. This can occur if it was inside a PLUS and hence
3312 appeared to be a memory address. This is a kludge. */
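/* E.g. (mult:SI (reg:SI 100) (const_int 8)) becomes
   (ashift:SI (reg:SI 100) (const_int 3)), since exact_log2 (8) is 3
   (the register number is illustrative).  */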
3313 if (split_code == MULT
3314 && CONST_INT_P (XEXP (*split, 1))
3315 && INTVAL (XEXP (*split, 1)) > 0
3316 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
3318 SUBST (*split, gen_rtx_ASHIFT (split_mode,
3319 XEXP (*split, 0), GEN_INT (i)));
3320 /* Update split_code because we may not have a multiply
3321 anymore. */
3322 split_code = GET_CODE (*split);
3325 #ifdef INSN_SCHEDULING
3326 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
3327 be written as a ZERO_EXTEND. */
3328 if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
3330 #ifdef LOAD_EXTEND_OP
3331 /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
3332 what it really is. */
3333 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
3334 == SIGN_EXTEND)
3335 SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
3336 SUBREG_REG (*split)));
3337 else
3338 #endif
3339 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
3340 SUBREG_REG (*split)));
3342 #endif
3344 /* Attempt to split binary operators using arithmetic identities. */
3345 if (BINARY_P (SET_SRC (newpat))
3346 && split_mode == GET_MODE (SET_SRC (newpat))
3347 && ! side_effects_p (SET_SRC (newpat)))
3349 rtx setsrc = SET_SRC (newpat);
3350 enum machine_mode mode = GET_MODE (setsrc);
3351 enum rtx_code code = GET_CODE (setsrc);
3352 rtx src_op0 = XEXP (setsrc, 0);
3353 rtx src_op1 = XEXP (setsrc, 1);
3355 /* Split "X = Y op Y" as "Z = Y; X = Z op Z". */
3356 if (rtx_equal_p (src_op0, src_op1))
3358 newi2pat = gen_rtx_SET (VOIDmode, newdest, src_op0);
3359 SUBST (XEXP (setsrc, 0), newdest);
3360 SUBST (XEXP (setsrc, 1), newdest);
3361 subst_done = true;
3363 /* Split "((P op Q) op R) op S" where op is PLUS or MULT. */
3364 else if ((code == PLUS || code == MULT)
3365 && GET_CODE (src_op0) == code
3366 && GET_CODE (XEXP (src_op0, 0)) == code
3367 && (INTEGRAL_MODE_P (mode)
3368 || (FLOAT_MODE_P (mode)
3369 && flag_unsafe_math_optimizations)))
3371 rtx p = XEXP (XEXP (src_op0, 0), 0);
3372 rtx q = XEXP (XEXP (src_op0, 0), 1);
3373 rtx r = XEXP (src_op0, 1);
3374 rtx s = src_op1;
3376 /* Split both "((X op Y) op X) op Y" and
3377 "((X op Y) op Y) op X" as "T op T" where T is
3378 "X op Y". */
3379 if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
3380 || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
3382 newi2pat = gen_rtx_SET (VOIDmode, newdest,
3383 XEXP (src_op0, 0));
3384 SUBST (XEXP (setsrc, 0), newdest);
3385 SUBST (XEXP (setsrc, 1), newdest);
3386 subst_done = true;
3388 /* Split "((X op X) op Y) op Y" as "T op T" where
3389 T is "X op Y". */
3390 else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
3392 rtx tmp = simplify_gen_binary (code, mode, p, r);
3393 newi2pat = gen_rtx_SET (VOIDmode, newdest, tmp);
3394 SUBST (XEXP (setsrc, 0), newdest);
3395 SUBST (XEXP (setsrc, 1), newdest);
3396 subst_done = true;
3401 if (!subst_done)
3403 newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
3404 SUBST (*split, newdest);
3407 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3409 /* recog_for_combine might have added CLOBBERs to newi2pat.
3410 Make sure NEWPAT does not depend on the clobbered regs. */
3411 if (GET_CODE (newi2pat) == PARALLEL)
3412 for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
3413 if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
3415 rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
3416 if (reg_overlap_mentioned_p (reg, newpat))
3418 undo_all ();
3419 return 0;
3423 /* If the split point was a MULT and we didn't have one before,
3424 don't use one now. */
3425 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
3426 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3430 /* Check for a case where we loaded from memory in a narrow mode and
3431 then sign extended it, but we need both registers. In that case,
3432 we have a PARALLEL with both loads from the same memory location.
3433 We can split this into a load from memory followed by a register-register
3434 copy. This saves at least one insn, more if register allocation can
3435 eliminate the copy.
3437 We cannot do this if the destination of the first assignment is a
3438 condition code register or cc0. We eliminate this case by making sure
3439 the SET_DEST and SET_SRC have the same mode.
3441 We cannot do this if the destination of the second assignment is
3442 a register that we have already assumed is zero-extended. Similarly
3443 for a SUBREG of such a register. */
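/* Sketch of the transformation (invented registers; the lowpart is
   shown for a little-endian target):

   (parallel [(set (reg:SI 100)
                   (sign_extend:SI (mem:HI (reg:SI 101))))
              (set (reg:HI 102) (mem:HI (reg:SI 101)))])

   becomes the load

   (set (reg:SI 100) (sign_extend:SI (mem:HI (reg:SI 101))))

   followed by the register-register copy

   (set (reg:HI 102) (subreg:HI (reg:SI 100) 0))

   so the memory is only read once.  */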
3445 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3446 && GET_CODE (newpat) == PARALLEL
3447 && XVECLEN (newpat, 0) == 2
3448 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3449 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
3450 && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
3451 == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
3452 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3453 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3454 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
3455 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3456 DF_INSN_LUID (i2))
3457 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
3458 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
3459 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
3460 (REG_P (temp)
3461 && VEC_index (reg_stat_type, reg_stat,
3462 REGNO (temp))->nonzero_bits != 0
3463 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
3464 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
3465 && (VEC_index (reg_stat_type, reg_stat,
3466 REGNO (temp))->nonzero_bits
3467 != GET_MODE_MASK (word_mode))))
3468 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
3469 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
3470 (REG_P (temp)
3471 && VEC_index (reg_stat_type, reg_stat,
3472 REGNO (temp))->nonzero_bits != 0
3473 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
3474 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
3475 && (VEC_index (reg_stat_type, reg_stat,
3476 REGNO (temp))->nonzero_bits
3477 != GET_MODE_MASK (word_mode)))))
3478 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
3479 SET_SRC (XVECEXP (newpat, 0, 1)))
3480 && ! find_reg_note (i3, REG_UNUSED,
3481 SET_DEST (XVECEXP (newpat, 0, 0))))
3483 rtx ni2dest;
3485 newi2pat = XVECEXP (newpat, 0, 0);
3486 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
3487 newpat = XVECEXP (newpat, 0, 1);
3488 SUBST (SET_SRC (newpat),
3489 gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
3490 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3492 if (i2_code_number >= 0)
3493 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3495 if (insn_code_number >= 0)
3496 swap_i2i3 = 1;
3499 /* Similarly, check for a case where we have a PARALLEL of two independent
3500 SETs but we started with three insns. In this case, we can do the sets
3501 as two separate insns. This case occurs when some SET allows two
3502 other insns to combine, but the destination of that SET is still live. */
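/* Schematically, an unrecognizable
   (parallel [(set (reg A) (expr_1)) (set (reg B) (expr_2))])
   where neither SET references the other's destination is rewritten
   with the second SET in I2 and the first in I3 (or the other way
   around when the first SET uses cc0).  */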
3504 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3505 && GET_CODE (newpat) == PARALLEL
3506 && XVECLEN (newpat, 0) == 2
3507 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3508 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
3509 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
3510 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3511 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
3512 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
3513 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3514 DF_INSN_LUID (i2))
3515 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
3516 XVECEXP (newpat, 0, 0))
3517 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
3518 XVECEXP (newpat, 0, 1))
3519 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
3520 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))
3521 #ifdef HAVE_cc0
3522 /* We cannot split the parallel into two sets if both sets
3523 reference cc0. */
3524 && ! (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))
3525 && reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 1)))
3526 #endif
3529 /* Normally, it doesn't matter which of the two is done first,
3530 but it does if one references cc0. In that case, it has to
3531 be first. */
3532 #ifdef HAVE_cc0
3533 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
3535 newi2pat = XVECEXP (newpat, 0, 0);
3536 newpat = XVECEXP (newpat, 0, 1);
3538 else
3539 #endif
3541 newi2pat = XVECEXP (newpat, 0, 1);
3542 newpat = XVECEXP (newpat, 0, 0);
3545 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3547 if (i2_code_number >= 0)
3548 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3551 /* If it still isn't recognized, fail and change things back the way they
3552 were. */
3553 if ((insn_code_number < 0
3554 /* Is the result a reasonable ASM_OPERANDS? */
3555 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
3557 undo_all ();
3558 return 0;
3561 /* If we had to change another insn, make sure it is valid also. */
3562 if (undobuf.other_insn)
3564 CLEAR_HARD_REG_SET (newpat_used_regs);
3566 other_pat = PATTERN (undobuf.other_insn);
3567 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
3568 &new_other_notes);
3570 if (other_code_number < 0 && ! check_asm_operands (other_pat))
3572 undo_all ();
3573 return 0;
3577 #ifdef HAVE_cc0
3578 /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
3579 they are adjacent to each other or not. */
3581 rtx p = prev_nonnote_insn (i3);
3582 if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
3583 && sets_cc0_p (newi2pat))
3585 undo_all ();
3586 return 0;
3589 #endif
3591 /* Only allow this combination if insn_rtx_costs reports that the
3592 replacement instructions are cheaper than the originals. */
3593 if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat, other_pat))
3595 undo_all ();
3596 return 0;
3599 if (MAY_HAVE_DEBUG_INSNS)
3601 struct undo *undo;
3603 for (undo = undobuf.undos; undo; undo = undo->next)
3604 if (undo->kind == UNDO_MODE)
3606 rtx reg = *undo->where.r;
3607 enum machine_mode new_mode = GET_MODE (reg);
3608 enum machine_mode old_mode = undo->old_contents.m;
3610 /* Temporarily revert mode back. */
3611 adjust_reg_mode (reg, old_mode);
3613 if (reg == i2dest && i2scratch)
3615 /* If we used i2dest as a scratch register with a
3616 different mode, substitute it for the original
3617 i2src while its original mode is temporarily
3618 restored, and then clear i2scratch so that we don't
3619 do it again later. */
3620 propagate_for_debug (i2, i3, reg, i2src, false);
3621 i2scratch = false;
3622 /* Put back the new mode. */
3623 adjust_reg_mode (reg, new_mode);
3625 else
3627 rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
3628 rtx first, last;
3630 if (reg == i2dest)
3632 first = i2;
3633 last = i3;
3635 else
3637 first = i3;
3638 last = undobuf.other_insn;
3639 gcc_assert (last);
3642 /* We're dealing with a reg that changed mode but not
3643 meaning, so we want to turn it into a subreg for
3644 the new mode. However, because of REG sharing and
3645 because its mode had already changed, we have to do
3646 it in two steps. First, replace any debug uses of
3647 reg, with its original mode temporarily restored,
3648 with this copy we have created; then, replace the
3649 copy with the SUBREG of the original shared reg,
3650 once again changed to the new mode. */
3651 propagate_for_debug (first, last, reg, tempreg, false);
3652 adjust_reg_mode (reg, new_mode);
3653 propagate_for_debug (first, last, tempreg,
3654 lowpart_subreg (old_mode, reg, new_mode),
3655 false);
3660 /* If we will be able to accept this, we have made a
3661 change to the destination of I3. This requires us to
3662 do a few adjustments. */
3664 if (changed_i3_dest)
3666 PATTERN (i3) = newpat;
3667 adjust_for_new_dest (i3);
3670 /* We now know that we can do this combination. Merge the insns and
3671 update the status of registers and LOG_LINKS. */
3673 if (undobuf.other_insn)
3675 rtx note, next;
3677 PATTERN (undobuf.other_insn) = other_pat;
3679 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
3680 are still valid. Then add any non-duplicate notes added by
3681 recog_for_combine. */
3682 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
3684 next = XEXP (note, 1);
3686 if (REG_NOTE_KIND (note) == REG_UNUSED
3687 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
3688 remove_note (undobuf.other_insn, note);
3691 distribute_notes (new_other_notes, undobuf.other_insn,
3692 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
3695 if (swap_i2i3)
3697 rtx insn;
3698 rtx link;
3699 rtx ni2dest;
3701 /* I3 now uses what used to be its destination and which is now
3702 I2's destination. This requires us to do a few adjustments. */
3703 PATTERN (i3) = newpat;
3704 adjust_for_new_dest (i3);
3706 /* We need a LOG_LINK from I3 to I2. But we used to have one,
3707 so we still will.
3709 However, some later insn might be using I2's dest and have
3710 a LOG_LINK pointing at I3. We must remove this link.
3711 The simplest way to remove the link is to point it at I1,
3712 which we know will be a NOTE. */
3714 /* newi2pat is usually a SET here; however, recog_for_combine might
3715 have added some clobbers. */
3716 if (GET_CODE (newi2pat) == PARALLEL)
3717 ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
3718 else
3719 ni2dest = SET_DEST (newi2pat);
3721 for (insn = NEXT_INSN (i3);
3722 insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
3723 || insn != BB_HEAD (this_basic_block->next_bb));
3724 insn = NEXT_INSN (insn))
3726 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
3728 for (link = LOG_LINKS (insn); link;
3729 link = XEXP (link, 1))
3730 if (XEXP (link, 0) == i3)
3731 XEXP (link, 0) = i1;
3733 break;
3739 rtx i3notes, i2notes, i1notes = 0;
3740 rtx i3links, i2links, i1links = 0;
3741 rtx midnotes = 0;
3742 unsigned int regno;
3743 /* Compute which registers we expect to eliminate. newi2pat may be setting
3744 either i3dest or i2dest, so we must check it. Also, i1dest may be the
3745 same as i3dest, in which case newi2pat may be setting i1dest. */
3746 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
3747 || i2dest_in_i2src || i2dest_in_i1src
3748 || !i2dest_killed
3749 ? 0 : i2dest);
3750 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
3751 || (newi2pat && reg_set_p (i1dest, newi2pat))
3752 || !i1dest_killed
3753 ? 0 : i1dest);
3755 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
3756 clear them. */
3757 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
3758 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
3759 if (i1)
3760 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
3762 /* Ensure that we do not have something that should not be shared but
3763 occurs multiple times in the new insns. Check this by first
3764 resetting all the `used' flags and then copying anything that is shared. */
3766 reset_used_flags (i3notes);
3767 reset_used_flags (i2notes);
3768 reset_used_flags (i1notes);
3769 reset_used_flags (newpat);
3770 reset_used_flags (newi2pat);
3771 if (undobuf.other_insn)
3772 reset_used_flags (PATTERN (undobuf.other_insn));
3774 i3notes = copy_rtx_if_shared (i3notes);
3775 i2notes = copy_rtx_if_shared (i2notes);
3776 i1notes = copy_rtx_if_shared (i1notes);
3777 newpat = copy_rtx_if_shared (newpat);
3778 newi2pat = copy_rtx_if_shared (newi2pat);
3779 if (undobuf.other_insn)
3780 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
3782 INSN_CODE (i3) = insn_code_number;
3783 PATTERN (i3) = newpat;
3785 if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
3787 rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
3789 reset_used_flags (call_usage);
3790 call_usage = copy_rtx (call_usage);
3792 if (substed_i2)
3793 replace_rtx (call_usage, i2dest, i2src);
3795 if (substed_i1)
3796 replace_rtx (call_usage, i1dest, i1src);
3798 CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
3801 if (undobuf.other_insn)
3802 INSN_CODE (undobuf.other_insn) = other_code_number;
3804 /* We had one special case above where I2 had more than one set and
3805 we replaced a destination of one of those sets with the destination
3806 of I3. In that case, we have to update LOG_LINKS of insns later
3807 in this basic block. Note that this (expensive) case is rare.
3809 Also, in this case, we must pretend that all REG_NOTEs for I2
3810 actually came from I3, so that REG_UNUSED notes from I2 will be
3811 properly handled. */
3813 if (i3_subst_into_i2)
3815 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
3816 if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
3817 || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
3818 && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
3819 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
3820 && ! find_reg_note (i2, REG_UNUSED,
3821 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
3822 for (temp = NEXT_INSN (i2);
3823 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
3824 || BB_HEAD (this_basic_block) != temp);
3825 temp = NEXT_INSN (temp))
3826 if (temp != i3 && INSN_P (temp))
3827 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
3828 if (XEXP (link, 0) == i2)
3829 XEXP (link, 0) = i3;
3831 if (i3notes)
3833 rtx link = i3notes;
3834 while (XEXP (link, 1))
3835 link = XEXP (link, 1);
3836 XEXP (link, 1) = i2notes;
3838 else
3839 i3notes = i2notes;
3840 i2notes = 0;
3843 LOG_LINKS (i3) = 0;
3844 REG_NOTES (i3) = 0;
3845 LOG_LINKS (i2) = 0;
3846 REG_NOTES (i2) = 0;
3848 if (newi2pat)
3850 if (MAY_HAVE_DEBUG_INSNS && i2scratch)
3851 propagate_for_debug (i2, i3, i2dest, i2src, false);
3852 INSN_CODE (i2) = i2_code_number;
3853 PATTERN (i2) = newi2pat;
3855 else
3857 if (MAY_HAVE_DEBUG_INSNS && i2src)
3858 propagate_for_debug (i2, i3, i2dest, i2src, i3_subst_into_i2);
3859 SET_INSN_DELETED (i2);
3862 if (i1)
3864 LOG_LINKS (i1) = 0;
3865 REG_NOTES (i1) = 0;
3866 if (MAY_HAVE_DEBUG_INSNS)
3867 propagate_for_debug (i1, i3, i1dest, i1src, false);
3868 SET_INSN_DELETED (i1);
3871 /* Get death notes for everything that is now used in either I3 or
3872 I2 and used to die in a previous insn. If we built two new
3873 patterns, move from I1 to I2 then I2 to I3 so that we get the
3874 proper movement on registers that I2 modifies. */
3876 if (newi2pat)
3878 move_deaths (newi2pat, NULL_RTX, DF_INSN_LUID (i1), i2, &midnotes);
3879 move_deaths (newpat, newi2pat, DF_INSN_LUID (i1), i3, &midnotes);
3881 else
3882 move_deaths (newpat, NULL_RTX, i1 ? DF_INSN_LUID (i1) : DF_INSN_LUID (i2),
3883 i3, &midnotes);
3885 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
3886 if (i3notes)
3887 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
3888 elim_i2, elim_i1);
3889 if (i2notes)
3890 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
3891 elim_i2, elim_i1);
3892 if (i1notes)
3893 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
3894 elim_i2, elim_i1);
3895 if (midnotes)
3896 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3897 elim_i2, elim_i1);
3899 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
3900 know these are REG_UNUSED and want them to go to the desired insn,
3901 so we always pass it as i3. */
3903 if (newi2pat && new_i2_notes)
3904 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3906 if (new_i3_notes)
3907 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
3909 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
3910 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
3911 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
3912 in that case, it might delete I2. Similarly for I2 and I1.
3913 Show an additional death due to the REG_DEAD note we make here. If
3914 we discard it in distribute_notes, we will decrement it again. */
3916 if (i3dest_killed)
3918 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
3919 distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
3920 NULL_RTX),
3921 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
3922 else
3923 distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
3924 NULL_RTX),
3925 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3926 elim_i2, elim_i1);
3929 if (i2dest_in_i2src)
3931 if (newi2pat && reg_set_p (i2dest, newi2pat))
3932 distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
3933 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3934 else
3935 distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
3936 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3937 NULL_RTX, NULL_RTX);
3940 if (i1dest_in_i1src)
3942 if (newi2pat && reg_set_p (i1dest, newi2pat))
3943 distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
3944 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3945 else
3946 distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
3947 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3948 NULL_RTX, NULL_RTX);
3951 distribute_links (i3links);
3952 distribute_links (i2links);
3953 distribute_links (i1links);
3955 if (REG_P (i2dest))
3957 rtx link;
3958 rtx i2_insn = 0, i2_val = 0, set;
3960 /* The insn that used to set this register doesn't exist, and
3961 this life of the register may not exist either. See if one of
3962 I3's links points to an insn that sets I2DEST. If it does,
3963 that is now the last known value for I2DEST. If we don't update
3964 this and I2 set the register to a value that depended on its old
3965 contents, we will get confused. If this insn is used, things
3966 will be set correctly in combine_instructions. */
3968 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3969 if ((set = single_set (XEXP (link, 0))) != 0
3970 && rtx_equal_p (i2dest, SET_DEST (set)))
3971 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
3973 record_value_for_reg (i2dest, i2_insn, i2_val);
3975 /* If the reg formerly set in I2 died only once and that was in I3,
3976 zero its use count so it won't make `reload' do any work. */
3977 if (! added_sets_2
3978 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
3979 && ! i2dest_in_i2src)
3981 regno = REGNO (i2dest);
3982 INC_REG_N_SETS (regno, -1);
3986 if (i1 && REG_P (i1dest))
3988 rtx link;
3989 rtx i1_insn = 0, i1_val = 0, set;
3991 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3992 if ((set = single_set (XEXP (link, 0))) != 0
3993 && rtx_equal_p (i1dest, SET_DEST (set)))
3994 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
3996 record_value_for_reg (i1dest, i1_insn, i1_val);
3998 regno = REGNO (i1dest);
3999 if (! added_sets_1 && ! i1dest_in_i1src)
4000 INC_REG_N_SETS (regno, -1);
4003 /* Update reg_stat[].nonzero_bits et al for any changes that may have
4004 been made to this insn. The order of the
4005 set_nonzero_bits_and_sign_copies() calls is important, because
4006 newi2pat can affect the nonzero_bits of newpat. */
4007 if (newi2pat)
4008 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
4009 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
4012 if (undobuf.other_insn != NULL_RTX)
4014 if (dump_file)
4016 fprintf (dump_file, "modifying other_insn ");
4017 dump_insn_slim (dump_file, undobuf.other_insn);
4019 df_insn_rescan (undobuf.other_insn);
4022 if (i1 && !(NOTE_P(i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
4024 if (dump_file)
4026 fprintf (dump_file, "modifying insn i1 ");
4027 dump_insn_slim (dump_file, i1);
4029 df_insn_rescan (i1);
4032 if (i2 && !(NOTE_P(i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
4034 if (dump_file)
4036 fprintf (dump_file, "modifying insn i2 ");
4037 dump_insn_slim (dump_file, i2);
4039 df_insn_rescan (i2);
4042 if (i3 && !(NOTE_P(i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
4044 if (dump_file)
4046 fprintf (dump_file, "modifying insn i3 ");
4047 dump_insn_slim (dump_file, i3);
4049 df_insn_rescan (i3);
4052 /* Set new_direct_jump_p if a new return or simple jump instruction
4053 has been created. Adjust the CFG accordingly. */
4055 if (returnjump_p (i3) || any_uncondjump_p (i3))
4057 *new_direct_jump_p = 1;
4058 mark_jump_label (PATTERN (i3), i3, 0);
4059 update_cfg_for_uncondjump (i3);
4062 if (undobuf.other_insn != NULL_RTX
4063 && (returnjump_p (undobuf.other_insn)
4064 || any_uncondjump_p (undobuf.other_insn)))
4066 *new_direct_jump_p = 1;
4067 update_cfg_for_uncondjump (undobuf.other_insn);
4070 /* A noop might also need cleaning up of the CFG, if it comes from the
4071 simplification of a jump. */
4072 if (GET_CODE (newpat) == SET
4073 && SET_SRC (newpat) == pc_rtx
4074 && SET_DEST (newpat) == pc_rtx)
4076 *new_direct_jump_p = 1;
4077 update_cfg_for_uncondjump (i3);
4080 combine_successes++;
4081 undo_commit ();
4083 if (added_links_insn
4084 && (newi2pat == 0 || DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i2))
4085 && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i3))
4086 return added_links_insn;
4087 else
4088 return newi2pat ? i2 : i3;
4091 /* Undo all the modifications recorded in undobuf. */
4093 static void
4094 undo_all (void)
4096 struct undo *undo, *next;
4098 for (undo = undobuf.undos; undo; undo = next)
4100 next = undo->next;
4101 switch (undo->kind)
4103 case UNDO_RTX:
4104 *undo->where.r = undo->old_contents.r;
4105 break;
4106 case UNDO_INT:
4107 *undo->where.i = undo->old_contents.i;
4108 break;
4109 case UNDO_MODE:
4110 adjust_reg_mode (*undo->where.r, undo->old_contents.m);
4111 break;
4112 default:
4113 gcc_unreachable ();
4116 undo->next = undobuf.frees;
4117 undobuf.frees = undo;
4120 undobuf.undos = 0;
4123 /* We've committed to accepting the changes we made. Move all
4124 of the undos to the free list. */
4126 static void
4127 undo_commit (void)
4129 struct undo *undo, *next;
4131 for (undo = undobuf.undos; undo; undo = next)
4133 next = undo->next;
4134 undo->next = undobuf.frees;
4135 undobuf.frees = undo;
4137 undobuf.undos = 0;
4140 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
4141 where we have an arithmetic expression and return that point. LOC will
4142 be inside INSN.
4144 try_combine will call this function to see if an insn can be split into
4145 two insns. */
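/* For example, if (set (reg X) (and (reg Y) (const_int 0x123456)))
   fails to match because the constant needs more than one insn to
   build, the constant operand can be the split point: try_combine
   then loads it into a new register in a separate insn and retries
   recognition.  (An illustrative sketch; the split point actually
   chosen is target- and pattern-dependent.)  */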
4147 static rtx *
4148 find_split_point (rtx *loc, rtx insn, bool set_src)
4150 rtx x = *loc;
4151 enum rtx_code code = GET_CODE (x);
4152 rtx *split;
4153 unsigned HOST_WIDE_INT len = 0;
4154 HOST_WIDE_INT pos = 0;
4155 int unsignedp = 0;
4156 rtx inner = NULL_RTX;
4158 /* First special-case some codes. */
4159 switch (code)
4161 case SUBREG:
4162 #ifdef INSN_SCHEDULING
4163 /* If we are making a paradoxical SUBREG invalid, it becomes a split
4164 point. */
4165 if (MEM_P (SUBREG_REG (x)))
4166 return loc;
4167 #endif
4168 return find_split_point (&SUBREG_REG (x), insn, false);
4170 case MEM:
4171 #ifdef HAVE_lo_sum
4172 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
4173 using LO_SUM and HIGH. */
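/* E.g. (mem (symbol_ref "var")) becomes
   (mem (lo_sum (high (symbol_ref "var")) (symbol_ref "var")))
   and the HIGH becomes the split point, matching the usual
   "load upper bits, then add the low bits" addressing sequence.  */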
4174 if (GET_CODE (XEXP (x, 0)) == CONST
4175 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
4177 enum machine_mode address_mode
4178 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
4180 SUBST (XEXP (x, 0),
4181 gen_rtx_LO_SUM (address_mode,
4182 gen_rtx_HIGH (address_mode, XEXP (x, 0)),
4183 XEXP (x, 0)));
4184 return &XEXP (XEXP (x, 0), 0);
4186 #endif
4188 /* If we have a PLUS whose second operand is a constant and the
4189 address is not valid, perhaps we can split it up using
4190 the machine-specific way to split large constants. We use
4191 the first pseudo-reg (one of the virtual regs) as a placeholder;
4192 it will not remain in the result. */
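/* On a typical RISC target this lets an out-of-range offset such as
   0x12345678 be built in two steps (e.g. a high-part load followed
   by an add), with the split point falling between the two halves.  */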
4193 if (GET_CODE (XEXP (x, 0)) == PLUS
4194 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
4195 && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4196 MEM_ADDR_SPACE (x)))
4198 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
4199 rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
4200 XEXP (x, 0)),
4201 subst_insn);
4203 /* This should have produced two insns, each of which sets our
4204 placeholder. If the source of the second is a valid address,
4205 we can put both sources together and make a split point
4206 in the middle. */
4208 if (seq
4209 && NEXT_INSN (seq) != NULL_RTX
4210 && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
4211 && NONJUMP_INSN_P (seq)
4212 && GET_CODE (PATTERN (seq)) == SET
4213 && SET_DEST (PATTERN (seq)) == reg
4214 && ! reg_mentioned_p (reg,
4215 SET_SRC (PATTERN (seq)))
4216 && NONJUMP_INSN_P (NEXT_INSN (seq))
4217 && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
4218 && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
4219 && memory_address_addr_space_p
4220 (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
4221 MEM_ADDR_SPACE (x)))
4223 rtx src1 = SET_SRC (PATTERN (seq));
4224 rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
4226 /* Replace the placeholder in SRC2 with SRC1. If we can
4227 find where in SRC2 it was placed, that can become our
4228 split point and we can replace this address with SRC2.
4229 Just try two obvious places. */
4231 src2 = replace_rtx (src2, reg, src1);
4232 split = 0;
4233 if (XEXP (src2, 0) == src1)
4234 split = &XEXP (src2, 0);
4235 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
4236 && XEXP (XEXP (src2, 0), 0) == src1)
4237 split = &XEXP (XEXP (src2, 0), 0);
4239 if (split)
4241 SUBST (XEXP (x, 0), src2);
4242 return split;
4246 /* If that didn't work, perhaps the first operand is complex and
4247 needs to be computed separately, so make a split point there.
4248 This will occur on machines that just support REG + CONST
4249 and have a constant moved through some previous computation. */
4251 else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
4252 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
4253 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
4254 return &XEXP (XEXP (x, 0), 0);
4257 /* If we have a PLUS whose first operand is complex, try computing it
4258 separately by making a split there. */
4259 if (GET_CODE (XEXP (x, 0)) == PLUS
4260 && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4261 MEM_ADDR_SPACE (x))
4262 && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
4263 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
4264 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
4265 return &XEXP (XEXP (x, 0), 0);
4266 break;
4268 case SET:
4269 #ifdef HAVE_cc0
4270 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
4271 ZERO_EXTRACT, the most likely reason why this doesn't match is that
4272 we need to put the operand into a register. So split at that
4273 point. */
4275 if (SET_DEST (x) == cc0_rtx
4276 && GET_CODE (SET_SRC (x)) != COMPARE
4277 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
4278 && !OBJECT_P (SET_SRC (x))
4279 && ! (GET_CODE (SET_SRC (x)) == SUBREG
4280 && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
4281 return &SET_SRC (x);
4282 #endif
4284 /* See if we can split SET_SRC as it stands. */
4285 split = find_split_point (&SET_SRC (x), insn, true);
4286 if (split && split != &SET_SRC (x))
4287 return split;
4289 /* See if we can split SET_DEST as it stands. */
4290 split = find_split_point (&SET_DEST (x), insn, false);
4291 if (split && split != &SET_DEST (x))
4292 return split;
4294 /* See if this is a bitfield assignment with everything constant. If
4295 so, this is an IOR of an AND, so split it into that. */
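/* E.g. storing the constant 5 into a 3-bit field at bit 4 of (reg D)
   becomes "(set D (ior (and D (const_int -113)) (const_int 80)))",
   since ~(7 << 4) == -113 and (5 << 4) == 80; when the stored value
   is all ones the AND is omitted.  (Positions here are counted from
   the low end; BITS_BIG_ENDIAN targets adjust POS first.)  */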
4296 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4297 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
4298 <= HOST_BITS_PER_WIDE_INT)
4299 && CONST_INT_P (XEXP (SET_DEST (x), 1))
4300 && CONST_INT_P (XEXP (SET_DEST (x), 2))
4301 && CONST_INT_P (SET_SRC (x))
4302 && ((INTVAL (XEXP (SET_DEST (x), 1))
4303 + INTVAL (XEXP (SET_DEST (x), 2)))
4304 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
4305 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
4307 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
4308 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
4309 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
4310 rtx dest = XEXP (SET_DEST (x), 0);
4311 enum machine_mode mode = GET_MODE (dest);
4312 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
4313 rtx or_mask;
4315 if (BITS_BIG_ENDIAN)
4316 pos = GET_MODE_BITSIZE (mode) - len - pos;
4318 or_mask = gen_int_mode (src << pos, mode);
4319 if (src == mask)
4320 SUBST (SET_SRC (x),
4321 simplify_gen_binary (IOR, mode, dest, or_mask));
4322 else
4324 rtx negmask = gen_int_mode (~(mask << pos), mode);
4325 SUBST (SET_SRC (x),
4326 simplify_gen_binary (IOR, mode,
4327 simplify_gen_binary (AND, mode,
4328 dest, negmask),
4329 or_mask));
4332 SUBST (SET_DEST (x), dest);
4334 split = find_split_point (&SET_SRC (x), insn, true);
4335 if (split && split != &SET_SRC (x))
4336 return split;
4339 /* Otherwise, see if this is an operation that we can split into two.
4340 If so, try to split that. */
4341 code = GET_CODE (SET_SRC (x));
4343 switch (code)
4345 case AND:
4346 /* If we are AND'ing with a large constant that is only a single
4347 bit and the result is only being used in a context where we
4348 need to know if it is zero or nonzero, replace it with a bit
4349 extraction. This will avoid the large constant, which might
4350 have taken more than one insn to make. If the constant were
4351 not a valid argument to the AND but took only one insn to make,
4352 this is no worse, but if it took more than one insn, it will
4353 be better. */
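/* E.g. if (set (reg D) (and (reg X) (const_int 0x100000))) feeds only
   "(ne (reg D) (const_int 0))", the AND is replaced by a one-bit
   ZERO_EXTRACT of bit 20 of X, and we look for a split point in the
   result.  */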
4355 if (CONST_INT_P (XEXP (SET_SRC (x), 1))
4356 && REG_P (XEXP (SET_SRC (x), 0))
4357 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
4358 && REG_P (SET_DEST (x))
4359 && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
4360 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
4361 && XEXP (*split, 0) == SET_DEST (x)
4362 && XEXP (*split, 1) == const0_rtx)
4364 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
4365 XEXP (SET_SRC (x), 0),
4366 pos, NULL_RTX, 1, 1, 0, 0);
4367 if (extraction != 0)
4369 SUBST (SET_SRC (x), extraction);
4370 return find_split_point (loc, insn, false);
4373 break;
4375 case NE:
4376 /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
4377 is known to be on, this can be converted into a NEG of a shift. */
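/* E.g. when only bit 3 of X can be nonzero, "(ne X (const_int 0))"
   becomes "(neg (lshiftrt X (const_int 3)))": the shift leaves 0 or
   1, and the NEG turns 1 into STORE_FLAG_VALUE == -1.  */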
4378 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
4379 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
4380 && 1 <= (pos = exact_log2
4381 (nonzero_bits (XEXP (SET_SRC (x), 0),
4382 GET_MODE (XEXP (SET_SRC (x), 0))))))
4384 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
4386 SUBST (SET_SRC (x),
4387 gen_rtx_NEG (mode,
4388 gen_rtx_LSHIFTRT (mode,
4389 XEXP (SET_SRC (x), 0),
4390 GEN_INT (pos))));
4392 split = find_split_point (&SET_SRC (x), insn, true);
4393 if (split && split != &SET_SRC (x))
4394 return split;
4396 break;
4398 case SIGN_EXTEND:
4399 inner = XEXP (SET_SRC (x), 0);
4401 /* We can't optimize if either mode is a partial integer
4402 mode as we don't know how many bits are significant
4403 in those modes. */
4404 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
4405 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
4406 break;
4408 pos = 0;
4409 len = GET_MODE_BITSIZE (GET_MODE (inner));
4410 unsignedp = 0;
4411 break;
4413 case SIGN_EXTRACT:
4414 case ZERO_EXTRACT:
4415 if (CONST_INT_P (XEXP (SET_SRC (x), 1))
4416 && CONST_INT_P (XEXP (SET_SRC (x), 2)))
4418 inner = XEXP (SET_SRC (x), 0);
4419 len = INTVAL (XEXP (SET_SRC (x), 1));
4420 pos = INTVAL (XEXP (SET_SRC (x), 2));
4422 if (BITS_BIG_ENDIAN)
4423 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
4424 unsignedp = (code == ZERO_EXTRACT);
4426 break;
4428 default:
4429 break;
4432 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
4434 enum machine_mode mode = GET_MODE (SET_SRC (x));
4436 /* For unsigned, we have a choice of a shift followed by an
4437 AND or two shifts. Use two shifts for field sizes where the
4438 constant might be too large. We assume here that we can
4439 always at least get 8-bit constants in an AND insn, which is
4440 true for every current RISC. */
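/* E.g. an unsigned 5-bit field at bit 2 of a 32-bit value becomes
   "(and (lshiftrt X 2) 31)"; the signed version becomes
   "(ashiftrt (ashift X 25) 27)", and the same two-shift form is
   used for unsigned fields wider than 8 bits.  */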
4442 if (unsignedp && len <= 8)
4444 SUBST (SET_SRC (x),
4445 gen_rtx_AND (mode,
4446 gen_rtx_LSHIFTRT
4447 (mode, gen_lowpart (mode, inner),
4448 GEN_INT (pos)),
4449 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
4451 split = find_split_point (&SET_SRC (x), insn, true);
4452 if (split && split != &SET_SRC (x))
4453 return split;
4455 else
4457 SUBST (SET_SRC (x),
4458 gen_rtx_fmt_ee
4459 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
4460 gen_rtx_ASHIFT (mode,
4461 gen_lowpart (mode, inner),
4462 GEN_INT (GET_MODE_BITSIZE (mode)
4463 - len - pos)),
4464 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
4466 split = find_split_point (&SET_SRC (x), insn, true);
4467 if (split && split != &SET_SRC (x))
4468 return split;
4472 /* See if this is a simple operation with a constant as the second
4473 operand. It might be that this constant is out of range and hence
4474 could be used as a split point. */
4475 if (BINARY_P (SET_SRC (x))
4476 && CONSTANT_P (XEXP (SET_SRC (x), 1))
4477 && (OBJECT_P (XEXP (SET_SRC (x), 0))
4478 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
4479 && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
4480 return &XEXP (SET_SRC (x), 1);
4482 /* Finally, see if this is a simple operation with its first operand
4483 not in a register. The operation might require this operand in a
4484 register, so return it as a split point. We can always do this
4485 because if the first operand were another operation, we would have
4486 already found it as a split point. */
4487 if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
4488 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
4489 return &XEXP (SET_SRC (x), 0);
4491 return 0;
4493 case AND:
4494 case IOR:
4495 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
4496 it is better to write this as (not (ior A B)) so we can split it.
4497 Similarly for IOR. */
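/* This is just De Morgan's law: (and (not A) (not B)) is
   (not (ior A B)), and (ior (not A) (not B)) is (not (and A B)).  */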
4498 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
4500 SUBST (*loc,
4501 gen_rtx_NOT (GET_MODE (x),
4502 gen_rtx_fmt_ee (code == IOR ? AND : IOR,
4503 GET_MODE (x),
4504 XEXP (XEXP (x, 0), 0),
4505 XEXP (XEXP (x, 1), 0))));
4506 return find_split_point (loc, insn, set_src);
4509 /* Many RISC machines have a large set of logical insns. If the
4510 second operand is a NOT, put it first so we will try to split the
4511 other operand first. */
4512 if (GET_CODE (XEXP (x, 1)) == NOT)
4514 rtx tem = XEXP (x, 0);
4515 SUBST (XEXP (x, 0), XEXP (x, 1));
4516 SUBST (XEXP (x, 1), tem);
4518 break;
4520 case PLUS:
4521 case MINUS:
4522 /* Split at a multiply-accumulate instruction. However if this is
4523 the SET_SRC, we likely do not have such an instruction and it's
4524 worthless to try this split. */
4525 if (!set_src && GET_CODE (XEXP (x, 0)) == MULT)
4526 return loc;
4528 default:
4529 break;
4532 /* Otherwise, select our actions depending on our rtx class. */
4533 switch (GET_RTX_CLASS (code))
4535 case RTX_BITFIELD_OPS: /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
4536 case RTX_TERNARY:
4537 split = find_split_point (&XEXP (x, 2), insn, false);
4538 if (split)
4539 return split;
4540 /* ... fall through ... */
4541 case RTX_BIN_ARITH:
4542 case RTX_COMM_ARITH:
4543 case RTX_COMPARE:
4544 case RTX_COMM_COMPARE:
4545 split = find_split_point (&XEXP (x, 1), insn, false);
4546 if (split)
4547 return split;
4548 /* ... fall through ... */
4549 case RTX_UNARY:
4550 /* Some machines have (and (shift ...) ...) insns. If X is not
4551 an AND, but XEXP (X, 0) is, use it as our split point. */
4552 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
4553 return &XEXP (x, 0);
4555 split = find_split_point (&XEXP (x, 0), insn, false);
4556 if (split)
4557 return split;
4558 return loc;
4560 default:
4561 /* Otherwise, we don't have a split point. */
4562 return 0;
4566 /* Throughout X, replace FROM with TO, and return the result.
4567 The result is TO if X is FROM;
4568 otherwise the result is X, but its contents may have been modified.
4569 If they were modified, a record was made in undobuf so that
4570 undo_all will (among other things) return X to its original state.
4572 If the number of changes necessary is too great to record and undo,
4573 the excess changes are not made, so the result is invalid.
4574 The changes already made can still be undone.
4575 undobuf.num_undo is incremented for such changes, so by testing that
4576 the caller can tell whether the result is valid.
4578 `n_occurrences' is incremented each time FROM is replaced.
4580 IN_DEST is nonzero if we are processing the SET_DEST of a SET.
4582 UNIQUE_COPY is nonzero if each substitution must be unique. We do this
4583 by copying if `n_occurrences' is nonzero. */
4585 static rtx
4586 subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
4588 enum rtx_code code = GET_CODE (x);
4589 enum machine_mode op0_mode = VOIDmode;
4590 const char *fmt;
4591 int len, i;
4592 rtx new_rtx;
4594 /* Two expressions are equal if they are identical copies of a shared
4595 RTX or if they are both registers with the same register number
4596 and mode. */
4598 #define COMBINE_RTX_EQUAL_P(X,Y) \
4599 ((X) == (Y) \
4600 || (REG_P (X) && REG_P (Y) \
4601 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
4603 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
4605 n_occurrences++;
4606 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
4609 /* If X and FROM are the same register but different modes, they
4610 will not have been seen as equal above. However, the log links code
4611 will make a LOG_LINKS entry for that case. If we do nothing, we
4612 will try to rerecognize our original insn and, when it succeeds,
4613 we will delete the feeding insn, which is incorrect.
4615 So force this insn not to match in this (rare) case. */
4616 if (! in_dest && code == REG && REG_P (from)
4617 && reg_overlap_mentioned_p (x, from))
4618 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
4620 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
4621 of which may contain things that can be combined. */
4622 if (code != MEM && code != LO_SUM && OBJECT_P (x))
4623 return x;
4625 /* It is possible to have a subexpression appear twice in the insn.
4626 Suppose that FROM is a register that appears within TO.
4627 Then, after that subexpression has been scanned once by `subst',
4628 the second time it is scanned, TO may be found. If we were
4629 to scan TO here, we would find FROM within it and create a
4630 self-referent rtl structure which is completely wrong. */
4631 if (COMBINE_RTX_EQUAL_P (x, to))
4632 return to;
4634 /* Parallel asm_operands need special attention because all of the
4635 inputs are shared across the arms. Furthermore, unsharing the
4636 rtl results in recognition failures. Failure to handle this case
4637 specially can result in circular rtl.
4639 Solve this by doing a normal pass across the first entry of the
4640 parallel, and only processing the SET_DESTs of the subsequent
4641 entries. Ug. */
4643 if (code == PARALLEL
4644 && GET_CODE (XVECEXP (x, 0, 0)) == SET
4645 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
4647 new_rtx = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
4649 /* If this substitution failed, this whole thing fails. */
4650 if (GET_CODE (new_rtx) == CLOBBER
4651 && XEXP (new_rtx, 0) == const0_rtx)
4652 return new_rtx;
4654 SUBST (XVECEXP (x, 0, 0), new_rtx);
4656 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
4658 rtx dest = SET_DEST (XVECEXP (x, 0, i));
4660 if (!REG_P (dest)
4661 && GET_CODE (dest) != CC0
4662 && GET_CODE (dest) != PC)
4664 new_rtx = subst (dest, from, to, 0, unique_copy);
4666 /* If this substitution failed, this whole thing fails. */
4667 if (GET_CODE (new_rtx) == CLOBBER
4668 && XEXP (new_rtx, 0) == const0_rtx)
4669 return new_rtx;
4671 SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
4675 else
4677 len = GET_RTX_LENGTH (code);
4678 fmt = GET_RTX_FORMAT (code);
4680 /* We don't need to process a SET_DEST that is a register, CC0,
4681 or PC, so set up to skip this common case. All other cases
4682 where we want to suppress replacing something inside a
4683 SET_SRC are handled via the IN_DEST operand. */
4684 if (code == SET
4685 && (REG_P (SET_DEST (x))
4686 || GET_CODE (SET_DEST (x)) == CC0
4687 || GET_CODE (SET_DEST (x)) == PC))
4688 fmt = "ie";
4690 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
4691 constant. */
4692 if (fmt[0] == 'e')
4693 op0_mode = GET_MODE (XEXP (x, 0));
4695 for (i = 0; i < len; i++)
4697 if (fmt[i] == 'E')
4699 int j;
4700 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4702 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
4704 new_rtx = (unique_copy && n_occurrences
4705 ? copy_rtx (to) : to);
4706 n_occurrences++;
4708 else
4710 new_rtx = subst (XVECEXP (x, i, j), from, to, 0,
4711 unique_copy);
4713 /* If this substitution failed, this whole thing
4714 fails. */
4715 if (GET_CODE (new_rtx) == CLOBBER
4716 && XEXP (new_rtx, 0) == const0_rtx)
4717 return new_rtx;
4720 SUBST (XVECEXP (x, i, j), new_rtx);
4723 else if (fmt[i] == 'e')
4725 /* If this is a register being set, ignore it. */
4726 new_rtx = XEXP (x, i);
4727 if (in_dest
4728 && i == 0
4729 && (((code == SUBREG || code == ZERO_EXTRACT)
4730 && REG_P (new_rtx))
4731 || code == STRICT_LOW_PART))
4734 else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
4736 /* In general, don't install a subreg involving two
4737 modes not tieable. It can worsen register
4738 allocation, and can even make invalid reload
4739 insns, since the reg inside may need to be copied
4740 from in the outside mode, and that may be invalid
4741 if it is an fp reg copied in integer mode.
4743 We allow two exceptions to this: It is valid if
4744 it is inside another SUBREG and the mode of that
4745 SUBREG and the mode of the inside of TO is
4746 tieable and it is valid if X is a SET that copies
4747 FROM to CC0. */
4749 if (GET_CODE (to) == SUBREG
4750 && ! MODES_TIEABLE_P (GET_MODE (to),
4751 GET_MODE (SUBREG_REG (to)))
4752 && ! (code == SUBREG
4753 && MODES_TIEABLE_P (GET_MODE (x),
4754 GET_MODE (SUBREG_REG (to))))
4755 #ifdef HAVE_cc0
4756 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
4757 #endif
4759 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4761 #ifdef CANNOT_CHANGE_MODE_CLASS
4762 if (code == SUBREG
4763 && REG_P (to)
4764 && REGNO (to) < FIRST_PSEUDO_REGISTER
4765 && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
4766 GET_MODE (to),
4767 GET_MODE (x)))
4768 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4769 #endif
4771 new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
4772 n_occurrences++;
4774 else
4775 /* If we are in a SET_DEST, suppress most cases unless we
4776 have gone inside a MEM, in which case we want to
4777 simplify the address. We assume here that things that
4778 are actually part of the destination have their inner
4779 parts in the first expression. This is true for SUBREG,
4780 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
4781 things aside from REG and MEM that should appear in a
4782 SET_DEST. */
4783 new_rtx = subst (XEXP (x, i), from, to,
4784 (((in_dest
4785 && (code == SUBREG || code == STRICT_LOW_PART
4786 || code == ZERO_EXTRACT))
4787 || code == SET)
4788 && i == 0), unique_copy);
4790 /* If we found that we will have to reject this combination,
4791 indicate that by returning the CLOBBER ourselves, rather than
4792 an expression containing it. This will speed things up as
4793 well as prevent accidents where two CLOBBERs are considered
4794 to be equal, thus producing an incorrect simplification. */
4796 if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
4797 return new_rtx;
4799 if (GET_CODE (x) == SUBREG
4800 && (CONST_INT_P (new_rtx)
4801 || GET_CODE (new_rtx) == CONST_DOUBLE))
4803 enum machine_mode mode = GET_MODE (x);
4805 x = simplify_subreg (GET_MODE (x), new_rtx,
4806 GET_MODE (SUBREG_REG (x)),
4807 SUBREG_BYTE (x));
4808 if (! x)
4809 x = gen_rtx_CLOBBER (mode, const0_rtx);
4811 else if (CONST_INT_P (new_rtx)
4812 && GET_CODE (x) == ZERO_EXTEND)
4814 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
4815 new_rtx, GET_MODE (XEXP (x, 0)));
4816 gcc_assert (x);
4818 else
4819 SUBST (XEXP (x, i), new_rtx);
4824 /* Check if we are loading something from the constant pool via float
4825 extension; in this case we would undo the compress_float_constant
4826 optimization and turn the constant load back into an immediate value. */
4827 if (GET_CODE (x) == FLOAT_EXTEND
4828 && MEM_P (XEXP (x, 0))
4829 && MEM_READONLY_P (XEXP (x, 0)))
4831 rtx tmp = avoid_constant_pool_reference (x);
4832 if (x != tmp)
4833 return x;
4836 /* Try to simplify X. If the simplification changed the code, it is likely
4837 that further simplification will help, so loop, but limit the number
4838 of repetitions that will be performed. */
4840 for (i = 0; i < 4; i++)
4842 /* If X is sufficiently simple, don't bother trying to do anything
4843 with it. */
4844 if (code != CONST_INT && code != REG && code != CLOBBER)
4845 x = combine_simplify_rtx (x, op0_mode, in_dest);
4847 if (GET_CODE (x) == code)
4848 break;
4850 code = GET_CODE (x);
4852 /* We no longer know the original mode of operand 0 since we
4853 have changed the form of X. */
4854 op0_mode = VOIDmode;
4857 return x;
4860 /* Simplify X, a piece of RTL. We just operate on the expression at the
4861 outer level; call `subst' to simplify recursively. Return the new
4862 expression.
4864 OP0_MODE is the original mode of XEXP (x, 0). IN_DEST is nonzero
4865 if we are inside a SET_DEST. */
4867 static rtx
4868 combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
4870 enum rtx_code code = GET_CODE (x);
4871 enum machine_mode mode = GET_MODE (x);
4872 rtx temp;
4873 int i;
4875 /* If this is a commutative operation, put a constant last and a complex
4876 expression first. We don't need to do this for comparisons here. */
4877 if (COMMUTATIVE_ARITH_P (x)
4878 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
4880 temp = XEXP (x, 0);
4881 SUBST (XEXP (x, 0), XEXP (x, 1));
4882 SUBST (XEXP (x, 1), temp);
4885 /* If this is a simple operation applied to an IF_THEN_ELSE, try
4886 applying it to the arms of the IF_THEN_ELSE. This often simplifies
4887 things. Check for cases where both arms are testing the same
4888 condition.
4890 Don't do anything if all operands are very simple. */
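/* E.g. (plus (if_then_else C A B) (reg R)) can be viewed as
   (if_then_else C (plus A (reg R)) (plus B (reg R))); if both arms
   then collapse to store-flag values, the whole expression may
   simplify to a single comparison.  */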
4892 if ((BINARY_P (x)
4893 && ((!OBJECT_P (XEXP (x, 0))
4894 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4895 && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
4896 || (!OBJECT_P (XEXP (x, 1))
4897 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
4898 && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
4899 || (UNARY_P (x)
4900 && (!OBJECT_P (XEXP (x, 0))
4901 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4902 && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
4904 rtx cond, true_rtx, false_rtx;
4906 cond = if_then_else_cond (x, &true_rtx, &false_rtx);
4907 if (cond != 0
4908 /* If everything is a comparison, what we have is highly unlikely
4909 to be simpler, so don't use it. */
4910 && ! (COMPARISON_P (x)
4911 && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
4913 rtx cop1 = const0_rtx;
4914 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
4916 if (cond_code == NE && COMPARISON_P (cond))
4917 return x;
4919 /* Simplify the alternative arms; this may collapse the true and
4920 false arms to store-flag values. Be careful to use copy_rtx
4921 here since true_rtx or false_rtx might share RTL with x as a
4922 result of the if_then_else_cond call above. */
4923 true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
4924 false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);
4926 /* If true_rtx and false_rtx are not general_operands, an if_then_else
4927 is unlikely to be simpler. */
4928 if (general_operand (true_rtx, VOIDmode)
4929 && general_operand (false_rtx, VOIDmode))
4931 enum rtx_code reversed;
4933 /* Restarting if we generate a store-flag expression will cause
4934 us to loop. Just drop through in this case. */
4936 /* If the result values are STORE_FLAG_VALUE and zero, we can
4937 just make the comparison operation. */
4938 if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
4939 x = simplify_gen_relational (cond_code, mode, VOIDmode,
4940 cond, cop1);
4941 else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
4942 && ((reversed = reversed_comparison_code_parts
4943 (cond_code, cond, cop1, NULL))
4944 != UNKNOWN))
4945 x = simplify_gen_relational (reversed, mode, VOIDmode,
4946 cond, cop1);
4948 /* Likewise, we can make the negate of a comparison operation
4949 if the result values are - STORE_FLAG_VALUE and zero. */
4950 else if (CONST_INT_P (true_rtx)
4951 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
4952 && false_rtx == const0_rtx)
4953 x = simplify_gen_unary (NEG, mode,
4954 simplify_gen_relational (cond_code,
4955 mode, VOIDmode,
4956 cond, cop1),
4957 mode);
4958 else if (CONST_INT_P (false_rtx)
4959 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
4960 && true_rtx == const0_rtx
4961 && ((reversed = reversed_comparison_code_parts
4962 (cond_code, cond, cop1, NULL))
4963 != UNKNOWN))
4964 x = simplify_gen_unary (NEG, mode,
4965 simplify_gen_relational (reversed,
4966 mode, VOIDmode,
4967 cond, cop1),
4968 mode);
4969 else
4970 return gen_rtx_IF_THEN_ELSE (mode,
4971 simplify_gen_relational (cond_code,
4972 mode,
4973 VOIDmode,
4974 cond,
4975 cop1),
4976 true_rtx, false_rtx);
4978 code = GET_CODE (x);
4979 op0_mode = VOIDmode;
4984 /* Try to fold this expression in case we have constants that weren't
4985 present before. */
4986 temp = 0;
4987 switch (GET_RTX_CLASS (code))
4989 case RTX_UNARY:
4990 if (op0_mode == VOIDmode)
4991 op0_mode = GET_MODE (XEXP (x, 0));
4992 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
4993 break;
4994 case RTX_COMPARE:
4995 case RTX_COMM_COMPARE:
4997 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
4998 if (cmp_mode == VOIDmode)
5000 cmp_mode = GET_MODE (XEXP (x, 1));
5001 if (cmp_mode == VOIDmode)
5002 cmp_mode = op0_mode;
5004 temp = simplify_relational_operation (code, mode, cmp_mode,
5005 XEXP (x, 0), XEXP (x, 1));
5007 break;
5008 case RTX_COMM_ARITH:
5009 case RTX_BIN_ARITH:
5010 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
5011 break;
5012 case RTX_BITFIELD_OPS:
5013 case RTX_TERNARY:
5014 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
5015 XEXP (x, 1), XEXP (x, 2));
5016 break;
5017 default:
5018 break;
5021 if (temp)
5023 x = temp;
5024 code = GET_CODE (temp);
5025 op0_mode = VOIDmode;
5026 mode = GET_MODE (temp);
5029 /* First see if we can apply the inverse distributive law. */
5030 if (code == PLUS || code == MINUS
5031 || code == AND || code == IOR || code == XOR)
5033 x = apply_distributive_law (x);
5034 code = GET_CODE (x);
5035 op0_mode = VOIDmode;
5038 /* If CODE is an associative operation not otherwise handled, see if we
5039 can associate some operands. This can win if they are constants or
5040 if they are logically related (i.e. (a & b) & a). */
5041 if ((code == PLUS || code == MINUS || code == MULT || code == DIV
5042 || code == AND || code == IOR || code == XOR
5043 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
5044 && ((INTEGRAL_MODE_P (mode) && code != DIV)
5045 || (flag_associative_math && FLOAT_MODE_P (mode))))
5047 if (GET_CODE (XEXP (x, 0)) == code)
5049 rtx other = XEXP (XEXP (x, 0), 0);
5050 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
5051 rtx inner_op1 = XEXP (x, 1);
5052 rtx inner;
5054 /* Make sure we pass the constant operand, if any, as the second
5055 one if this is a commutative operation. */
5056 if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
5058 rtx tem = inner_op0;
5059 inner_op0 = inner_op1;
5060 inner_op1 = tem;
5062 inner = simplify_binary_operation (code == MINUS ? PLUS
5063 : code == DIV ? MULT
5064 : code,
5065 mode, inner_op0, inner_op1);
5067 /* For commutative operations, try the other pair if that one
5068 didn't simplify. */
5069 if (inner == 0 && COMMUTATIVE_ARITH_P (x))
5071 other = XEXP (XEXP (x, 0), 1);
5072 inner = simplify_binary_operation (code, mode,
5073 XEXP (XEXP (x, 0), 0),
5074 XEXP (x, 1));
5077 if (inner)
5078 return simplify_gen_binary (code, mode, other, inner);
5082 /* A little bit of algebraic simplification here. */
5083 switch (code)
5085 case MEM:
5086 /* Ensure that our address has any ASHIFTs converted to MULT in case
5087 address-recognizing predicates are called later. */
5088 temp = make_compound_operation (XEXP (x, 0), MEM);
5089 SUBST (XEXP (x, 0), temp);
5090 break;
5092 case SUBREG:
5093 if (op0_mode == VOIDmode)
5094 op0_mode = GET_MODE (SUBREG_REG (x));
5096 /* See if this can be moved to simplify_subreg. */
5097 if (CONSTANT_P (SUBREG_REG (x))
5098 && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
5099 /* Don't call gen_lowpart if the inner mode
5100 is VOIDmode and we cannot simplify it, as SUBREG without
5101 inner mode is invalid. */
5102 && (GET_MODE (SUBREG_REG (x)) != VOIDmode
5103 || gen_lowpart_common (mode, SUBREG_REG (x))))
5104 return gen_lowpart (mode, SUBREG_REG (x));
5106 if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
5107 break;
5109 rtx temp;
5110 temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
5111 SUBREG_BYTE (x));
5112 if (temp)
5113 return temp;
5116 /* Don't change the mode of the MEM if that would change the meaning
5117 of the address. */
5118 if (MEM_P (SUBREG_REG (x))
5119 && (MEM_VOLATILE_P (SUBREG_REG (x))
5120 || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
5121 return gen_rtx_CLOBBER (mode, const0_rtx);
5123 /* Note that we cannot do any narrowing for non-constants since
5124 we might have been counting on using the fact that some bits were
5125 zero. We now do this in the SET. */
5127 break;
5129 case NEG:
5130 temp = expand_compound_operation (XEXP (x, 0));
5132 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
5133 replaced by (lshiftrt X C). This will convert
5134 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
5136 if (GET_CODE (temp) == ASHIFTRT
5137 && CONST_INT_P (XEXP (temp, 1))
5138 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
5139 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
5140 INTVAL (XEXP (temp, 1)));
5142 /* If X has only a single bit that might be nonzero, say, bit I, convert
5143 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
5144 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
5145 (sign_extract X 1 Y). But only do this if TEMP isn't a register
5146 or a SUBREG of one since we'd be making the expression more
5147 complex if it was just a register. */
5149 if (!REG_P (temp)
5150 && ! (GET_CODE (temp) == SUBREG
5151 && REG_P (SUBREG_REG (temp)))
5152 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
5154 rtx temp1 = simplify_shift_const
5155 (NULL_RTX, ASHIFTRT, mode,
5156 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
5157 GET_MODE_BITSIZE (mode) - 1 - i),
5158 GET_MODE_BITSIZE (mode) - 1 - i);
5160 /* If all we did was surround TEMP with the two shifts, we
5161 haven't improved anything, so don't use it. Otherwise,
5162 we are better off with TEMP1. */
5163 if (GET_CODE (temp1) != ASHIFTRT
5164 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
5165 || XEXP (XEXP (temp1, 0), 0) != temp)
5166 return temp1;
5168 break;
5170 case TRUNCATE:
5171 /* We can't handle truncation to a partial integer mode here
5172 because we don't know the real bitsize of the partial
5173 integer mode. */
5174 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
5175 break;
5177 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5178 SUBST (XEXP (x, 0),
5179 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
5180 GET_MODE_MASK (mode), 0));
5182 /* We can truncate a constant value and return it. */
5183 if (CONST_INT_P (XEXP (x, 0)))
5184 return gen_int_mode (INTVAL (XEXP (x, 0)), mode);
5186 /* Similarly to what we do in simplify-rtx.c, a truncate of a register
5187 whose value is a comparison can be replaced with a subreg if
5188 STORE_FLAG_VALUE permits. */
5189 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5190 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
5191 && (temp = get_last_value (XEXP (x, 0)))
5192 && COMPARISON_P (temp))
5193 return gen_lowpart (mode, XEXP (x, 0));
5194 break;
5196 case CONST:
5197 /* (const (const X)) can become (const X). Do it this way rather than
5198 returning the inner CONST since CONST can be shared with a
5199 REG_EQUAL note. */
5200 if (GET_CODE (XEXP (x, 0)) == CONST)
5201 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
5202 break;
5204 #ifdef HAVE_lo_sum
5205 case LO_SUM:
5206 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
5207 can add in an offset. find_split_point will split this address up
5208 again if it doesn't match. */
5209 if (GET_CODE (XEXP (x, 0)) == HIGH
5210 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
5211 return XEXP (x, 1);
5212 break;
5213 #endif
5215 case PLUS:
5216 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
5217 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
5218 bit-field and can be replaced by either a sign_extend or a
5219 sign_extract. The `and' may be a zero_extend and the two
5220 <c>, -<c> constants may be reversed. */
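/* This is the classic sign-extension identity for an (i+1)-bit
   value V: (V ^ (1 << i)) - (1 << i).  E.g. for an 8-bit field,
   ((V & 255) ^ 128) - 128 equals (signed char) V.  */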
5221 if (GET_CODE (XEXP (x, 0)) == XOR
5222 && CONST_INT_P (XEXP (x, 1))
5223 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
5224 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
5225 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5226 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5227 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5228 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
5229 && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
5230 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5231 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
5232 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
5233 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
5234 == (unsigned int) i + 1))))
5235 return simplify_shift_const
5236 (NULL_RTX, ASHIFTRT, mode,
5237 simplify_shift_const (NULL_RTX, ASHIFT, mode,
5238 XEXP (XEXP (XEXP (x, 0), 0), 0),
5239 GET_MODE_BITSIZE (mode) - (i + 1)),
5240 GET_MODE_BITSIZE (mode) - (i + 1));
5242 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
5243 can become (ashiftrt (ashift (xor x 1) C) C) where C is
5244 the bitsize of the mode - 1. This allows simplification of
5245 "a = (b & 8) == 0;" */
5246 if (XEXP (x, 1) == constm1_rtx
5247 && !REG_P (XEXP (x, 0))
5248 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
5249 && REG_P (SUBREG_REG (XEXP (x, 0))))
5250 && nonzero_bits (XEXP (x, 0), mode) == 1)
5251 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
5252 simplify_shift_const (NULL_RTX, ASHIFT, mode,
5253 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
5254 GET_MODE_BITSIZE (mode) - 1),
5255 GET_MODE_BITSIZE (mode) - 1);
5257 /* If we are adding two things that have no bits in common, convert
5258 the addition into an IOR. This will often be further simplified,
5259 for example in cases like ((a & 1) + (a & 2)), which can
5260 become a & 3. */
5262 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5263 && (nonzero_bits (XEXP (x, 0), mode)
5264 & nonzero_bits (XEXP (x, 1), mode)) == 0)
5266 /* Try to simplify the expression further. */
5267 rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
5268 temp = combine_simplify_rtx (tor, mode, in_dest);
5270 /* If we could, great. If not, do not go ahead with the IOR
5271 replacement, since PLUS appears in many special purpose
5272 address arithmetic instructions. */
5273 if (GET_CODE (temp) != CLOBBER && temp != tor)
5274 return temp;
5276 break;
5278 case MINUS:
5279 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
5280 (and <foo> (const_int pow2-1)) */
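      /* Quick sanity check of the identity (added note), for pow2 == 8:
         since -8 == ~7, x - (x & -8) drops exactly the bits above bit 2,
         which is x & 7.  */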
5281 if (GET_CODE (XEXP (x, 1)) == AND
5282 && CONST_INT_P (XEXP (XEXP (x, 1), 1))
5283 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
5284 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
5285 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
5286 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
5287 break;
5289 case MULT:
5290 /* If we have (mult (plus A B) C), apply the distributive law and then
5291 the inverse distributive law to see if things simplify. This
5292 occurs mostly in addresses, often when unrolling loops. */
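      /* For example (illustrative): (mult (plus A 4) 8) distributes to
         (plus (mult A 8) 32), a form that address generation can often
         use directly.  */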
5294 if (GET_CODE (XEXP (x, 0)) == PLUS)
5296 rtx result = distribute_and_simplify_rtx (x, 0);
5297 if (result)
5298 return result;
5301 /* Try to simplify a*(b/c) as (a*b)/c. */
5302 if (FLOAT_MODE_P (mode) && flag_associative_math
5303 && GET_CODE (XEXP (x, 0)) == DIV)
5305 rtx tem = simplify_binary_operation (MULT, mode,
5306 XEXP (XEXP (x, 0), 0),
5307 XEXP (x, 1));
5308 if (tem)
5309 return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
5311 break;
5313 case UDIV:
5314 /* If this is a divide by a power of two, treat it as a shift if
5315 its first operand is a shift. */
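      /* E.g. (illustrative): (udiv (lshiftrt X 2) 8) -- unsigned division
         by 2**3 is a logical right shift, so this becomes
         (lshiftrt (lshiftrt X 2) 3), which merges into (lshiftrt X 5).  */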
5316 if (CONST_INT_P (XEXP (x, 1))
5317 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
5318 && (GET_CODE (XEXP (x, 0)) == ASHIFT
5319 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
5320 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
5321 || GET_CODE (XEXP (x, 0)) == ROTATE
5322 || GET_CODE (XEXP (x, 0)) == ROTATERT))
5323 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
5324 break;
5326 case EQ: case NE:
5327 case GT: case GTU: case GE: case GEU:
5328 case LT: case LTU: case LE: case LEU:
5329 case UNEQ: case LTGT:
5330 case UNGT: case UNGE:
5331 case UNLT: case UNLE:
5332 case UNORDERED: case ORDERED:
5333 /* If the first operand is a condition code, we can't do anything
5334 with it. */
5335 if (GET_CODE (XEXP (x, 0)) == COMPARE
5336 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
5337 && ! CC0_P (XEXP (x, 0))))
5339 rtx op0 = XEXP (x, 0);
5340 rtx op1 = XEXP (x, 1);
5341 enum rtx_code new_code;
5343 if (GET_CODE (op0) == COMPARE)
5344 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5346 /* Simplify our comparison, if possible. */
5347 new_code = simplify_comparison (code, &op0, &op1);
5349 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
5350 if only the low-order bit is possibly nonzero in X (such as when
5351 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
5352 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
5353 known to be either 0 or -1, NE becomes a NEG and EQ becomes
5354 (plus X 1).
5356 Remove any ZERO_EXTRACT we made when thinking this was a
5357 comparison. It may now be simpler to use, e.g., an AND. If a
5358 ZERO_EXTRACT is indeed appropriate, it will be placed back by
5359 the call to make_compound_operation in the SET case. */
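      /* Summary of the cases below (added illustration), with
         STORE_FLAG_VALUE == 1:
           X known to be 0 or 1:   (ne X 0) -> X         (eq X 0) -> (xor X 1)
           X known to be 0 or -1:  (ne X 0) -> (neg X)   (eq X 0) -> (plus X 1)  */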
5361 if (STORE_FLAG_VALUE == 1
5362 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5363 && op1 == const0_rtx
5364 && mode == GET_MODE (op0)
5365 && nonzero_bits (op0, mode) == 1)
5366 return gen_lowpart (mode,
5367 expand_compound_operation (op0));
5369 else if (STORE_FLAG_VALUE == 1
5370 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5371 && op1 == const0_rtx
5372 && mode == GET_MODE (op0)
5373 && (num_sign_bit_copies (op0, mode)
5374 == GET_MODE_BITSIZE (mode)))
5376 op0 = expand_compound_operation (op0);
5377 return simplify_gen_unary (NEG, mode,
5378 gen_lowpart (mode, op0),
5379 mode);
5382 else if (STORE_FLAG_VALUE == 1
5383 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5384 && op1 == const0_rtx
5385 && mode == GET_MODE (op0)
5386 && nonzero_bits (op0, mode) == 1)
5388 op0 = expand_compound_operation (op0);
5389 return simplify_gen_binary (XOR, mode,
5390 gen_lowpart (mode, op0),
5391 const1_rtx);
5394 else if (STORE_FLAG_VALUE == 1
5395 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5396 && op1 == const0_rtx
5397 && mode == GET_MODE (op0)
5398 && (num_sign_bit_copies (op0, mode)
5399 == GET_MODE_BITSIZE (mode)))
5401 op0 = expand_compound_operation (op0);
5402 return plus_constant (gen_lowpart (mode, op0), 1);
5405 /* If STORE_FLAG_VALUE is -1, we have cases similar to
5406 those above. */
5407 if (STORE_FLAG_VALUE == -1
5408 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5409 && op1 == const0_rtx
5410 && (num_sign_bit_copies (op0, mode)
5411 == GET_MODE_BITSIZE (mode)))
5412 return gen_lowpart (mode,
5413 expand_compound_operation (op0));
5415 else if (STORE_FLAG_VALUE == -1
5416 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5417 && op1 == const0_rtx
5418 && mode == GET_MODE (op0)
5419 && nonzero_bits (op0, mode) == 1)
5421 op0 = expand_compound_operation (op0);
5422 return simplify_gen_unary (NEG, mode,
5423 gen_lowpart (mode, op0),
5424 mode);
5427 else if (STORE_FLAG_VALUE == -1
5428 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5429 && op1 == const0_rtx
5430 && mode == GET_MODE (op0)
5431 && (num_sign_bit_copies (op0, mode)
5432 == GET_MODE_BITSIZE (mode)))
5434 op0 = expand_compound_operation (op0);
5435 return simplify_gen_unary (NOT, mode,
5436 gen_lowpart (mode, op0),
5437 mode);
5440 /* If X is 0/1, (eq X 0) is X-1. */
5441 else if (STORE_FLAG_VALUE == -1
5442 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5443 && op1 == const0_rtx
5444 && mode == GET_MODE (op0)
5445 && nonzero_bits (op0, mode) == 1)
5447 op0 = expand_compound_operation (op0);
5448 return plus_constant (gen_lowpart (mode, op0), -1);
5451 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
5452 one bit that might be nonzero, we can convert (ne x 0) to
5453 (ashift x c) where C puts the bit in the sign bit. Remove any
5454 AND with STORE_FLAG_VALUE when we are done, since we are only
5455 going to test the sign bit. */
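      /* E.g. (illustrative; 32-bit mode, STORE_FLAG_VALUE == 0x80000000):
         if X's only possibly-nonzero bit is bit 3, (ne X 0) becomes
         (ashift X 28), moving bit 3 into the sign bit.  */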
5456 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5457 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5458 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5459 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5460 && op1 == const0_rtx
5461 && mode == GET_MODE (op0)
5462 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
5464 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
5465 expand_compound_operation (op0),
5466 GET_MODE_BITSIZE (mode) - 1 - i);
5467 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
5468 return XEXP (x, 0);
5469 else
5470 return x;
5473 /* If the code changed, return a whole new comparison. */
5474 if (new_code != code)
5475 return gen_rtx_fmt_ee (new_code, mode, op0, op1);
5477 /* Otherwise, keep this operation, but maybe change its operands.
5478 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
5479 SUBST (XEXP (x, 0), op0);
5480 SUBST (XEXP (x, 1), op1);
5482 break;
5484 case IF_THEN_ELSE:
5485 return simplify_if_then_else (x);
5487 case ZERO_EXTRACT:
5488 case SIGN_EXTRACT:
5489 case ZERO_EXTEND:
5490 case SIGN_EXTEND:
5491 /* If we are processing SET_DEST, we are done. */
5492 if (in_dest)
5493 return x;
5495 return expand_compound_operation (x);
5497 case SET:
5498 return simplify_set (x);
5500 case AND:
5501 case IOR:
5502 return simplify_logical (x);
5504 case ASHIFT:
5505 case LSHIFTRT:
5506 case ASHIFTRT:
5507 case ROTATE:
5508 case ROTATERT:
5509 /* If this is a shift by a constant amount, simplify it. */
5510 if (CONST_INT_P (XEXP (x, 1)))
5511 return simplify_shift_const (x, code, mode, XEXP (x, 0),
5512 INTVAL (XEXP (x, 1)));
5514 else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
5515 SUBST (XEXP (x, 1),
5516 force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
5517 ((HOST_WIDE_INT) 1
5518 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
5519 - 1,
5520 0));
5521 break;
5523 default:
5524 break;
5527 return x;
5530 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5532 static rtx
5533 simplify_if_then_else (rtx x)
5535 enum machine_mode mode = GET_MODE (x);
5536 rtx cond = XEXP (x, 0);
5537 rtx true_rtx = XEXP (x, 1);
5538 rtx false_rtx = XEXP (x, 2);
5539 enum rtx_code true_code = GET_CODE (cond);
5540 int comparison_p = COMPARISON_P (cond);
5541 rtx temp;
5542 int i;
5543 enum rtx_code false_code;
5544 rtx reversed;
5546 /* Simplify storing of the truth value. */
5547 if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
5548 return simplify_gen_relational (true_code, mode, VOIDmode,
5549 XEXP (cond, 0), XEXP (cond, 1));
5551 /* Also when the truth value has to be reversed. */
5552 if (comparison_p
5553 && true_rtx == const0_rtx && false_rtx == const_true_rtx
5554 && (reversed = reversed_comparison (cond, mode)))
5555 return reversed;
5557 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
5558 in it is being compared against certain values. Get the true and false
5559 comparisons and see if that says anything about the value of each arm. */
5561 if (comparison_p
5562 && ((false_code = reversed_comparison_code (cond, NULL))
5563 != UNKNOWN)
5564 && REG_P (XEXP (cond, 0)))
5566 HOST_WIDE_INT nzb;
5567 rtx from = XEXP (cond, 0);
5568 rtx true_val = XEXP (cond, 1);
5569 rtx false_val = true_val;
5570 int swapped = 0;
5572 /* If FALSE_CODE is EQ, swap the codes and arms. */
5574 if (false_code == EQ)
5576 swapped = 1, true_code = EQ, false_code = NE;
5577 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
5580 /* If we are comparing against zero and the expression being tested has
5581 only a single bit that might be nonzero, that is its value when it is
5582 not equal to zero. Similarly if it is known to be -1 or 0. */
5584 if (true_code == EQ && true_val == const0_rtx
5585 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
5587 false_code = EQ;
5588 false_val = GEN_INT (trunc_int_for_mode (nzb, GET_MODE (from)));
5590 else if (true_code == EQ && true_val == const0_rtx
5591 && (num_sign_bit_copies (from, GET_MODE (from))
5592 == GET_MODE_BITSIZE (GET_MODE (from))))
5594 false_code = EQ;
5595 false_val = constm1_rtx;
5598 /* Now simplify an arm if we know the value of the register in the
5599 branch and it is used in the arm. Be careful due to the potential
5600 of locally-shared RTL. */
5602 if (reg_mentioned_p (from, true_rtx))
5603 true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
5604 from, true_val),
5605 pc_rtx, pc_rtx, 0, 0);
5606 if (reg_mentioned_p (from, false_rtx))
5607 false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
5608 from, false_val),
5609 pc_rtx, pc_rtx, 0, 0);
5611 SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
5612 SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
5614 true_rtx = XEXP (x, 1);
5615 false_rtx = XEXP (x, 2);
5616 true_code = GET_CODE (cond);
5619 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
5620 reversed, do so to avoid needing two sets of patterns for
5621 subtract-and-branch insns. Similarly if we have a constant in the true
5622 arm, the false arm is the same as the first operand of the comparison, or
5623 the false arm is more complicated than the true arm. */
5625 if (comparison_p
5626 && reversed_comparison_code (cond, NULL) != UNKNOWN
5627 && (true_rtx == pc_rtx
5628 || (CONSTANT_P (true_rtx)
5629 && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
5630 || true_rtx == const0_rtx
5631 || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
5632 || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
5633 && !OBJECT_P (false_rtx))
5634 || reg_mentioned_p (true_rtx, false_rtx)
5635 || rtx_equal_p (false_rtx, XEXP (cond, 0))))
5637 true_code = reversed_comparison_code (cond, NULL);
5638 SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
5639 SUBST (XEXP (x, 1), false_rtx);
5640 SUBST (XEXP (x, 2), true_rtx);
5642 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
5643 cond = XEXP (x, 0);
5645 /* It is possible that the conditional has been simplified out. */
5646 true_code = GET_CODE (cond);
5647 comparison_p = COMPARISON_P (cond);
5650 /* If the two arms are identical, we don't need the comparison. */
5652 if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
5653 return true_rtx;
5655 /* Convert a == b ? b : a to "a". */
5656 if (true_code == EQ && ! side_effects_p (cond)
5657 && !HONOR_NANS (mode)
5658 && rtx_equal_p (XEXP (cond, 0), false_rtx)
5659 && rtx_equal_p (XEXP (cond, 1), true_rtx))
5660 return false_rtx;
5661 else if (true_code == NE && ! side_effects_p (cond)
5662 && !HONOR_NANS (mode)
5663 && rtx_equal_p (XEXP (cond, 0), true_rtx)
5664 && rtx_equal_p (XEXP (cond, 1), false_rtx))
5665 return true_rtx;
5667 /* Look for cases where we have (abs x) or (neg (abs X)). */
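  /* For instance (added example): (if_then_else (ge X 0) X (neg X))
     becomes (abs X), and (if_then_else (lt X 0) X (neg X)) becomes
     (neg (abs X)).  */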
5669 if (GET_MODE_CLASS (mode) == MODE_INT
5670 && comparison_p
5671 && XEXP (cond, 1) == const0_rtx
5672 && GET_CODE (false_rtx) == NEG
5673 && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
5674 && rtx_equal_p (true_rtx, XEXP (cond, 0))
5675 && ! side_effects_p (true_rtx))
5676 switch (true_code)
5678 case GT:
5679 case GE:
5680 return simplify_gen_unary (ABS, mode, true_rtx, mode);
5681 case LT:
5682 case LE:
5683 return
5684 simplify_gen_unary (NEG, mode,
5685 simplify_gen_unary (ABS, mode, true_rtx, mode),
5686 mode);
5687 default:
5688 break;
5691 /* Look for MIN or MAX. */
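  /* E.g. (illustrative): (if_then_else (gt A B) A B) is (smax A B), and
     (if_then_else (ltu A B) A B) is (umin A B).  */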
5693 if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
5694 && comparison_p
5695 && rtx_equal_p (XEXP (cond, 0), true_rtx)
5696 && rtx_equal_p (XEXP (cond, 1), false_rtx)
5697 && ! side_effects_p (cond))
5698 switch (true_code)
5700 case GE:
5701 case GT:
5702 return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
5703 case LE:
5704 case LT:
5705 return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
5706 case GEU:
5707 case GTU:
5708 return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
5709 case LEU:
5710 case LTU:
5711 return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
5712 default:
5713 break;
5716 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
5717 second operand is zero, this can be done as (OP Z (mult COND C2)) where
5718 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
5719 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
5720 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
5721 neither 1 nor -1, but it isn't worth checking for. */
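  /* Sketch of the transformation (illustrative), with STORE_FLAG_VALUE == 1:

        (if_then_else (ne A 0) (plus Z 4) Z)
     -> (plus Z (mult (ne A 0) 4))

     since (ne A 0) is 1 exactly when the PLUS arm is chosen and 0
     otherwise.  */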
5723 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
5724 && comparison_p
5725 && GET_MODE_CLASS (mode) == MODE_INT
5726 && ! side_effects_p (x))
5728 rtx t = make_compound_operation (true_rtx, SET);
5729 rtx f = make_compound_operation (false_rtx, SET);
5730 rtx cond_op0 = XEXP (cond, 0);
5731 rtx cond_op1 = XEXP (cond, 1);
5732 enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
5733 enum machine_mode m = mode;
5734 rtx z = 0, c1 = NULL_RTX;
5736 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
5737 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
5738 || GET_CODE (t) == ASHIFT
5739 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
5740 && rtx_equal_p (XEXP (t, 0), f))
5741 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
5743 /* If an identity-zero op is commutative, check whether there
5744 would be a match if we swapped the operands. */
5745 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
5746 || GET_CODE (t) == XOR)
5747 && rtx_equal_p (XEXP (t, 1), f))
5748 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
5749 else if (GET_CODE (t) == SIGN_EXTEND
5750 && (GET_CODE (XEXP (t, 0)) == PLUS
5751 || GET_CODE (XEXP (t, 0)) == MINUS
5752 || GET_CODE (XEXP (t, 0)) == IOR
5753 || GET_CODE (XEXP (t, 0)) == XOR
5754 || GET_CODE (XEXP (t, 0)) == ASHIFT
5755 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5756 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5757 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5758 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5759 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5760 && (num_sign_bit_copies (f, GET_MODE (f))
5761 > (unsigned int)
5762 (GET_MODE_BITSIZE (mode)
5763 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
5765 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5766 extend_op = SIGN_EXTEND;
5767 m = GET_MODE (XEXP (t, 0));
5769 else if (GET_CODE (t) == SIGN_EXTEND
5770 && (GET_CODE (XEXP (t, 0)) == PLUS
5771 || GET_CODE (XEXP (t, 0)) == IOR
5772 || GET_CODE (XEXP (t, 0)) == XOR)
5773 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5774 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5775 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5776 && (num_sign_bit_copies (f, GET_MODE (f))
5777 > (unsigned int)
5778 (GET_MODE_BITSIZE (mode)
5779 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
5781 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5782 extend_op = SIGN_EXTEND;
5783 m = GET_MODE (XEXP (t, 0));
5785 else if (GET_CODE (t) == ZERO_EXTEND
5786 && (GET_CODE (XEXP (t, 0)) == PLUS
5787 || GET_CODE (XEXP (t, 0)) == MINUS
5788 || GET_CODE (XEXP (t, 0)) == IOR
5789 || GET_CODE (XEXP (t, 0)) == XOR
5790 || GET_CODE (XEXP (t, 0)) == ASHIFT
5791 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5792 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5793 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5794 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5795 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5796 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5797 && ((nonzero_bits (f, GET_MODE (f))
5798 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
5799 == 0))
5801 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5802 extend_op = ZERO_EXTEND;
5803 m = GET_MODE (XEXP (t, 0));
5805 else if (GET_CODE (t) == ZERO_EXTEND
5806 && (GET_CODE (XEXP (t, 0)) == PLUS
5807 || GET_CODE (XEXP (t, 0)) == IOR
5808 || GET_CODE (XEXP (t, 0)) == XOR)
5809 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5810 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5811 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5812 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5813 && ((nonzero_bits (f, GET_MODE (f))
5814 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
5815 == 0))
5817 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5818 extend_op = ZERO_EXTEND;
5819 m = GET_MODE (XEXP (t, 0));
5822 if (z)
5824 temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
5825 cond_op0, cond_op1),
5826 pc_rtx, pc_rtx, 0, 0);
5827 temp = simplify_gen_binary (MULT, m, temp,
5828 simplify_gen_binary (MULT, m, c1,
5829 const_true_rtx));
5830 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
5831 temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);
5833 if (extend_op != UNKNOWN)
5834 temp = simplify_gen_unary (extend_op, mode, temp, m);
5836 return temp;
5840 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5841 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5842 negation of a single bit, we can convert this operation to a shift. We
5843 can actually do this more generally, but it doesn't seem worth it. */
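  /* E.g. (illustrative): with X known to be 0 or 1,
     (if_then_else (ne X 0) 8 0) is (ashift X 3); with X known to be
     0 or -1, (if_then_else (ne X 0) -8 0) is likewise (ashift X 3),
     since -1 << 3 == -8.  */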
5845 if (true_code == NE && XEXP (cond, 1) == const0_rtx
5846 && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
5847 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
5848 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
5849 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
5850 == GET_MODE_BITSIZE (mode))
5851 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
5852 return
5853 simplify_shift_const (NULL_RTX, ASHIFT, mode,
5854 gen_lowpart (mode, XEXP (cond, 0)), i);
5856 /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG for nonzero_bits (REG) == 8. */
5857 if (true_code == NE && XEXP (cond, 1) == const0_rtx
5858 && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
5859 && GET_MODE (XEXP (cond, 0)) == mode
5860 && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
5861 == nonzero_bits (XEXP (cond, 0), mode)
5862 && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
5863 return XEXP (cond, 0);
5865 return x;
5868 /* Simplify X, a SET expression. Return the new expression. */
5870 static rtx
5871 simplify_set (rtx x)
5873 rtx src = SET_SRC (x);
5874 rtx dest = SET_DEST (x);
5875 enum machine_mode mode
5876 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
5877 rtx other_insn;
5878 rtx *cc_use;
5880 /* (set (pc) (return)) gets written as (return). */
5881 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
5882 return src;
5884 /* Now that we know for sure which bits of SRC we are using, see if we can
5885 simplify the expression for the object knowing that we only need the
5886 low-order bits. */
5888 if (GET_MODE_CLASS (mode) == MODE_INT
5889 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5891 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0);
5892 SUBST (SET_SRC (x), src);
5895 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5896 the comparison result and try to simplify it unless we already have used
5897 undobuf.other_insn. */
5898 if ((GET_MODE_CLASS (mode) == MODE_CC
5899 || GET_CODE (src) == COMPARE
5900 || CC0_P (dest))
5901 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
5902 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
5903 && COMPARISON_P (*cc_use)
5904 && rtx_equal_p (XEXP (*cc_use, 0), dest))
5906 enum rtx_code old_code = GET_CODE (*cc_use);
5907 enum rtx_code new_code;
5908 rtx op0, op1, tmp;
5909 int other_changed = 0;
5910 enum machine_mode compare_mode = GET_MODE (dest);
5912 if (GET_CODE (src) == COMPARE)
5913 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5914 else
5915 op0 = src, op1 = CONST0_RTX (GET_MODE (src));
5917 tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
5918 op0, op1);
5919 if (!tmp)
5920 new_code = old_code;
5921 else if (!CONSTANT_P (tmp))
5923 new_code = GET_CODE (tmp);
5924 op0 = XEXP (tmp, 0);
5925 op1 = XEXP (tmp, 1);
5927 else
5929 rtx pat = PATTERN (other_insn);
5930 undobuf.other_insn = other_insn;
5931 SUBST (*cc_use, tmp);
5933 /* Attempt to simplify CC user. */
5934 if (GET_CODE (pat) == SET)
5936 rtx new_rtx = simplify_rtx (SET_SRC (pat));
5937 if (new_rtx != NULL_RTX)
5938 SUBST (SET_SRC (pat), new_rtx);
5941 /* Convert X into a no-op move. */
5942 SUBST (SET_DEST (x), pc_rtx);
5943 SUBST (SET_SRC (x), pc_rtx);
5944 return x;
5947 /* Simplify our comparison, if possible. */
5948 new_code = simplify_comparison (new_code, &op0, &op1);
5950 #ifdef SELECT_CC_MODE
5951 /* If this machine has CC modes other than CCmode, check to see if we
5952 need to use a different CC mode here. */
5953 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5954 compare_mode = GET_MODE (op0);
5955 else
5956 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5958 #ifndef HAVE_cc0
5959 /* If the mode changed, we have to change SET_DEST, the mode in the
5960 compare, and the mode in the place SET_DEST is used. If SET_DEST is
5961 a hard register, just build new versions with the proper mode. If it
5962 is a pseudo, we lose unless it is the only time we set the pseudo, in
5963 which case we can safely change its mode. */
5964 if (compare_mode != GET_MODE (dest))
5966 if (can_change_dest_mode (dest, 0, compare_mode))
5968 unsigned int regno = REGNO (dest);
5969 rtx new_dest;
5971 if (regno < FIRST_PSEUDO_REGISTER)
5972 new_dest = gen_rtx_REG (compare_mode, regno);
5973 else
5975 SUBST_MODE (regno_reg_rtx[regno], compare_mode);
5976 new_dest = regno_reg_rtx[regno];
5979 SUBST (SET_DEST (x), new_dest);
5980 SUBST (XEXP (*cc_use, 0), new_dest);
5981 other_changed = 1;
5983 dest = new_dest;
5986 #endif /* cc0 */
5987 #endif /* SELECT_CC_MODE */
5989 /* If the code changed, we have to build a new comparison in
5990 undobuf.other_insn. */
5991 if (new_code != old_code)
5993 int other_changed_previously = other_changed;
5994 unsigned HOST_WIDE_INT mask;
5995 rtx old_cc_use = *cc_use;
5997 SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5998 dest, const0_rtx));
5999 other_changed = 1;
6001 /* If the only change we made was to change an EQ into an NE or
6002 vice versa, OP0 has only one bit that might be nonzero, and OP1
6003 is zero, check if changing the user of the condition code will
6004 produce a valid insn. If it won't, we can keep the original code
6005 in that insn by surrounding our operation with an XOR. */
6007 if (((old_code == NE && new_code == EQ)
6008 || (old_code == EQ && new_code == NE))
6009 && ! other_changed_previously && op1 == const0_rtx
6010 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
6011 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
6013 rtx pat = PATTERN (other_insn), note = 0;
6015 if ((recog_for_combine (&pat, other_insn, &note) < 0
6016 && ! check_asm_operands (pat)))
6018 *cc_use = old_cc_use;
6019 other_changed = 0;
6021 op0 = simplify_gen_binary (XOR, GET_MODE (op0),
6022 op0, GEN_INT (mask));
6027 if (other_changed)
6028 undobuf.other_insn = other_insn;
6030 /* Otherwise, if we didn't previously have a COMPARE in the
6031 correct mode, we need one. */
6032 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
6034 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
6035 src = SET_SRC (x);
6037 else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
6039 SUBST (SET_SRC (x), op0);
6040 src = SET_SRC (x);
6042 /* Otherwise, update the COMPARE if needed. */
6043 else if (XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
6045 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
6046 src = SET_SRC (x);
6049 else
6051 /* Get SET_SRC in a form where we have placed back any
6052 compound expressions. Then do the checks below. */
6053 src = make_compound_operation (src, SET);
6054 SUBST (SET_SRC (x), src);
6057 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
6058 and X being a REG or (subreg (reg)), we may be able to convert this to
6059 (set (subreg:m2 x) (op)).
6061 We can always do this if M1 is narrower than M2 because that means that
6062 we only care about the low bits of the result.
6064 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
6065 perform a narrower operation than requested since the high-order bits will
6066 be undefined. On machines where it is defined, this transformation is safe
6067 as long as M1 and M2 have the same number of words. */
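  /* E.g. (illustrative): (set R:QI (subreg:QI (plus:SI A B) 0)) can become
     (set (subreg:SI R:QI 0) (plus:SI A B)), since only the low byte of
     the wider result is live.  */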
6069 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
6070 && !OBJECT_P (SUBREG_REG (src))
6071 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
6072 / UNITS_PER_WORD)
6073 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6074 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
6075 #ifndef WORD_REGISTER_OPERATIONS
6076 && (GET_MODE_SIZE (GET_MODE (src))
6077 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
6078 #endif
6079 #ifdef CANNOT_CHANGE_MODE_CLASS
6080 && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
6081 && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
6082 GET_MODE (SUBREG_REG (src)),
6083 GET_MODE (src)))
6084 #endif
6085 && (REG_P (dest)
6086 || (GET_CODE (dest) == SUBREG
6087 && REG_P (SUBREG_REG (dest)))))
6089 SUBST (SET_DEST (x),
6090 gen_lowpart (GET_MODE (SUBREG_REG (src)),
6091 dest));
6092 SUBST (SET_SRC (x), SUBREG_REG (src));
6094 src = SET_SRC (x), dest = SET_DEST (x);
6097 #ifdef HAVE_cc0
6098 /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
6099 in SRC. */
6100 if (dest == cc0_rtx
6101 && GET_CODE (src) == SUBREG
6102 && subreg_lowpart_p (src)
6103 && (GET_MODE_BITSIZE (GET_MODE (src))
6104 < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
6106 rtx inner = SUBREG_REG (src);
6107 enum machine_mode inner_mode = GET_MODE (inner);
6109 /* Here we make sure that we don't have a sign bit on. */
6110 if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
6111 && (nonzero_bits (inner, inner_mode)
6112 < ((unsigned HOST_WIDE_INT) 1
6113 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
6115 SUBST (SET_SRC (x), inner);
6116 src = SET_SRC (x);
6119 #endif
6121 #ifdef LOAD_EXTEND_OP
6122 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
6123 would require a paradoxical subreg. Replace the subreg with a
6124 zero_extend to avoid the reload that would otherwise be required. */
6126 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
6127 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (src)))
6128 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
6129 && SUBREG_BYTE (src) == 0
6130 && (GET_MODE_SIZE (GET_MODE (src))
6131 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
6132 && MEM_P (SUBREG_REG (src)))
6134 SUBST (SET_SRC (x),
6135 gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
6136 GET_MODE (src), SUBREG_REG (src)));
6138 src = SET_SRC (x);
6140 #endif
6142 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
6143 are comparing an item known to be 0 or -1 against 0, use a logical
6144 operation instead. Check for one of the arms being an IOR of the other
6145 arm with some value. We compute three terms to be IOR'ed together. In
6146 practice, at most two will be nonzero. Then we do the IOR's. */
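  /* E.g. (illustrative): if M is known to be 0 or -1,
     (if_then_else (ne M 0) A B) can be computed as
     (ior (and M A) (and (not M) B)); M == -1 selects A, M == 0 selects B.  */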
6148 if (GET_CODE (dest) != PC
6149 && GET_CODE (src) == IF_THEN_ELSE
6150 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
6151 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
6152 && XEXP (XEXP (src, 0), 1) == const0_rtx
6153 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
6154 #ifdef HAVE_conditional_move
6155 && ! can_conditionally_move_p (GET_MODE (src))
6156 #endif
6157 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
6158 GET_MODE (XEXP (XEXP (src, 0), 0)))
6159 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
6160 && ! side_effects_p (src))
6162 rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
6163 ? XEXP (src, 1) : XEXP (src, 2));
6164 rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
6165 ? XEXP (src, 2) : XEXP (src, 1));
6166 rtx term1 = const0_rtx, term2, term3;
6168 if (GET_CODE (true_rtx) == IOR
6169 && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
6170 term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
6171 else if (GET_CODE (true_rtx) == IOR
6172 && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
6173 term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
6174 else if (GET_CODE (false_rtx) == IOR
6175 && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
6176 term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
6177 else if (GET_CODE (false_rtx) == IOR
6178 && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
6179 term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
6181 term2 = simplify_gen_binary (AND, GET_MODE (src),
6182 XEXP (XEXP (src, 0), 0), true_rtx);
6183 term3 = simplify_gen_binary (AND, GET_MODE (src),
6184 simplify_gen_unary (NOT, GET_MODE (src),
6185 XEXP (XEXP (src, 0), 0),
6186 GET_MODE (src)),
6187 false_rtx);
6189 SUBST (SET_SRC (x),
6190 simplify_gen_binary (IOR, GET_MODE (src),
6191 simplify_gen_binary (IOR, GET_MODE (src),
6192 term1, term2),
6193 term3));
6195 src = SET_SRC (x);
6198 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
6199 whole thing fail. */
6200 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
6201 return src;
6202 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
6203 return dest;
6204 else
6205 /* Convert this into a field assignment operation, if possible. */
6206 return make_field_assignment (x);
6209 /* Simplify X, an AND, IOR, or XOR operation, and return the simplified
6210 result. */
6212 static rtx
6213 simplify_logical (rtx x)
6215 enum machine_mode mode = GET_MODE (x);
6216 rtx op0 = XEXP (x, 0);
6217 rtx op1 = XEXP (x, 1);
6219 switch (GET_CODE (x))
6221 case AND:
6222 /* We can call simplify_and_const_int only if we don't lose
6223 any (sign) bits when converting INTVAL (op1) to
6224 "unsigned HOST_WIDE_INT". */
6225 if (CONST_INT_P (op1)
6226 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6227 || INTVAL (op1) > 0))
6229 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
6230 if (GET_CODE (x) != AND)
6231 return x;
6233 op0 = XEXP (x, 0);
6234 op1 = XEXP (x, 1);
6237 /* If we have any of (and (ior A B) C) or (and (xor A B) C),
6238 apply the distributive law and then the inverse distributive
6239 law to see if things simplify. */
6240 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
6242 rtx result = distribute_and_simplify_rtx (x, 0);
6243 if (result)
6244 return result;
6246 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
6248 rtx result = distribute_and_simplify_rtx (x, 1);
6249 if (result)
6250 return result;
6252 break;
6254 case IOR:
6255 /* If we have (ior (and A B) C), apply the distributive law and then
6256 the inverse distributive law to see if things simplify. */
6258 if (GET_CODE (op0) == AND)
6260 rtx result = distribute_and_simplify_rtx (x, 0);
6261 if (result)
6262 return result;
6265 if (GET_CODE (op1) == AND)
6267 rtx result = distribute_and_simplify_rtx (x, 1);
6268 if (result)
6269 return result;
6271 break;
6273 default:
6274 gcc_unreachable ();
6277 return x;
6280 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
6281 operations" because they can be replaced with two more basic operations.
6282 ZERO_EXTEND is also considered "compound" because it can be replaced with
6283 an AND operation, which is simpler, though only one operation.
6285 The function expand_compound_operation is called with an rtx expression
6286 and will convert it to the appropriate shifts and AND operations,
6287 simplifying at each stage.
6289 The function make_compound_operation is called to convert an expression
6290 consisting of shifts and ANDs into the equivalent compound expression.
6291 It is the inverse of this function, loosely speaking. */
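/* For example (illustrative): expand_compound_operation turns
   (zero_extend:SI (reg:QI R)) into a pair of shifts that typically folds
   to (and:SI (subreg:SI R 0) 255), while make_compound_operation performs
   the reverse recognition.  */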
6293 static rtx
6294 expand_compound_operation (rtx x)
6296 unsigned HOST_WIDE_INT pos = 0, len;
6297 int unsignedp = 0;
6298 unsigned int modewidth;
6299 rtx tem;
6301 switch (GET_CODE (x))
6303 case ZERO_EXTEND:
6304 unsignedp = 1;
6305 case SIGN_EXTEND:
6306 /* We can't necessarily use a const_int for a multiword mode;
6307 it depends on implicitly extending the value.
6308 Since we don't know the right way to extend it,
6309 we can't tell whether the implicit way is right.
6311 Even for a mode that is no wider than a const_int,
6312 we can't win, because we need to sign extend one of its bits through
6313 the rest of it, and we don't know which bit. */
6314 if (CONST_INT_P (XEXP (x, 0)))
6315 return x;
6317 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
6318 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
6319 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
6320 reloaded. If not for that, MEM's would very rarely be safe.
6322 Reject MODEs bigger than a word, because we might not be able
6323 to reference a two-register group starting with an arbitrary register
6324 (and currently gen_lowpart might crash for a SUBREG). */
6326 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
6327 return x;
6329 /* Reject MODEs that aren't scalar integers because turning vector
6330 or complex modes into shifts causes problems. */
6332 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
6333 return x;
6335 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
6336 /* If the inner object has VOIDmode (the only way this can happen
6337 is if it is an ASM_OPERANDS), we can't do anything since we don't
6338 know how much masking to do. */
6339 if (len == 0)
6340 return x;
6342 break;
6344 case ZERO_EXTRACT:
6345 unsignedp = 1;
6347 /* ... fall through ... */
6349 case SIGN_EXTRACT:
6350 /* If the operand is a CLOBBER, just return it. */
6351 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
6352 return XEXP (x, 0);
6354 if (!CONST_INT_P (XEXP (x, 1))
6355 || !CONST_INT_P (XEXP (x, 2))
6356 || GET_MODE (XEXP (x, 0)) == VOIDmode)
6357 return x;
6359 /* Reject MODEs that aren't scalar integers because turning vector
6360 or complex modes into shifts causes problems. */
6362 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
6363 return x;
6365 len = INTVAL (XEXP (x, 1));
6366 pos = INTVAL (XEXP (x, 2));
6368 /* This should stay within the object being extracted, fail otherwise. */
6369 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
6370 return x;
6372 if (BITS_BIG_ENDIAN)
6373 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
6375 break;
6377 default:
6378 return x;
6380 /* Convert sign extension to zero extension, if we know that the high
6381 bit is not set, as this is easier to optimize. It will be converted
6382 back to cheaper alternative in make_extraction. */
6383 if (GET_CODE (x) == SIGN_EXTEND
6384 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6385 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6386 & ~(((unsigned HOST_WIDE_INT)
6387 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
6388 >> 1))
6389 == 0)))
6391 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
6392 rtx temp2 = expand_compound_operation (temp);
6394 /* Make sure this is a profitable operation. */
6395 if (rtx_cost (x, SET, optimize_this_for_speed_p)
6396 > rtx_cost (temp2, SET, optimize_this_for_speed_p))
6397 return temp2;
6398 else if (rtx_cost (x, SET, optimize_this_for_speed_p)
6399 > rtx_cost (temp, SET, optimize_this_for_speed_p))
6400 return temp;
6401 else
6402 return x;
6405 /* We can optimize some special cases of ZERO_EXTEND. */
6406 if (GET_CODE (x) == ZERO_EXTEND)
6408 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
6409 know that the last value didn't have any inappropriate bits
6410 set. */
6411 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
6412 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
6413 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6414 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
6415 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6416 return XEXP (XEXP (x, 0), 0);
6418 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
6419 if (GET_CODE (XEXP (x, 0)) == SUBREG
6420 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
6421 && subreg_lowpart_p (XEXP (x, 0))
6422 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6423 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
6424 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6425 return SUBREG_REG (XEXP (x, 0));
6427 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
6428 is a comparison and STORE_FLAG_VALUE permits. This is like
6429 the first case, but it works even when GET_MODE (x) is larger
6430 than HOST_WIDE_INT. */
6431 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
6432 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
6433 && COMPARISON_P (XEXP (XEXP (x, 0), 0))
6434 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6435 <= HOST_BITS_PER_WIDE_INT)
6436 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
6437 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6438 return XEXP (XEXP (x, 0), 0);
6440 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
6441 if (GET_CODE (XEXP (x, 0)) == SUBREG
6442 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
6443 && subreg_lowpart_p (XEXP (x, 0))
6444 && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
6445 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6446 <= HOST_BITS_PER_WIDE_INT)
6447 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
6448 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6449 return SUBREG_REG (XEXP (x, 0));
6453 /* If we reach here, we want to return a pair of shifts. The inner
6454 shift is a left shift of BITSIZE - POS - LEN bits. The outer
6455 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
6456 logical depending on the value of UNSIGNEDP.
6458 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
6459 converted into an AND of a shift.
6461 We must check for the case where the left shift would have a negative
6462 count. This can happen in a case like (x >> 31) & 255 on machines
6463 that can't shift by a constant. On those machines, we would first
6464 combine the shift with the AND to produce a variable-position
6465 extraction. Then the constant of 31 would be substituted in to produce
6466 such a position. */
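   /* Worked instance (illustrative; SImode, LEN == 8, POS == 4):
      (sign_extract:SI X 8 4) becomes (ashiftrt (ashift X 20) 24);
      the unsigned variant uses lshiftrt for the outer shift, which may
      further fold to (and (lshiftrt X 4) 255).  */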
6468 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
6469 if (modewidth + len >= pos)
6471 enum machine_mode mode = GET_MODE (x);
6472 tem = gen_lowpart (mode, XEXP (x, 0));
6473 if (!tem || GET_CODE (tem) == CLOBBER)
6474 return x;
6475 tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
6476 tem, modewidth - pos - len);
6477 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
6478 mode, tem, modewidth - len);
6480 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
6481 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
6482 simplify_shift_const (NULL_RTX, LSHIFTRT,
6483 GET_MODE (x),
6484 XEXP (x, 0), pos),
6485 ((HOST_WIDE_INT) 1 << len) - 1);
6486 else
6487 /* Any other cases we can't handle. */
6488 return x;
6490 /* If we couldn't do this for some reason, return the original
6491 expression. */
6492 if (GET_CODE (tem) == CLOBBER)
6493 return x;
6495 return tem;
6498 /* X is a SET which contains an assignment of one object into
6499 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
6500 or certain SUBREGS). If possible, convert it into a series of
6501 logical operations.
6503 We half-heartedly support variable positions, but do not at all
6504 support variable lengths. */
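/* The net effect (added illustration), with MASK == (1 << LEN) - 1:

      INNER = (INNER & ~(MASK << POS)) | ((SRC & MASK) << POS)

   which is exactly the CLEARED | MASKED form built below.  */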
6506 static const_rtx
6507 expand_field_assignment (const_rtx x)
6509 rtx inner;
6510 rtx pos; /* Always counts from low bit. */
6511 int len;
6512 rtx mask, cleared, masked;
6513 enum machine_mode compute_mode;
6515 /* Loop until we find something we can't simplify. */
6516 while (1)
6518 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6519 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
6521 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
6522 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
6523 pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
6525 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
6526 && CONST_INT_P (XEXP (SET_DEST (x), 1)))
6528 inner = XEXP (SET_DEST (x), 0);
6529 len = INTVAL (XEXP (SET_DEST (x), 1));
6530 pos = XEXP (SET_DEST (x), 2);
6532 /* A constant position should stay within the width of INNER. */
6533 if (CONST_INT_P (pos)
6534 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
6535 break;
6537 if (BITS_BIG_ENDIAN)
6539 if (CONST_INT_P (pos))
6540 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
6541 - INTVAL (pos));
6542 else if (GET_CODE (pos) == MINUS
6543 && CONST_INT_P (XEXP (pos, 1))
6544 && (INTVAL (XEXP (pos, 1))
6545 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
6546 /* If position is ADJUST - X, new position is X. */
6547 pos = XEXP (pos, 0);
6548 else
6549 pos = simplify_gen_binary (MINUS, GET_MODE (pos),
6550 GEN_INT (GET_MODE_BITSIZE (
6551 GET_MODE (inner))
6552 - len),
6553 pos);
6557 /* A SUBREG between two modes that occupy the same numbers of words
6558 can be done by moving the SUBREG to the source. */
6559 else if (GET_CODE (SET_DEST (x)) == SUBREG
6560 /* We need SUBREGs to compute nonzero_bits properly. */
6561 && nonzero_sign_valid
6562 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
6563 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6564 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
6565 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
6567 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
6568 gen_lowpart
6569 (GET_MODE (SUBREG_REG (SET_DEST (x))),
6570 SET_SRC (x)));
6571 continue;
6573 else
6574 break;
6576 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6577 inner = SUBREG_REG (inner);
6579 compute_mode = GET_MODE (inner);
6581 /* Don't attempt bitwise arithmetic on non scalar integer modes. */
6582 if (! SCALAR_INT_MODE_P (compute_mode))
6584 enum machine_mode imode;
6586 /* Don't do anything for vector or complex integral types. */
6587 if (! FLOAT_MODE_P (compute_mode))
6588 break;
6590 /* Try to find an integral mode to pun with. */
6591 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
6592 if (imode == BLKmode)
6593 break;
6595 compute_mode = imode;
6596 inner = gen_lowpart (imode, inner);
6599 /* Compute a mask of LEN bits, if we can do this on the host machine. */
6600 if (len >= HOST_BITS_PER_WIDE_INT)
6601 break;
6603 /* Now compute the equivalent expression. Make a copy of INNER
6604 for the SET_DEST in case it is a MEM into which we will substitute;
6605 we don't want shared RTL in that case. */
6606 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
6607 cleared = simplify_gen_binary (AND, compute_mode,
6608 simplify_gen_unary (NOT, compute_mode,
6609 simplify_gen_binary (ASHIFT,
6610 compute_mode,
6611 mask, pos),
6612 compute_mode),
6613 inner);
6614 masked = simplify_gen_binary (ASHIFT, compute_mode,
6615 simplify_gen_binary (
6616 AND, compute_mode,
6617 gen_lowpart (compute_mode, SET_SRC (x)),
6618 mask),
6619 pos);
6621 x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
6622 simplify_gen_binary (IOR, compute_mode,
6623 cleared, masked));
6626 return x;
6629 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
6630 it is an RTX that represents a variable starting position; otherwise,
6631 POS is the (constant) starting bit position (counted from the LSB).
6633 UNSIGNEDP is nonzero for an unsigned reference and zero for a
6634 signed reference.
6636 IN_DEST is nonzero if this is a reference in the destination of a
6637 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero,
6638 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
6639 be used.
6641 IN_COMPARE is nonzero if we are in a COMPARE. This means that a
6642 ZERO_EXTRACT should be built even for bits starting at bit 0.
6644 MODE is the desired mode of the result (if IN_DEST == 0).
6646 The result is an RTX for the extraction or NULL_RTX if the target
6647 can't handle it. */
6649 static rtx
6650 make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
6651 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
6652 int in_dest, int in_compare)
6654 /* This mode describes the size of the storage area
6655 to fetch the overall value from. Within that, we
6656 ignore the POS lowest bits, etc. */
6657 enum machine_mode is_mode = GET_MODE (inner);
6658 enum machine_mode inner_mode;
6659 enum machine_mode wanted_inner_mode;
6660 enum machine_mode wanted_inner_reg_mode = word_mode;
6661 enum machine_mode pos_mode = word_mode;
6662 enum machine_mode extraction_mode = word_mode;
6663 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
6664 rtx new_rtx = 0;
6665 rtx orig_pos_rtx = pos_rtx;
6666 HOST_WIDE_INT orig_pos;
6668 if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6670 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
6671 consider just the QI as the memory to extract from.
6672 The subreg adds or removes high bits; its mode is
6673 irrelevant to the meaning of this extraction,
6674 since POS and LEN count from the lsb. */
6675 if (MEM_P (SUBREG_REG (inner)))
6676 is_mode = GET_MODE (SUBREG_REG (inner));
6677 inner = SUBREG_REG (inner);
6679 else if (GET_CODE (inner) == ASHIFT
6680 && CONST_INT_P (XEXP (inner, 1))
6681 && pos_rtx == 0 && pos == 0
6682 && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
6684 /* We're extracting the least significant bits of an rtx
6685 (ashift X (const_int C)), where LEN > C. Extract the
6686 least significant (LEN - C) bits of X, giving an rtx
6687 whose mode is MODE, then shift it left C times. */
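      /* E.g. (illustrative; LEN == 8, C == 2): the low 8 bits of
         (ashift X 2) are ((X & 0x3f) << 2), so we extract the low 6 bits
         of X and shift the result left by 2.  */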
6688 new_rtx = make_extraction (mode, XEXP (inner, 0),
6689 0, 0, len - INTVAL (XEXP (inner, 1)),
6690 unsignedp, in_dest, in_compare);
6691 if (new_rtx != 0)
6692 return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
6695 inner_mode = GET_MODE (inner);
6697 if (pos_rtx && CONST_INT_P (pos_rtx))
6698 pos = INTVAL (pos_rtx), pos_rtx = 0;
6700 /* See if this can be done without an extraction. We never can if the
6701 width of the field is not the same as that of some integer mode. For
6702 registers, we can only avoid the extraction if the position is at the
6703 low-order bit and this is either not in the destination or we have the
6704 appropriate STRICT_LOW_PART operation available.
6706 For MEM, we can avoid an extract if the field starts on an appropriate
6707 boundary and we can change the mode of the memory reference. */
6709 if (tmode != BLKmode
6710 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6711 && !MEM_P (inner)
6712 && (inner_mode == tmode
6713 || !REG_P (inner)
6714 || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
6715 GET_MODE_BITSIZE (inner_mode))
6716 || reg_truncated_to_mode (tmode, inner))
6717 && (! in_dest
6718 || (REG_P (inner)
6719 && have_insn_for (STRICT_LOW_PART, tmode))))
6720 || (MEM_P (inner) && pos_rtx == 0
6721 && (pos
6722 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6723 : BITS_PER_UNIT)) == 0
6724 /* We can't do this if we are widening INNER_MODE (it
6725 may not be aligned, for one thing). */
6726 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6727 && (inner_mode == tmode
6728 || (! mode_dependent_address_p (XEXP (inner, 0))
6729 && ! MEM_VOLATILE_P (inner))))))
6731 /* If INNER is a MEM, make a new MEM that encompasses just the desired
6732 field. If the original and current mode are the same, we need not
6733 adjust the offset. Otherwise, we do if bytes big endian.
6735 If INNER is not a MEM, get a piece consisting of just the field
6736 of interest (in this case POS % BITS_PER_WORD must be 0). */
6738 if (MEM_P (inner))
6740 HOST_WIDE_INT offset;
6742 /* POS counts from lsb, but make OFFSET count in memory order. */
6743 if (BYTES_BIG_ENDIAN)
6744 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6745 else
6746 offset = pos / BITS_PER_UNIT;
6748 new_rtx = adjust_address_nv (inner, tmode, offset);
6750 else if (REG_P (inner))
6752 if (tmode != inner_mode)
6754 /* We can't call gen_lowpart in a DEST since we
6755 always want a SUBREG (see below) and it would sometimes
6756 return a new hard register. */
6757 if (pos || in_dest)
6759 HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6761 if (WORDS_BIG_ENDIAN
6762 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6763 final_word = ((GET_MODE_SIZE (inner_mode)
6764 - GET_MODE_SIZE (tmode))
6765 / UNITS_PER_WORD) - final_word;
6767 final_word *= UNITS_PER_WORD;
6768 if (BYTES_BIG_ENDIAN &&
6769 GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6770 final_word += (GET_MODE_SIZE (inner_mode)
6771 - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6773 /* Avoid creating invalid subregs, for example when
6774 simplifying (x>>32)&255. */
6775 if (!validate_subreg (tmode, inner_mode, inner, final_word))
6776 return NULL_RTX;
6778 new_rtx = gen_rtx_SUBREG (tmode, inner, final_word);
6780 else
6781 new_rtx = gen_lowpart (tmode, inner);
6783 else
6784 new_rtx = inner;
6786 else
6787 new_rtx = force_to_mode (inner, tmode,
6788 len >= HOST_BITS_PER_WIDE_INT
6789 ? ~(unsigned HOST_WIDE_INT) 0
6790 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6793 /* If this extraction is going into the destination of a SET,
6794 make a STRICT_LOW_PART unless we made a MEM. */
6796 if (in_dest)
6797 return (MEM_P (new_rtx) ? new_rtx
6798 : (GET_CODE (new_rtx) != SUBREG
6799 ? gen_rtx_CLOBBER (tmode, const0_rtx)
6800 : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));
6802 if (mode == tmode)
6803 return new_rtx;
6805 if (CONST_INT_P (new_rtx)
6806 || GET_CODE (new_rtx) == CONST_DOUBLE)
6807 return simplify_unary_operation (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6808 mode, new_rtx, tmode);
6810 /* If we know that no extraneous bits are set, and that the high
6811 bit is not set, convert the extraction to the cheaper of
6812 sign and zero extension, which are equivalent in these cases. */
6813 if (flag_expensive_optimizations
6814 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6815 && ((nonzero_bits (new_rtx, tmode)
6816 & ~(((unsigned HOST_WIDE_INT)
6817 GET_MODE_MASK (tmode))
6818 >> 1))
6819 == 0)))
6821 rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
6822 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);
6824 /* Prefer ZERO_EXTENSION, since it gives more information to
6825 backends. */
6826 if (rtx_cost (temp, SET, optimize_this_for_speed_p)
6827 <= rtx_cost (temp1, SET, optimize_this_for_speed_p))
6828 return temp;
6829 return temp1;
6832 /* Otherwise, sign- or zero-extend unless we already are in the
6833 proper mode. */
6835 return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6836 mode, new_rtx));
6839 /* Unless this is a COMPARE or we have a funny memory reference,
6840 don't do anything with zero-extending field extracts starting at
6841 the low-order bit since they are simple AND operations. */
6842 if (pos_rtx == 0 && pos == 0 && ! in_dest
6843 && ! in_compare && unsignedp)
6844 return 0;
6846 /* If INNER is a MEM, reject this if we would be spanning bytes or
6847 if the position is not a constant and the length is not 1. In all
6848 other cases, we would only be going outside our object in cases when
6849 an original shift would have been undefined. */
6850 if (MEM_P (inner)
6851 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6852 || (pos_rtx != 0 && len != 1)))
6853 return 0;
6855 /* Get the mode to use should INNER not be a MEM, the mode for the position,
6856 and the mode for the result. */
6857 if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6859 wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6860 pos_mode = mode_for_extraction (EP_insv, 2);
6861 extraction_mode = mode_for_extraction (EP_insv, 3);
6864 if (! in_dest && unsignedp
6865 && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6867 wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6868 pos_mode = mode_for_extraction (EP_extzv, 3);
6869 extraction_mode = mode_for_extraction (EP_extzv, 0);
6872 if (! in_dest && ! unsignedp
6873 && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6875 wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6876 pos_mode = mode_for_extraction (EP_extv, 3);
6877 extraction_mode = mode_for_extraction (EP_extv, 0);
6880 /* Never narrow an object, since that might not be safe. */
6882 if (mode != VOIDmode
6883 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6884 extraction_mode = mode;
6886 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6887 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6888 pos_mode = GET_MODE (pos_rtx);
6890 /* If this is not from memory, the desired mode is the preferred mode
6891 for an extraction pattern's first input operand, or word_mode if there
6892 is none. */
6893 if (!MEM_P (inner))
6894 wanted_inner_mode = wanted_inner_reg_mode;
6895 else
6897 /* Be careful not to go beyond the extracted object and maintain the
6898 natural alignment of the memory. */
6899 wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
6900 while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
6901 > GET_MODE_BITSIZE (wanted_inner_mode))
6903 wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode);
6904 gcc_assert (wanted_inner_mode != VOIDmode);
6907 /* If we have to change the mode of memory and cannot, the desired mode
6908 is EXTRACTION_MODE. */
6909 if (inner_mode != wanted_inner_mode
6910 && (mode_dependent_address_p (XEXP (inner, 0))
6911 || MEM_VOLATILE_P (inner)
6912 || pos_rtx))
6913 wanted_inner_mode = extraction_mode;
6916 orig_pos = pos;
6918 if (BITS_BIG_ENDIAN)
6920 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6921 BITS_BIG_ENDIAN style. If position is constant, compute new
6922 position. Otherwise, build subtraction.
6923 Note that POS is relative to the mode of the original argument.
6924 If it's a MEM we need to recompute POS relative to that.
6925 However, if we're extracting from (or inserting into) a register,
6926 we want to recompute POS relative to wanted_inner_mode. */
6927 int width = (MEM_P (inner)
6928 ? GET_MODE_BITSIZE (is_mode)
6929 : GET_MODE_BITSIZE (wanted_inner_mode));
6931 if (pos_rtx == 0)
6932 pos = width - len - pos;
6933 else
6934 pos_rtx
6935 = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6936 /* POS may be less than 0 now, but we check for that below.
6937 Note that it can only be less than 0 if !MEM_P (inner). */
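      /* As a worked instance: with a constant position, WIDTH of 32,
	 LEN of 8 and POS of 4, the converted position is
	 32 - 8 - 4 = 20. */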
6940 /* If INNER has a wider mode, and this is a constant extraction, try to
6941      make it smaller and adjust the offset to point to the byte containing
6942 the value. */
6943 if (wanted_inner_mode != VOIDmode
6944 && inner_mode != wanted_inner_mode
6945 && ! pos_rtx
6946 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6947 && MEM_P (inner)
6948 && ! mode_dependent_address_p (XEXP (inner, 0))
6949 && ! MEM_VOLATILE_P (inner))
6951 int offset = 0;
6953 /* The computations below will be correct if the machine is big
6954 endian in both bits and bytes or little endian in bits and bytes.
6955 If it is mixed, we must adjust. */
6957 /* If bytes are big endian and we had a paradoxical SUBREG, we must
6958 adjust OFFSET to compensate. */
6959 if (BYTES_BIG_ENDIAN
6960 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6961 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6963 /* We can now move to the desired byte. */
6964 offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
6965 * GET_MODE_SIZE (wanted_inner_mode);
6966 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6968 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6969 && is_mode != wanted_inner_mode)
6970 offset = (GET_MODE_SIZE (is_mode)
6971 - GET_MODE_SIZE (wanted_inner_mode) - offset);
6973 inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6976 /* If INNER is not memory, get it into the proper mode. If we are changing
6977 its mode, POS must be a constant and smaller than the size of the new
6978 mode. */
6979 else if (!MEM_P (inner))
6981       /* On the LHS, don't create paradoxical subregs implicitly truncating
6982 the register unless TRULY_NOOP_TRUNCATION. */
6983 if (in_dest
6984 && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (inner)),
6985 GET_MODE_BITSIZE (wanted_inner_mode)))
6986 return NULL_RTX;
6988 if (GET_MODE (inner) != wanted_inner_mode
6989 && (pos_rtx != 0
6990 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6991 return NULL_RTX;
6993 if (orig_pos < 0)
6994 return NULL_RTX;
6996 inner = force_to_mode (inner, wanted_inner_mode,
6997 pos_rtx
6998 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6999 ? ~(unsigned HOST_WIDE_INT) 0
7000 : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
7001 				<< orig_pos),
7002 			     0);
7005 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
7006 have to zero extend. Otherwise, we can just use a SUBREG. */
7007 if (pos_rtx != 0
7008 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
7010 rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
7012 /* If we know that no extraneous bits are set, and that the high
7013 	 bit is not set, convert the extraction to the cheaper of sign and
7014 	 zero extension, which are equivalent in these
7015 	 cases. */
7016 if (flag_expensive_optimizations
7017 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
7018 && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
7019 & ~(((unsigned HOST_WIDE_INT)
7020 GET_MODE_MASK (GET_MODE (pos_rtx)))
7021 >> 1))
7022 == 0)))
7024 rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
7026 /* Prefer ZERO_EXTENSION, since it gives more information to
7027 backends. */
7028 if (rtx_cost (temp1, SET, optimize_this_for_speed_p)
7029 < rtx_cost (temp, SET, optimize_this_for_speed_p))
7030 temp = temp1;
7032 pos_rtx = temp;
7034 else if (pos_rtx != 0
7035 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
7036 pos_rtx = gen_lowpart (pos_mode, pos_rtx);
7038 /* Make POS_RTX unless we already have it and it is correct. If we don't
7039 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
7040 be a CONST_INT. */
7041 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
7042 pos_rtx = orig_pos_rtx;
7044 else if (pos_rtx == 0)
7045 pos_rtx = GEN_INT (pos);
7047   /* Make the required operation. See if we can use an existing rtx. */
7048 new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
7049 extraction_mode, inner, GEN_INT (len), pos_rtx);
7050 if (! in_dest)
7051 new_rtx = gen_lowpart (mode, new_rtx);
7053 return new_rtx;
7056 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
7057 with any other operations in X. Return X without that shift if so. */
7059 static rtx
7060 extract_left_shift (rtx x, int count)
7062 enum rtx_code code = GET_CODE (x);
7063 enum machine_mode mode = GET_MODE (x);
7064 rtx tem;
7066 switch (code)
7068 case ASHIFT:
7069 /* This is the shift itself. If it is wide enough, we will return
7070 either the value being shifted if the shift count is equal to
7071 COUNT or a shift for the difference. */
7072 if (CONST_INT_P (XEXP (x, 1))
7073 && INTVAL (XEXP (x, 1)) >= count)
7074 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
7075 INTVAL (XEXP (x, 1)) - count);
7076 break;
7078 case NEG: case NOT:
7079 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
7080 return simplify_gen_unary (code, mode, tem, mode);
7082 break;
7084 case PLUS: case IOR: case XOR: case AND:
7085 /* If we can safely shift this constant and we find the inner shift,
7086 make a new operation. */
7087 if (CONST_INT_P (XEXP (x, 1))
7088 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
7089 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
7090 return simplify_gen_binary (code, mode, tem,
7091 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
7093 break;
7095 default:
7096 break;
7099 return 0;
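/* An illustrative case: with COUNT == 3, (plus (ashift X (const_int 3))
   (const_int 24)) yields (plus X (const_int 3)), since 24 has its low
   three bits clear and can safely be shifted right by 3. */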
7102 /* Look at the expression rooted at X. Look for expressions
7103 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
7104 Form these expressions.
7106 Return the new rtx, usually just X.
7108 Also, for machines like the VAX that don't have logical shift insns,
7109 try to convert logical to arithmetic shift operations in cases where
7110 they are equivalent. This undoes the canonicalizations to logical
7111 shifts done elsewhere.
7113 We try, as much as possible, to re-use rtl expressions to save memory.
7115 IN_CODE says what kind of expression we are processing. Normally, it is
7116    SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
7117 being kludges), it is MEM. When processing the arguments of a comparison
7118 or a COMPARE against zero, it is COMPARE. */
7120 static rtx
7121 make_compound_operation (rtx x, enum rtx_code in_code)
7123 enum rtx_code code = GET_CODE (x);
7124 enum machine_mode mode = GET_MODE (x);
7125 int mode_width = GET_MODE_BITSIZE (mode);
7126 rtx rhs, lhs;
7127 enum rtx_code next_code;
7128 int i, j;
7129 rtx new_rtx = 0;
7130 rtx tem;
7131 const char *fmt;
7133 /* Select the code to be used in recursive calls. Once we are inside an
7134 address, we stay there. If we have a comparison, set to COMPARE,
7135 but once inside, go back to our default of SET. */
7137 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
7138 : ((code == COMPARE || COMPARISON_P (x))
7139 && XEXP (x, 1) == const0_rtx) ? COMPARE
7140 : in_code == COMPARE ? SET : in_code);
7142   /* Process depending on the code of this operation. If NEW_RTX is set
7143 nonzero, it will be returned. */
7145 switch (code)
7147 case ASHIFT:
7148 /* Convert shifts by constants into multiplications if inside
7149 an address. */
7150 if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
7151 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7152 && INTVAL (XEXP (x, 1)) >= 0)
7154 new_rtx = make_compound_operation (XEXP (x, 0), next_code);
7155 new_rtx = gen_rtx_MULT (mode, new_rtx,
7156 GEN_INT ((HOST_WIDE_INT) 1
7157 << INTVAL (XEXP (x, 1))));
7159 break;
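      /* For example, the address (plus (ashift X (const_int 2)) B) is
	 rewritten with (mult X (const_int 4)), the canonical RTL form
	 for a scaled index. */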
7161 case AND:
7162 /* If the second operand is not a constant, we can't do anything
7163 with it. */
7164 if (!CONST_INT_P (XEXP (x, 1)))
7165 break;
7167 /* If the constant is a power of two minus one and the first operand
7168 is a logical right shift, make an extraction. */
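      /* For instance, (and (lshiftrt X (const_int 4)) (const_int 255))
	 selects an 8-bit field starting at bit 4 and so becomes
	 (zero_extract X (const_int 8) (const_int 4)). */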
7169 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7170 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
7172 new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
7173 new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1), i, 1,
7174 0, in_code == COMPARE);
7177 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
7178 else if (GET_CODE (XEXP (x, 0)) == SUBREG
7179 && subreg_lowpart_p (XEXP (x, 0))
7180 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
7181 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
7183 new_rtx = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
7184 next_code);
7185 new_rtx = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new_rtx, 0,
7186 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
7187 0, in_code == COMPARE);
7189 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
7190 else if ((GET_CODE (XEXP (x, 0)) == XOR
7191 || GET_CODE (XEXP (x, 0)) == IOR)
7192 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
7193 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
7194 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
7196 /* Apply the distributive law, and then try to make extractions. */
7197 new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
7198 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
7199 XEXP (x, 1)),
7200 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
7201 XEXP (x, 1)));
7202 new_rtx = make_compound_operation (new_rtx, in_code);
7205       /* If we have (and (rotate X C) M) and C is larger than the number
7206 of bits in M, this is an extraction. */
7208 else if (GET_CODE (XEXP (x, 0)) == ROTATE
7209 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
7210 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
7211 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
7213 new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
7214 new_rtx = make_extraction (mode, new_rtx,
7215 (GET_MODE_BITSIZE (mode)
7216 - INTVAL (XEXP (XEXP (x, 0), 1))),
7217 NULL_RTX, i, 1, 0, in_code == COMPARE);
7220 /* On machines without logical shifts, if the operand of the AND is
7221 a logical shift and our mask turns off all the propagated sign
7222 bits, we can replace the logical shift with an arithmetic shift. */
7223 else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7224 && !have_insn_for (LSHIFTRT, mode)
7225 && have_insn_for (ASHIFTRT, mode)
7226 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
7227 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7228 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7229 && mode_width <= HOST_BITS_PER_WIDE_INT)
7231 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7233 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
7234 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
7235 SUBST (XEXP (x, 0),
7236 gen_rtx_ASHIFTRT (mode,
7237 make_compound_operation
7238 (XEXP (XEXP (x, 0), 0), next_code),
7239 XEXP (XEXP (x, 0), 1)));
7242 /* If the constant is one less than a power of two, this might be
7243 representable by an extraction even if no shift is present.
7244 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
7245 we are in a COMPARE. */
7246 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
7247 new_rtx = make_extraction (mode,
7248 make_compound_operation (XEXP (x, 0),
7249 next_code),
7250 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
7252 /* If we are in a comparison and this is an AND with a power of two,
7253 convert this into the appropriate bit extract. */
7254 else if (in_code == COMPARE
7255 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
7256 new_rtx = make_extraction (mode,
7257 make_compound_operation (XEXP (x, 0),
7258 next_code),
7259 i, NULL_RTX, 1, 1, 0, 1);
7261 break;
7263 case LSHIFTRT:
7264 /* If the sign bit is known to be zero, replace this with an
7265 arithmetic shift. */
7266 if (have_insn_for (ASHIFTRT, mode)
7267 && ! have_insn_for (LSHIFTRT, mode)
7268 && mode_width <= HOST_BITS_PER_WIDE_INT
7269 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
7271 new_rtx = gen_rtx_ASHIFTRT (mode,
7272 make_compound_operation (XEXP (x, 0),
7273 next_code),
7274 XEXP (x, 1));
7275 break;
7278 /* ... fall through ... */
7280 case ASHIFTRT:
7281 lhs = XEXP (x, 0);
7282 rhs = XEXP (x, 1);
7284 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
7285 this is a SIGN_EXTRACT. */
7286 if (CONST_INT_P (rhs)
7287 && GET_CODE (lhs) == ASHIFT
7288 && CONST_INT_P (XEXP (lhs, 1))
7289 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
7290 && INTVAL (rhs) < mode_width)
7292 new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
7293 new_rtx = make_extraction (mode, new_rtx,
7294 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
7295 NULL_RTX, mode_width - INTVAL (rhs),
7296 code == LSHIFTRT, 0, in_code == COMPARE);
7297 break;
7300 /* See if we have operations between an ASHIFTRT and an ASHIFT.
7301 If so, try to merge the shifts into a SIGN_EXTEND. We could
7302 also do this for some cases of SIGN_EXTRACT, but it doesn't
7303 seem worth the effort; the case checked for occurs on Alpha. */
7305 if (!OBJECT_P (lhs)
7306 && ! (GET_CODE (lhs) == SUBREG
7307 && (OBJECT_P (SUBREG_REG (lhs))))
7308 && CONST_INT_P (rhs)
7309 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
7310 && INTVAL (rhs) < mode_width
7311 && (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0)
7312 new_rtx = make_extraction (mode, make_compound_operation (new_rtx, next_code),
7313 0, NULL_RTX, mode_width - INTVAL (rhs),
7314 code == LSHIFTRT, 0, in_code == COMPARE);
7316 break;
7318 case SUBREG:
7319 /* Call ourselves recursively on the inner expression. If we are
7320 narrowing the object and it has a different RTL code from
7321        what it originally had, do this SUBREG as a force_to_mode. */
7323 tem = make_compound_operation (SUBREG_REG (x), in_code);
7326 rtx simplified = simplify_subreg (mode, tem, GET_MODE (SUBREG_REG (x)),
7327 SUBREG_BYTE (x));
7329 if (simplified)
7330 tem = simplified;
7332 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
7333 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
7334 && subreg_lowpart_p (x))
7336 	  rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
7337 				     0);
7339 /* If we have something other than a SUBREG, we might have
7340 done an expansion, so rerun ourselves. */
7341 if (GET_CODE (newer) != SUBREG)
7342 newer = make_compound_operation (newer, in_code);
7344 /* force_to_mode can expand compounds. If it just re-expanded the
7345 	 compound, use gen_lowpart instead to convert to the desired
7346 mode. */
7347 if (rtx_equal_p (newer, x))
7348 return gen_lowpart (GET_MODE (x), tem);
7350 return newer;
7353 if (simplified)
7354 return tem;
7356 break;
7358 default:
7359 break;
7362 if (new_rtx)
7364 x = gen_lowpart (mode, new_rtx);
7365 code = GET_CODE (x);
7368 /* Now recursively process each operand of this operation. */
7369 fmt = GET_RTX_FORMAT (code);
7370 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7371 if (fmt[i] == 'e')
7373 new_rtx = make_compound_operation (XEXP (x, i), next_code);
7374 SUBST (XEXP (x, i), new_rtx);
7376 else if (fmt[i] == 'E')
7377 for (j = 0; j < XVECLEN (x, i); j++)
7379 new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
7380 SUBST (XVECEXP (x, i, j), new_rtx);
7383 /* If this is a commutative operation, the changes to the operands
7384 may have made it noncanonical. */
7385 if (COMMUTATIVE_ARITH_P (x)
7386 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
7388 tem = XEXP (x, 0);
7389 SUBST (XEXP (x, 0), XEXP (x, 1));
7390 SUBST (XEXP (x, 1), tem);
7393 return x;
7396 /* Given M, see if it is a value that would select a field of bits
7397 within an item, but not the entire word. Return -1 if not.
7398 Otherwise, return the starting position of the field, where 0 is the
7399 low-order bit.
7401 *PLEN is set to the length of the field. */
7403 static int
7404 get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
7406 /* Get the bit number of the first 1 bit from the right, -1 if none. */
7407 int pos = exact_log2 (m & -m);
7408 int len = 0;
7410 if (pos >= 0)
7411 /* Now shift off the low-order zero bits and see if we have a
7412 power of two minus 1. */
7413 len = exact_log2 ((m >> pos) + 1);
7415 if (len <= 0)
7416 pos = -1;
7418 *plen = len;
7419 return pos;
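/* A sketch of the behavior: for M == 0x70 the lowest set bit is bit 4
   and (0x70 >> 4) + 1 == 8 is a power of 2, so *PLEN becomes 3 and 4
   is returned; for M == 0x50, (0x50 >> 4) + 1 == 6 is not, so the
   result is -1. */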
7422 /* If X refers to a register that equals REG in value, replace these
7423 references with REG. */
7424 static rtx
7425 canon_reg_for_combine (rtx x, rtx reg)
7427 rtx op0, op1, op2;
7428 const char *fmt;
7429 int i;
7430 bool copied;
7432 enum rtx_code code = GET_CODE (x);
7433 switch (GET_RTX_CLASS (code))
7435 case RTX_UNARY:
7436 op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7437 if (op0 != XEXP (x, 0))
7438 return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
7439 GET_MODE (reg));
7440 break;
7442 case RTX_BIN_ARITH:
7443 case RTX_COMM_ARITH:
7444 op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7445 op1 = canon_reg_for_combine (XEXP (x, 1), reg);
7446 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7447 return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
7448 break;
7450 case RTX_COMPARE:
7451 case RTX_COMM_COMPARE:
7452 op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7453 op1 = canon_reg_for_combine (XEXP (x, 1), reg);
7454 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7455 return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
7456 GET_MODE (op0), op0, op1);
7457 break;
7459 case RTX_TERNARY:
7460 case RTX_BITFIELD_OPS:
7461 op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7462 op1 = canon_reg_for_combine (XEXP (x, 1), reg);
7463 op2 = canon_reg_for_combine (XEXP (x, 2), reg);
7464 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
7465 return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
7466 GET_MODE (op0), op0, op1, op2);
7468 case RTX_OBJ:
7469 if (REG_P (x))
7471 if (rtx_equal_p (get_last_value (reg), x)
7472 || rtx_equal_p (reg, get_last_value (x)))
7473 return reg;
7474 else
7475 break;
7478 /* fall through */
7480 default:
7481 fmt = GET_RTX_FORMAT (code);
7482 copied = false;
7483 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7484 if (fmt[i] == 'e')
7486 rtx op = canon_reg_for_combine (XEXP (x, i), reg);
7487 if (op != XEXP (x, i))
7489 if (!copied)
7491 copied = true;
7492 x = copy_rtx (x);
7494 XEXP (x, i) = op;
7497 else if (fmt[i] == 'E')
7499 int j;
7500 for (j = 0; j < XVECLEN (x, i); j++)
7502 rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
7503 if (op != XVECEXP (x, i, j))
7505 if (!copied)
7507 copied = true;
7508 x = copy_rtx (x);
7510 XVECEXP (x, i, j) = op;
7515 break;
7518 return x;
7521 /* Return X converted to MODE. If the value is already truncated to
7522 MODE we can just return a subreg even though in the general case we
7523 would need an explicit truncation. */
7525 static rtx
7526 gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
7528 if (!CONST_INT_P (x)
7529 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
7530 && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
7531 GET_MODE_BITSIZE (GET_MODE (x)))
7532 && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
7534 /* Bit-cast X into an integer mode. */
7535 if (!SCALAR_INT_MODE_P (GET_MODE (x)))
7536 x = gen_lowpart (int_mode_for_mode (GET_MODE (x)), x);
7537 x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode),
7538 x, GET_MODE (x));
7541 return gen_lowpart (mode, x);
7544 /* See if X can be simplified knowing that we will only refer to it in
7545 MODE and will only refer to those bits that are nonzero in MASK.
7546 If other bits are being computed or if masking operations are done
7547 that select a superset of the bits in MASK, they can sometimes be
7548 ignored.
7550 Return a possibly simplified expression, but always convert X to
7551 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
7553 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
7554 are all off in X. This is used when X will be complemented, by either
7555 NOT, NEG, or XOR. */
7557 static rtx
7558 force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
7559 int just_select)
7561 enum rtx_code code = GET_CODE (x);
7562 int next_select = just_select || code == XOR || code == NOT || code == NEG;
7563 enum machine_mode op_mode;
7564 unsigned HOST_WIDE_INT fuller_mask, nonzero;
7565 rtx op0, op1, temp;
7567 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
7568 code below will do the wrong thing since the mode of such an
7569 expression is VOIDmode.
7571 Also do nothing if X is a CLOBBER; this can happen if X was
7572 the return value from a call to gen_lowpart. */
7573 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
7574 return x;
7576   /* We want to perform the operation in its present mode unless we know
7577 that the operation is valid in MODE, in which case we do the operation
7578 in MODE. */
7579 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
7580 && have_insn_for (code, mode))
7581 ? mode : GET_MODE (x));
7583 /* It is not valid to do a right-shift in a narrower mode
7584 than the one it came in with. */
7585 if ((code == LSHIFTRT || code == ASHIFTRT)
7586 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
7587 op_mode = GET_MODE (x);
7589 /* Truncate MASK to fit OP_MODE. */
7590 if (op_mode)
7591 mask &= GET_MODE_MASK (op_mode);
7593 /* When we have an arithmetic operation, or a shift whose count we
7594 do not know, we need to assume that all bits up to the highest-order
7595 bit in MASK will be needed. This is how we form such a mask. */
7596 if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
7597 fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
7598 else
7599 fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
7600 - 1);
7602 /* Determine what bits of X are guaranteed to be (non)zero. */
7603 nonzero = nonzero_bits (x, mode);
7605 /* If none of the bits in X are needed, return a zero. */
7606 if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
7607 x = const0_rtx;
7609 /* If X is a CONST_INT, return a new one. Do this here since the
7610 test below will fail. */
7611 if (CONST_INT_P (x))
7613 if (SCALAR_INT_MODE_P (mode))
7614 return gen_int_mode (INTVAL (x) & mask, mode);
7615 else
7617 x = GEN_INT (INTVAL (x) & mask);
7618 return gen_lowpart_common (mode, x);
7622 /* If X is narrower than MODE and we want all the bits in X's mode, just
7623 get X in the proper mode. */
7624 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
7625 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
7626 return gen_lowpart (mode, x);
7628 /* We can ignore the effect of a SUBREG if it narrows the mode or
7629 if the constant masks to zero all the bits the mode doesn't have. */
7630 if (GET_CODE (x) == SUBREG
7631 && subreg_lowpart_p (x)
7632 && ((GET_MODE_SIZE (GET_MODE (x))
7633 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7634 || (0 == (mask
7635 & GET_MODE_MASK (GET_MODE (x))
7636 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
7637 return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
7639 /* The arithmetic simplifications here only work for scalar integer modes. */
7640 if (!SCALAR_INT_MODE_P (mode) || !SCALAR_INT_MODE_P (GET_MODE (x)))
7641 return gen_lowpart_or_truncate (mode, x);
7643 switch (code)
7645 case CLOBBER:
7646 /* If X is a (clobber (const_int)), return it since we know we are
7647 generating something that won't match. */
7648 return x;
7650 case SIGN_EXTEND:
7651 case ZERO_EXTEND:
7652 case ZERO_EXTRACT:
7653 case SIGN_EXTRACT:
7654 x = expand_compound_operation (x);
7655 if (GET_CODE (x) != code)
7656 return force_to_mode (x, mode, mask, next_select);
7657 break;
7659 case TRUNCATE:
7660 /* Similarly for a truncate. */
7661 return force_to_mode (XEXP (x, 0), mode, mask, next_select);
7663 case AND:
7664 /* If this is an AND with a constant, convert it into an AND
7665 whose constant is the AND of that constant with MASK. If it
7666 remains an AND of MASK, delete it since it is redundant. */
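      /* For example, with MASK == 0x0f, (and FOO (const_int 0xff)) becomes
	 (and FOO (const_int 0x0f)); if that constant is exactly MASK, the
	 AND is dropped altogether. */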
7668 if (CONST_INT_P (XEXP (x, 1)))
7670 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
7671 mask & INTVAL (XEXP (x, 1)));
7673 /* If X is still an AND, see if it is an AND with a mask that
7674 is just some low-order bits. If so, and it is MASK, we don't
7675 need it. */
7677 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
7678 && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
7679 == mask))
7680 x = XEXP (x, 0);
7682 /* If it remains an AND, try making another AND with the bits
7683 in the mode mask that aren't in MASK turned on. If the
7684 constant in the AND is wide enough, this might make a
7685 cheaper constant. */
7687 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
7688 && GET_MODE_MASK (GET_MODE (x)) != mask
7689 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
7691 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
7692 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
7693 int width = GET_MODE_BITSIZE (GET_MODE (x));
7694 rtx y;
7696 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
7697 number, sign extend it. */
7698 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
7699 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7700 cval |= (HOST_WIDE_INT) -1 << width;
7702 y = simplify_gen_binary (AND, GET_MODE (x),
7703 XEXP (x, 0), GEN_INT (cval));
7704 if (rtx_cost (y, SET, optimize_this_for_speed_p)
7705 < rtx_cost (x, SET, optimize_this_for_speed_p))
7706 x = y;
7709 break;
7712 goto binop;
7714 case PLUS:
7715 /* In (and (plus FOO C1) M), if M is a mask that just turns off
7716 low-order bits (as in an alignment operation) and FOO is already
7717 aligned to that boundary, mask C1 to that boundary as well.
7718 This may eliminate that PLUS and, later, the AND. */
7721 unsigned int width = GET_MODE_BITSIZE (mode);
7722 unsigned HOST_WIDE_INT smask = mask;
7724 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
7725 number, sign extend it. */
7727 if (width < HOST_BITS_PER_WIDE_INT
7728 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7729 smask |= (HOST_WIDE_INT) -1 << width;
7731 if (CONST_INT_P (XEXP (x, 1))
7732 && exact_log2 (- smask) >= 0
7733 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
7734 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
7735 return force_to_mode (plus_constant (XEXP (x, 0),
7736 (INTVAL (XEXP (x, 1)) & smask)),
7737 mode, smask, next_select);
7740 /* ... fall through ... */
7742 case MULT:
7743 /* For PLUS, MINUS and MULT, we need any bits less significant than the
7744 most significant bit in MASK since carries from those bits will
7745 affect the bits we are interested in. */
7746 mask = fuller_mask;
7747 goto binop;
7749 case MINUS:
7750       /* If X is (minus C Y) where C's least significant set bit is larger
7751 	 than any bit in the mask, then we may replace it with (neg Y). */
7752 if (CONST_INT_P (XEXP (x, 0))
7753 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
7754 & -INTVAL (XEXP (x, 0))))
7755 > mask))
7757 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
7758 GET_MODE (x));
7759 return force_to_mode (x, mode, mask, next_select);
7762 /* Similarly, if C contains every bit in the fuller_mask, then we may
7763 replace with (not Y). */
7764 if (CONST_INT_P (XEXP (x, 0))
7765 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
7766 == INTVAL (XEXP (x, 0))))
7768 x = simplify_gen_unary (NOT, GET_MODE (x),
7769 XEXP (x, 1), GET_MODE (x));
7770 return force_to_mode (x, mode, mask, next_select);
7773 mask = fuller_mask;
7774 goto binop;
7776 case IOR:
7777 case XOR:
7778 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7779 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7780 operation which may be a bitfield extraction. Ensure that the
7781 constant we form is not wider than the mode of X. */
7783 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7784 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
7785 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7786 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7787 && CONST_INT_P (XEXP (x, 1))
7788 && ((INTVAL (XEXP (XEXP (x, 0), 1))
7789 + floor_log2 (INTVAL (XEXP (x, 1))))
7790 < GET_MODE_BITSIZE (GET_MODE (x)))
7791 && (INTVAL (XEXP (x, 1))
7792 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
7794 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
7795 << INTVAL (XEXP (XEXP (x, 0), 1)));
7796 temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
7797 XEXP (XEXP (x, 0), 0), temp);
7798 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
7799 XEXP (XEXP (x, 0), 1));
7800 return force_to_mode (x, mode, mask, next_select);
7803 binop:
7804 /* For most binary operations, just propagate into the operation and
7805 change the mode if we have an operation of that mode. */
7807 op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
7808 op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);
7810 /* If we ended up truncating both operands, truncate the result of the
7811 operation instead. */
7812 if (GET_CODE (op0) == TRUNCATE
7813 && GET_CODE (op1) == TRUNCATE)
7815 op0 = XEXP (op0, 0);
7816 op1 = XEXP (op1, 0);
7819 op0 = gen_lowpart_or_truncate (op_mode, op0);
7820 op1 = gen_lowpart_or_truncate (op_mode, op1);
7822 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7823 x = simplify_gen_binary (code, op_mode, op0, op1);
7824 break;
7826 case ASHIFT:
7827 /* For left shifts, do the same, but just for the first operand.
7828 However, we cannot do anything with shifts where we cannot
7829 guarantee that the counts are smaller than the size of the mode
7830 because such a count will have a different meaning in a
7831 wider mode. */
7833 if (! (CONST_INT_P (XEXP (x, 1))
7834 && INTVAL (XEXP (x, 1)) >= 0
7835 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7836 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7837 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7838 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7839 break;
7841 /* If the shift count is a constant and we can do arithmetic in
7842 the mode of the shift, refine which bits we need. Otherwise, use the
7843 conservative form of the mask. */
7844 if (CONST_INT_P (XEXP (x, 1))
7845 && INTVAL (XEXP (x, 1)) >= 0
7846 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7847 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7848 mask >>= INTVAL (XEXP (x, 1));
7849 else
7850 mask = fuller_mask;
7852 op0 = gen_lowpart_or_truncate (op_mode,
7853 force_to_mode (XEXP (x, 0), op_mode,
7854 mask, next_select));
7856 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7857 x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
7858 break;
7860 case LSHIFTRT:
7861 /* Here we can only do something if the shift count is a constant,
7862 this shift constant is valid for the host, and we can do arithmetic
7863 in OP_MODE. */
7865 if (CONST_INT_P (XEXP (x, 1))
7866 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7867 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7869 rtx inner = XEXP (x, 0);
7870 unsigned HOST_WIDE_INT inner_mask;
7872 /* Select the mask of the bits we need for the shift operand. */
7873 inner_mask = mask << INTVAL (XEXP (x, 1));
7875 /* We can only change the mode of the shift if we can do arithmetic
7876 in the mode of the shift and INNER_MASK is no wider than the
7877 width of X's mode. */
7878 if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
7879 op_mode = GET_MODE (x);
7881 inner = force_to_mode (inner, op_mode, inner_mask, next_select);
7883 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7884 x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7887 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7888 shift and AND produces only copies of the sign bit (C2 is one less
7889 than a power of two), we can do this with just a shift. */
7891 if (GET_CODE (x) == LSHIFTRT
7892 && CONST_INT_P (XEXP (x, 1))
7893 /* The shift puts one of the sign bit copies in the least significant
7894 bit. */
7895 && ((INTVAL (XEXP (x, 1))
7896 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7897 >= GET_MODE_BITSIZE (GET_MODE (x)))
7898 && exact_log2 (mask + 1) >= 0
7899 /* Number of bits left after the shift must be more than the mask
7900 needs. */
7901 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7902 <= GET_MODE_BITSIZE (GET_MODE (x)))
7903 /* Must be more sign bit copies than the mask needs. */
7904 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7905 >= exact_log2 (mask + 1)))
7906 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7907 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7908 - exact_log2 (mask + 1)));
7910 goto shiftrt;
7912 case ASHIFTRT:
7913 /* If we are just looking for the sign bit, we don't need this shift at
7914 all, even if it has a variable count. */
7915 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7916 && (mask == ((unsigned HOST_WIDE_INT) 1
7917 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7918 return force_to_mode (XEXP (x, 0), mode, mask, next_select);
7920 /* If this is a shift by a constant, get a mask that contains those bits
7921 that are not copies of the sign bit. We then have two cases: If
7922 MASK only includes those bits, this can be a logical shift, which may
7923 allow simplifications. If MASK is a single-bit field not within
7924 those bits, we are requesting a copy of the sign bit and hence can
7925 shift the sign bit to the appropriate location. */
7927 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
7928 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7930 int i;
7932 /* If the considered data is wider than HOST_WIDE_INT, we can't
7933 represent a mask for all its bits in a single scalar.
7934 But we only care about the lower bits, so calculate these. */
7936 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7938 nonzero = ~(HOST_WIDE_INT) 0;
7940 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7941 is the number of bits a full-width mask would have set.
7942 We need only shift if these are fewer than nonzero can
7943 hold. If not, we must keep all bits set in nonzero. */
7945 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7946 < HOST_BITS_PER_WIDE_INT)
7947 nonzero >>= INTVAL (XEXP (x, 1))
7948 + HOST_BITS_PER_WIDE_INT
7949 			  - GET_MODE_BITSIZE (GET_MODE (x));
7951 else
7953 nonzero = GET_MODE_MASK (GET_MODE (x));
7954 nonzero >>= INTVAL (XEXP (x, 1));
7957 if ((mask & ~nonzero) == 0)
7959 x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x),
7960 XEXP (x, 0), INTVAL (XEXP (x, 1)));
7961 if (GET_CODE (x) != ASHIFTRT)
7962 return force_to_mode (x, mode, mask, next_select);
7965 else if ((i = exact_log2 (mask)) >= 0)
7967 x = simplify_shift_const
7968 (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7969 GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7971 if (GET_CODE (x) != ASHIFTRT)
7972 return force_to_mode (x, mode, mask, next_select);
7976 /* If MASK is 1, convert this to an LSHIFTRT. This can be done
7977 even if the shift count isn't a constant. */
7978 if (mask == 1)
7979 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
7980 XEXP (x, 0), XEXP (x, 1));
7982 shiftrt:
7984 /* If this is a zero- or sign-extension operation that just affects bits
7985 we don't care about, remove it. Be sure the call above returned
7986 something that is still a shift. */
7988 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7989 && CONST_INT_P (XEXP (x, 1))
7990 && INTVAL (XEXP (x, 1)) >= 0
7991 && (INTVAL (XEXP (x, 1))
7992 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7993 && GET_CODE (XEXP (x, 0)) == ASHIFT
7994 && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
7995 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7996 next_select);
7998 break;
8000 case ROTATE:
8001 case ROTATERT:
8002 /* If the shift count is constant and we can do computations
8003 in the mode of X, compute where the bits we care about are.
8004 Otherwise, we can't do anything. Don't change the mode of
8005 the shift or propagate MODE into the shift, though. */
8006 if (CONST_INT_P (XEXP (x, 1))
8007 && INTVAL (XEXP (x, 1)) >= 0)
8009 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
8010 GET_MODE (x), GEN_INT (mask),
8011 XEXP (x, 1));
8012 if (temp && CONST_INT_P (temp))
8013 SUBST (XEXP (x, 0),
8014 force_to_mode (XEXP (x, 0), GET_MODE (x),
8015 INTVAL (temp), next_select));
8017 break;
8019 case NEG:
8020 /* If we just want the low-order bit, the NEG isn't needed since it
8021 won't change the low-order bit. */
8022 if (mask == 1)
8023 return force_to_mode (XEXP (x, 0), mode, mask, just_select);
8025 /* We need any bits less significant than the most significant bit in
8026 MASK since carries from those bits will affect the bits we are
8027 interested in. */
8028 mask = fuller_mask;
8029 goto unop;
8031 case NOT:
8032 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
8033 same as the XOR case above. Ensure that the constant we form is not
8034 wider than the mode of X. */
8036 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8037 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8038 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
8039 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
8040 < GET_MODE_BITSIZE (GET_MODE (x)))
8041 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8043 temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
8044 GET_MODE (x));
8045 temp = simplify_gen_binary (XOR, GET_MODE (x),
8046 XEXP (XEXP (x, 0), 0), temp);
8047 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
8048 temp, XEXP (XEXP (x, 0), 1));
8050 return force_to_mode (x, mode, mask, next_select);
8053 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
8054 use the full mask inside the NOT. */
8055 mask = fuller_mask;
8057 unop:
8058 op0 = gen_lowpart_or_truncate (op_mode,
8059 force_to_mode (XEXP (x, 0), mode, mask,
8060 next_select));
8061 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
8062 x = simplify_gen_unary (code, op_mode, op0, op_mode);
8063 break;
8065 case NE:
8066 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
8067 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
8068 which is equal to STORE_FLAG_VALUE. */
8069 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
8070 && GET_MODE (XEXP (x, 0)) == mode
8071 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
8072 && (nonzero_bits (XEXP (x, 0), mode)
8073 == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
8074 return force_to_mode (XEXP (x, 0), mode, mask, next_select);
8076 break;
8078 case IF_THEN_ELSE:
8079 /* We have no way of knowing if the IF_THEN_ELSE can itself be
8080 written in a narrower mode. We play it safe and do not do so. */
8082 SUBST (XEXP (x, 1),
8083 gen_lowpart_or_truncate (GET_MODE (x),
8084 force_to_mode (XEXP (x, 1), mode,
8085 mask, next_select)));
8086 SUBST (XEXP (x, 2),
8087 gen_lowpart_or_truncate (GET_MODE (x),
8088 force_to_mode (XEXP (x, 2), mode,
8089 mask, next_select)));
8090 break;
8092 default:
8093 break;
8096 /* Ensure we return a value of the proper mode. */
8097 return gen_lowpart_or_truncate (mode, x);
8100 /* Return nonzero if X is an expression that has one of two values depending on
8101 whether some other value is zero or nonzero. In that case, we return the
8102    value that is being tested, *PTRUE is set to the value X has when the
8103    rtx being returned is nonzero, and *PFALSE is set to the other alternative.
8105 If we return zero, we set *PTRUE and *PFALSE to X. */
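/* For example, applied to (ne X (const_int 0)) this returns X, with
   *PTRUE = const_true_rtx and *PFALSE = const0_rtx. */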
8107 static rtx
8108 if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
8110 enum machine_mode mode = GET_MODE (x);
8111 enum rtx_code code = GET_CODE (x);
8112 rtx cond0, cond1, true0, true1, false0, false1;
8113 unsigned HOST_WIDE_INT nz;
8115 /* If we are comparing a value against zero, we are done. */
8116 if ((code == NE || code == EQ)
8117 && XEXP (x, 1) == const0_rtx)
8119 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
8120 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
8121 return XEXP (x, 0);
8124 /* If this is a unary operation whose operand has one of two values, apply
8125 our opcode to compute those values. */
8126 else if (UNARY_P (x)
8127 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
8129 *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
8130 *pfalse = simplify_gen_unary (code, mode, false0,
8131 GET_MODE (XEXP (x, 0)));
8132 return cond0;
8135 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
8136 make can't possibly match and would suppress other optimizations. */
8137 else if (code == COMPARE)
8140 /* If this is a binary operation, see if either side has only one of two
8141 values. If either one does or if both do and they are conditional on
8142 the same value, compute the new true and false values. */
8143 else if (BINARY_P (x))
8145 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
8146 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
8148 if ((cond0 != 0 || cond1 != 0)
8149 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
8151 /* If if_then_else_cond returned zero, then true/false are the
8152 same rtl. We must copy one of them to prevent invalid rtl
8153 sharing. */
8154 if (cond0 == 0)
8155 true0 = copy_rtx (true0);
8156 else if (cond1 == 0)
8157 true1 = copy_rtx (true1);
8159 if (COMPARISON_P (x))
8161 *ptrue = simplify_gen_relational (code, mode, VOIDmode,
8162 true0, true1);
8163 *pfalse = simplify_gen_relational (code, mode, VOIDmode,
8164 false0, false1);
8166 else
8168 *ptrue = simplify_gen_binary (code, mode, true0, true1);
8169 *pfalse = simplify_gen_binary (code, mode, false0, false1);
8172 return cond0 ? cond0 : cond1;
8175 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
8176 operands is zero when the other is nonzero, and vice-versa,
8177 and STORE_FLAG_VALUE is 1 or -1. */
8179 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8180 && (code == PLUS || code == IOR || code == XOR || code == MINUS
8181 || code == UMAX)
8182 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
8184 rtx op0 = XEXP (XEXP (x, 0), 1);
8185 rtx op1 = XEXP (XEXP (x, 1), 1);
8187 cond0 = XEXP (XEXP (x, 0), 0);
8188 cond1 = XEXP (XEXP (x, 1), 0);
8190 if (COMPARISON_P (cond0)
8191 && COMPARISON_P (cond1)
8192 && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
8193 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
8194 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
8195 || ((swap_condition (GET_CODE (cond0))
8196 == reversed_comparison_code (cond1, NULL))
8197 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
8198 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
8199 && ! side_effects_p (x))
8201 *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
8202 *pfalse = simplify_gen_binary (MULT, mode,
8203 (code == MINUS
8204 ? simplify_gen_unary (NEG, mode,
8205 op1, mode)
8206 : op1),
8207 const_true_rtx);
8208 return cond0;
8212 /* Similarly for MULT, AND and UMIN, except that for these the result
8213 is always zero. */
8214 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8215 && (code == MULT || code == AND || code == UMIN)
8216 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
8218 cond0 = XEXP (XEXP (x, 0), 0);
8219 cond1 = XEXP (XEXP (x, 1), 0);
8221 if (COMPARISON_P (cond0)
8222 && COMPARISON_P (cond1)
8223 && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
8224 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
8225 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
8226 || ((swap_condition (GET_CODE (cond0))
8227 == reversed_comparison_code (cond1, NULL))
8228 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
8229 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
8230 && ! side_effects_p (x))
8232 *ptrue = *pfalse = const0_rtx;
8233 return cond0;
8238 else if (code == IF_THEN_ELSE)
8240 /* If we have IF_THEN_ELSE already, extract the condition and
8241 canonicalize it if it is NE or EQ. */
8242 cond0 = XEXP (x, 0);
8243 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
8244 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
8245 return XEXP (cond0, 0);
8246 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
8248 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
8249 return XEXP (cond0, 0);
8251 else
8252 return cond0;
8255 /* If X is a SUBREG, we can narrow both the true and false values
8256      of the inner expression, if there is a condition. */
8257 else if (code == SUBREG
8258 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
8259 &true0, &false0)))
8261 true0 = simplify_gen_subreg (mode, true0,
8262 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
8263 false0 = simplify_gen_subreg (mode, false0,
8264 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
8265 if (true0 && false0)
8267 *ptrue = true0;
8268 *pfalse = false0;
8269 return cond0;
8273   /* If X is a constant, this isn't special and will cause confusion
8274 if we treat it as such. Likewise if it is equivalent to a constant. */
8275 else if (CONSTANT_P (x)
8276 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
8279 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
8280 will be least confusing to the rest of the compiler. */
8281 else if (mode == BImode)
8283 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
8284 return x;
8287 /* If X is known to be either 0 or -1, those are the true and
8288 false values when testing X. */
8289 else if (x == constm1_rtx || x == const0_rtx
8290 || (mode != VOIDmode
8291 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
8293 *ptrue = constm1_rtx, *pfalse = const0_rtx;
8294 return x;
8297 /* Likewise for 0 or a single bit. */
8298 else if (SCALAR_INT_MODE_P (mode)
8299 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8300 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
8302 *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
8303 return x;
8306 /* Otherwise fail; show no condition with true and false values the same. */
8307 *ptrue = *pfalse = x;
8308 return 0;
8311 /* Return the value of expression X given the fact that condition COND
8312 is known to be true when applied to REG as its first operand and VAL
8313 as its second. X is known to not be shared and so can be modified in
8314 place.
8316 We only handle the simplest cases, and specifically those cases that
8317 arise with IF_THEN_ELSE expressions. */
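/* For instance, given the condition (ge REG (const_int 0)), (abs REG)
   simplifies to REG itself; given (lt REG (const_int 0)), it becomes
   (neg REG). */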
8319 static rtx
8320 known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
8322 enum rtx_code code = GET_CODE (x);
8323 rtx temp;
8324 const char *fmt;
8325 int i, j;
8327 if (side_effects_p (x))
8328 return x;
8330 /* If either operand of the condition is a floating point value,
8331 then we have to avoid collapsing an EQ comparison. */
8332 if (cond == EQ
8333 && rtx_equal_p (x, reg)
8334 && ! FLOAT_MODE_P (GET_MODE (x))
8335 && ! FLOAT_MODE_P (GET_MODE (val)))
8336 return val;
8338 if (cond == UNEQ && rtx_equal_p (x, reg))
8339 return val;
8341 /* If X is (abs REG) and we know something about REG's relationship
8342 with zero, we may be able to simplify this. */
8344 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
8345 switch (cond)
8347 case GE: case GT: case EQ:
8348 return XEXP (x, 0);
8349 case LT: case LE:
8350 return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
8351 XEXP (x, 0),
8352 GET_MODE (XEXP (x, 0)));
8353 default:
8354 break;
8357 /* The only other cases we handle are MIN, MAX, and comparisons if the
8358 operands are the same as REG and VAL. */
8360 else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
8362 if (rtx_equal_p (XEXP (x, 0), val))
8363 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
8365 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
8367 if (COMPARISON_P (x))
8369 if (comparison_dominates_p (cond, code))
8370 return const_true_rtx;
8372 code = reversed_comparison_code (x, NULL);
8373 if (code != UNKNOWN
8374 && comparison_dominates_p (cond, code))
8375 return const0_rtx;
8376 else
8377 return x;
8379 else if (code == SMAX || code == SMIN
8380 || code == UMIN || code == UMAX)
8382 int unsignedp = (code == UMIN || code == UMAX);
8384 /* Do not reverse the condition when it is NE or EQ.
8385 This is because we cannot conclude anything about
8386 the value of 'SMAX (x, y)' when x is not equal to y,
8387 but we can when x equals y. */
8388 if ((code == SMAX || code == UMAX)
8389 && ! (cond == EQ || cond == NE))
8390 cond = reverse_condition (cond);
8392 switch (cond)
8394 case GE: case GT:
8395 return unsignedp ? x : XEXP (x, 1);
8396 case LE: case LT:
8397 return unsignedp ? x : XEXP (x, 0);
8398 case GEU: case GTU:
8399 return unsignedp ? XEXP (x, 1) : x;
8400 case LEU: case LTU:
8401 return unsignedp ? XEXP (x, 0) : x;
8402 default:
8403 break;
8408 else if (code == SUBREG)
8410 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
8411 rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);
8413 if (SUBREG_REG (x) != r)
8415 /* We must simplify subreg here, before we lose track of the
8416 original inner_mode. */
8417 new_rtx = simplify_subreg (GET_MODE (x), r,
8418 inner_mode, SUBREG_BYTE (x));
8419 if (new_rtx)
8420 return new_rtx;
8421 else
8422 SUBST (SUBREG_REG (x), r);
8425 return x;
8427 /* We don't have to handle SIGN_EXTEND here, because even in the
8428 case of replacing something with a modeless CONST_INT, a
8429 CONST_INT is already (supposed to be) a valid sign extension for
8430 its narrower mode, which implies it's already properly
8431 sign-extended for the wider mode. Now, for ZERO_EXTEND, the
8432 story is different. */
8433 else if (code == ZERO_EXTEND)
8435 enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
8436 rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);
8438 if (XEXP (x, 0) != r)
8440 /* We must simplify the zero_extend here, before we lose
8441 track of the original inner_mode. */
8442 new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
8443 r, inner_mode);
8444 if (new_rtx)
8445 return new_rtx;
8446 else
8447 SUBST (XEXP (x, 0), r);
8450 return x;
8453 fmt = GET_RTX_FORMAT (code);
8454 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8456 if (fmt[i] == 'e')
8457 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
8458 else if (fmt[i] == 'E')
8459 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8460 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
8461 cond, reg, val));
8464 return x;
8467 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
8468 assignment as a field assignment. */
8470 static int
8471 rtx_equal_for_field_assignment_p (rtx x, rtx y)
8473 if (x == y || rtx_equal_p (x, y))
8474 return 1;
8476 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
8477 return 0;
8479 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
8480 Note that all SUBREGs of MEM are paradoxical; otherwise they
8481 would have been rewritten. */
8482 if (MEM_P (x) && GET_CODE (y) == SUBREG
8483 && MEM_P (SUBREG_REG (y))
8484 && rtx_equal_p (SUBREG_REG (y),
8485 gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
8486 return 1;
8488 if (MEM_P (y) && GET_CODE (x) == SUBREG
8489 && MEM_P (SUBREG_REG (x))
8490 && rtx_equal_p (SUBREG_REG (x),
8491 gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
8492 return 1;
8494 /* We used to see if get_last_value of X and Y were the same but that's
8495 not correct. In one direction, we'll cause the assignment to have
8496      the wrong destination and in the other case, we'll import a register
8497      into this insn that might already have been dead. So fail if none of the
8498 above cases are true. */
8499 return 0;
8502 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
8503 Return that assignment if so.
8505 We only handle the most common cases. */
8507 static rtx
8508 make_field_assignment (rtx x)
8510 rtx dest = SET_DEST (x);
8511 rtx src = SET_SRC (x);
8512 rtx assign;
8513 rtx rhs, lhs;
8514 HOST_WIDE_INT c1;
8515 HOST_WIDE_INT pos;
8516 unsigned HOST_WIDE_INT len;
8517 rtx other;
8518 enum machine_mode mode;
8520 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
8521 a clear of a one-bit field. We will have changed it to
8522 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
8523 for a SUBREG. */
8525 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
8526 && CONST_INT_P (XEXP (XEXP (src, 0), 0))
8527 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
8528 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8530 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
8531 1, 1, 1, 0);
8532 if (assign != 0)
8533 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
8534 return x;
8537 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
8538 && subreg_lowpart_p (XEXP (src, 0))
8539 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
8540 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
8541 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
8542 && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
8543 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
8544 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8546 assign = make_extraction (VOIDmode, dest, 0,
8547 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
8548 1, 1, 1, 0);
8549 if (assign != 0)
8550 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
8551 return x;
8554 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
8555 one-bit field. */
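  /* E.g. (set DEST (ior (ashift (const_int 1) POS) DEST)) becomes
     (set (zero_extract DEST (const_int 1) POS) (const_int 1)). */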
8556 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
8557 && XEXP (XEXP (src, 0), 0) == const1_rtx
8558 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8560 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
8561 1, 1, 1, 0);
8562 if (assign != 0)
8563 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
8564 return x;
8567 /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
8568 SRC is an AND with all bits of that field set, then we can discard
8569 the AND. */
8570 if (GET_CODE (dest) == ZERO_EXTRACT
8571 && CONST_INT_P (XEXP (dest, 1))
8572 && GET_CODE (src) == AND
8573 && CONST_INT_P (XEXP (src, 1)))
8575 HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
8576 unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
8577 unsigned HOST_WIDE_INT ze_mask;
8579 if (width >= HOST_BITS_PER_WIDE_INT)
8580 ze_mask = -1;
8581 else
8582 ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;
8584 /* Complete overlap. We can remove the source AND. */
8585 if ((and_mask & ze_mask) == ze_mask)
8586 return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
8588 /* Partial overlap. We can reduce the source AND. */
8589 if ((and_mask & ze_mask) != and_mask)
8591 mode = GET_MODE (src);
8592 src = gen_rtx_AND (mode, XEXP (src, 0),
8593 gen_int_mode (and_mask & ze_mask, mode));
8594 return gen_rtx_SET (VOIDmode, dest, src);
8598 /* The other case we handle is assignments into a constant-position
8599 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
8600 a mask that has all one bits except for a group of zero bits and
8601 OTHER is known to have zeros where C1 has ones, this is such an
8602 assignment. Compute the position and length from C1. Shift OTHER
8603 to the appropriate position, force it to the required mode, and
8604 make the extraction. Check for the AND in both operands. */
8606 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
8607 return x;
8609 rhs = expand_compound_operation (XEXP (src, 0));
8610 lhs = expand_compound_operation (XEXP (src, 1));
8612 if (GET_CODE (rhs) == AND
8613 && CONST_INT_P (XEXP (rhs, 1))
8614 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
8615 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
8616 else if (GET_CODE (lhs) == AND
8617 && CONST_INT_P (XEXP (lhs, 1))
8618 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
8619 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
8620 else
8621 return x;
8623 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
8624 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
8625 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
8626 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
8627 return x;
8629 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
8630 if (assign == 0)
8631 return x;
8633 /* The mode to use for the source is the mode of the assignment, or of
8634 what is inside a possible STRICT_LOW_PART. */
8635 mode = (GET_CODE (assign) == STRICT_LOW_PART
8636 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
8638 /* Shift OTHER right POS places and make it the source, restricting it
8639 to the proper length and mode. */
8641 src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
8642 GET_MODE (src),
8643 other, pos),
8644 dest);
8645 src = force_to_mode (src, mode,
8646 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
8647 ? ~(unsigned HOST_WIDE_INT) 0
8648 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
8649 0);
8651 /* If SRC is masked by an AND that does not make a difference in
8652 the value being stored, strip it. */
8653 if (GET_CODE (assign) == ZERO_EXTRACT
8654 && CONST_INT_P (XEXP (assign, 1))
8655 && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
8656 && GET_CODE (src) == AND
8657 && CONST_INT_P (XEXP (src, 1))
8658 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
8659 == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
8660 src = XEXP (src, 0);
8662 return gen_rtx_SET (VOIDmode, assign, src);
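   /* As an illustrative sketch, assume SImode and that X is known to
      fit in the low 8 bits: (set D (ior (and D (const_int -256)) X))
      has C1 == 0xffffff00, so POS = 0 and LEN = 8, and the insn
      becomes (set (zero_extract D (const_int 8) (const_int 0)) X).  */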
8665 /* See if X is of the form (+ (* a c) (* b c)), and if so convert it
8666 to (* (+ a b) c). */
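/* For instance, with a logical outer operation,
   (ior (and A C) (and B C)) becomes (and (ior A B) C),
   since AND distributes over IOR.  */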
8668 static rtx
8669 apply_distributive_law (rtx x)
8671 enum rtx_code code = GET_CODE (x);
8672 enum rtx_code inner_code;
8673 rtx lhs, rhs, other;
8674 rtx tem;
8676 /* Distributivity does not hold for floating point, as it can change
8677 the rounded value. So we don't do it unless -funsafe-math-optimizations. */
8678 if (FLOAT_MODE_P (GET_MODE (x))
8679 && ! flag_unsafe_math_optimizations)
8680 return x;
8682 /* The outer operation can only be one of the following: */
8683 if (code != IOR && code != AND && code != XOR
8684 && code != PLUS && code != MINUS)
8685 return x;
8687 lhs = XEXP (x, 0);
8688 rhs = XEXP (x, 1);
8690 /* If either operand is a primitive we can't do anything, so get out
8691 fast. */
8692 if (OBJECT_P (lhs) || OBJECT_P (rhs))
8693 return x;
8695 lhs = expand_compound_operation (lhs);
8696 rhs = expand_compound_operation (rhs);
8697 inner_code = GET_CODE (lhs);
8698 if (inner_code != GET_CODE (rhs))
8699 return x;
8701 /* See if the inner and outer operations distribute. */
8702 switch (inner_code)
8704 case LSHIFTRT:
8705 case ASHIFTRT:
8706 case AND:
8707 case IOR:
8708 /* These all distribute except over PLUS. */
8709 if (code == PLUS || code == MINUS)
8710 return x;
8711 break;
8713 case MULT:
8714 if (code != PLUS && code != MINUS)
8715 return x;
8716 break;
8718 case ASHIFT:
8719 /* This is also a multiply, so it distributes over everything. */
8720 break;
8722 case SUBREG:
8723 /* Non-paradoxical SUBREGs distribute over all operations,
8724 provided the inner modes and byte offsets are the same, this
8725 is an extraction of a low-order part, we don't convert an fp
8726 operation to int or vice versa, this is not a vector mode,
8727 and we would not be converting a single-word operation into a
8728 multi-word operation. The latter test is not required, but
8729 it prevents generating unneeded multi-word operations. Some
8730 of the previous tests are redundant given the latter test,
8731 but are retained because they are required for correctness.
8733 We produce the result slightly differently in this case. */
8735 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
8736 || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
8737 || ! subreg_lowpart_p (lhs)
8738 || (GET_MODE_CLASS (GET_MODE (lhs))
8739 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
8740 || (GET_MODE_SIZE (GET_MODE (lhs))
8741 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
8742 || VECTOR_MODE_P (GET_MODE (lhs))
8743 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
8744 /* Result might need to be truncated. Don't change mode if
8745 explicit truncation is needed. */
8746 || !TRULY_NOOP_TRUNCATION
8747 (GET_MODE_BITSIZE (GET_MODE (x)),
8748 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (lhs)))))
8749 return x;
8751 tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8752 SUBREG_REG (lhs), SUBREG_REG (rhs));
8753 return gen_lowpart (GET_MODE (x), tem);
8755 default:
8756 return x;
8759 /* Set LHS and RHS to the inner operands (A and B in the example
8760 above) and set OTHER to the common operand (C in the example).
8761 There is only one way to do this unless the inner operation is
8762 commutative. */
8763 if (COMMUTATIVE_ARITH_P (lhs)
8764 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8765 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
8766 else if (COMMUTATIVE_ARITH_P (lhs)
8767 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8768 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
8769 else if (COMMUTATIVE_ARITH_P (lhs)
8770 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8771 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8772 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8773 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8774 else
8775 return x;
8777 /* Form the new inner operation, seeing if it simplifies first. */
8778 tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
8780 /* There is one exception to the general way of distributing:
8781 (a | c) ^ (b | c) -> (a ^ b) & ~c */
8782 if (code == XOR && inner_code == IOR)
8784 inner_code = AND;
8785 other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
8788 /* We may be able to continue distributing the result, so call
8789 ourselves recursively on the inner operation before forming the
8790 outer operation, which we return. */
8791 return simplify_gen_binary (inner_code, GET_MODE (x),
8792 apply_distributive_law (tem), other);
8795 /* See if X is of the form (* (+ A B) C), and if so convert to
8796 (+ (* A C) (* B C)) and try to simplify.
8798 Most of the time, this results in no change. However, if some of
8799 the operands are the same or inverses of each other, simplifications
8800 will result.
8802 For example, (and (ior A B) (not B)) can occur as the result of
8803 expanding a bit field assignment. When we apply the distributive
8804 law to this, we get (ior (and A (not B)) (and B (not B))),
8805 which then simplifies to (and A (not B)).
8807 Note that we do not check the validity of applying the inverse
8808 distributive law here; such checks would be pointless, since validity
8809 is guaranteed at the few places where this routine is called.
8811 N is the index of the term that is decomposed (the arithmetic operation,
8812 i.e. (+ A B) in the first example above). !N is the index of the term that
8813 is distributed, i.e. of C in the first example above. */
8814 static rtx
8815 distribute_and_simplify_rtx (rtx x, int n)
8817 enum machine_mode mode;
8818 enum rtx_code outer_code, inner_code;
8819 rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
8821 /* Distributivity does not hold for floating point, as it can change
8822 the rounded value. So we don't do it unless -funsafe-math-optimizations. */
8823 if (FLOAT_MODE_P (GET_MODE (x))
8824 && ! flag_unsafe_math_optimizations)
8825 return NULL_RTX;
8827 decomposed = XEXP (x, n);
8828 if (!ARITHMETIC_P (decomposed))
8829 return NULL_RTX;
8831 mode = GET_MODE (x);
8832 outer_code = GET_CODE (x);
8833 distributed = XEXP (x, !n);
8835 inner_code = GET_CODE (decomposed);
8836 inner_op0 = XEXP (decomposed, 0);
8837 inner_op1 = XEXP (decomposed, 1);
8839 /* Special case (and (xor B C) (not A)), which is equivalent to
8840 (xor (ior A B) (ior A C)) */
8841 if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
8843 distributed = XEXP (distributed, 0);
8844 outer_code = IOR;
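      /* A quick truth-table check of that identity (a sketch): if A is 1,
	 both (ior A B) and (ior A C) are 1, so their XOR is 0, matching
	 (and ... (not A)); if A is 0, the XOR reduces to (xor B C).  */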
8847 if (n == 0)
8849 /* Distribute the second term. */
8850 new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
8851 new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
8853 else
8855 /* Distribute the first term. */
8856 new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
8857 new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
8860 tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
8861 new_op0, new_op1));
8862 if (GET_CODE (tmp) != outer_code
8863 && rtx_cost (tmp, SET, optimize_this_for_speed_p)
8864 < rtx_cost (x, SET, optimize_this_for_speed_p))
8865 return tmp;
8867 return NULL_RTX;
8870 /* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
8871 in MODE. Return an equivalent form, if different from (and VAROP
8872 (const_int CONSTOP)). Otherwise, return NULL_RTX. */
8874 static rtx
8875 simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
8876 unsigned HOST_WIDE_INT constop)
8878 unsigned HOST_WIDE_INT nonzero;
8879 unsigned HOST_WIDE_INT orig_constop;
8880 rtx orig_varop;
8881 int i;
8883 orig_varop = varop;
8884 orig_constop = constop;
8885 if (GET_CODE (varop) == CLOBBER)
8886 return NULL_RTX;
8888 /* Simplify VAROP knowing that we will be only looking at some of the
8889 bits in it.
8891 Note by passing in CONSTOP, we guarantee that the bits not set in
8892 CONSTOP are not significant and will never be examined. We must
8893 ensure that is the case by explicitly masking out those bits
8894 before returning. */
8895 varop = force_to_mode (varop, mode, constop, 0);
8897 /* If VAROP is a CLOBBER, we will fail so return it. */
8898 if (GET_CODE (varop) == CLOBBER)
8899 return varop;
8901 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8902 to VAROP and return the new constant. */
8903 if (CONST_INT_P (varop))
8904 return gen_int_mode (INTVAL (varop) & constop, mode);
8906 /* See what bits may be nonzero in VAROP. Unlike the general case of
8907 a call to nonzero_bits, here we don't care about bits outside
8908 MODE. */
8910 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
8912 /* Turn off all bits in the constant that are known to already be zero.
8913 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8914 which is tested below. */
8916 constop &= nonzero;
8918 /* If we don't have any bits left, return zero. */
8919 if (constop == 0)
8920 return const0_rtx;
8922 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8923 a power of two, we can replace this with an ASHIFT. */
8924 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8925 && (i = exact_log2 (constop)) >= 0)
8926 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
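  /* For instance (a sketch), if X is known to be 0 or 1, then
     (and (neg X) (const_int 8)) is (ashift X 3): when X is 1 the NEG
     is all ones and the AND picks out bit 3; when X is 0 both forms
     are zero.  */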
8928 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8929 or XOR, then try to apply the distributive law. This may eliminate
8930 operations if either branch can be simplified because of the AND.
8931 It may also make some cases more complex, but those cases probably
8932 won't match a pattern either with or without this. */
8934 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8935 return
8936 gen_lowpart
8937 (mode,
8938 apply_distributive_law
8939 (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
8940 simplify_and_const_int (NULL_RTX,
8941 GET_MODE (varop),
8942 XEXP (varop, 0),
8943 constop),
8944 simplify_and_const_int (NULL_RTX,
8945 GET_MODE (varop),
8946 XEXP (varop, 1),
8947 constop))));
8949 /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8950 the AND and see if one of the operands simplifies to zero. If so, we
8951 may eliminate it. */
8953 if (GET_CODE (varop) == PLUS
8954 && exact_log2 (constop + 1) >= 0)
8956 rtx o0, o1;
8958 o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8959 o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8960 if (o0 == const0_rtx)
8961 return o1;
8962 if (o1 == const0_rtx)
8963 return o0;
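  /* For example (a sketch), with CONSTOP == 3 and A known to be a
     multiple of 4, (and (plus A B) 3) reduces to (and B 3), because
     the low two bits of the sum come from B alone.  */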
8966 /* Make a SUBREG if necessary. If we can't make it, fail. */
8967 varop = gen_lowpart (mode, varop);
8968 if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
8969 return NULL_RTX;
8971 /* If we are only masking insignificant bits, return VAROP. */
8972 if (constop == nonzero)
8973 return varop;
8975 if (varop == orig_varop && constop == orig_constop)
8976 return NULL_RTX;
8978 /* Otherwise, return an AND. */
8979 return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
8983 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8984 in MODE.
8986 Return an equivalent form, if different from X. Otherwise, return X. If
8987 X is zero, we are to always construct the equivalent form. */
8989 static rtx
8990 simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
8991 unsigned HOST_WIDE_INT constop)
8993 rtx tem = simplify_and_const_int_1 (mode, varop, constop);
8994 if (tem)
8995 return tem;
8997 if (!x)
8998 x = simplify_gen_binary (AND, GET_MODE (varop), varop,
8999 gen_int_mode (constop, mode));
9000 if (GET_MODE (x) != mode)
9001 x = gen_lowpart (mode, x);
9002 return x;
9005 /* Given a REG, X, compute which bits in X can be nonzero.
9006 We don't care about bits outside of those defined in MODE.
9008 For most X this is simply GET_MODE_MASK (MODE), but if X is
9009 a shift, AND, or zero_extract, we can do better. */
9011 static rtx
9012 reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
9013 const_rtx known_x ATTRIBUTE_UNUSED,
9014 enum machine_mode known_mode ATTRIBUTE_UNUSED,
9015 unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
9016 unsigned HOST_WIDE_INT *nonzero)
9018 rtx tem;
9019 reg_stat_type *rsp;
9021 /* If X is a register whose nonzero bits value is current, use it.
9022 Otherwise, if X is a register whose value we can find, use that
9023 value. Otherwise, use the previously-computed global nonzero bits
9024 for this register. */
9026 rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
9027 if (rsp->last_set_value != 0
9028 && (rsp->last_set_mode == mode
9029 || (GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
9030 && GET_MODE_CLASS (mode) == MODE_INT))
9031 && ((rsp->last_set_label >= label_tick_ebb_start
9032 && rsp->last_set_label < label_tick)
9033 || (rsp->last_set_label == label_tick
9034 && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
9035 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
9036 && REG_N_SETS (REGNO (x)) == 1
9037 && !REGNO_REG_SET_P
9038 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
9040 *nonzero &= rsp->last_set_nonzero_bits;
9041 return NULL;
9044 tem = get_last_value (x);
9046 if (tem)
9048 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
9049 /* If X is narrower than MODE and TEM is a non-negative
9050 constant that would appear negative in the mode of X,
9051 sign-extend it for use in reg_nonzero_bits because some
9052 machines (maybe most) will actually do the sign-extension
9053 and this is the conservative approach.
9055 ??? For 2.5, try to tighten up the MD files in this regard
9056 instead of this kludge. */
9058 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
9059 && CONST_INT_P (tem)
9060 && INTVAL (tem) > 0
9061 && 0 != (INTVAL (tem)
9062 & ((HOST_WIDE_INT) 1
9063 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9064 tem = GEN_INT (INTVAL (tem)
9065 | ((HOST_WIDE_INT) (-1)
9066 << GET_MODE_BITSIZE (GET_MODE (x))));
9067 #endif
9068 return tem;
9070 else if (nonzero_sign_valid && rsp->nonzero_bits)
9072 unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;
9074 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
9075 /* We don't know anything about the upper bits. */
9076 mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
9077 *nonzero &= mask;
9080 return NULL;
9083 /* Return the number of bits at the high-order end of X that are known to
9084 be equal to the sign bit. X will be used in mode MODE; if MODE is
9085 VOIDmode, X will be used in its own mode. The returned value will always
9086 be between 1 and the number of bits in MODE. */
9088 static rtx
9089 reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
9090 const_rtx known_x ATTRIBUTE_UNUSED,
9091 enum machine_mode known_mode
9092 ATTRIBUTE_UNUSED,
9093 unsigned int known_ret ATTRIBUTE_UNUSED,
9094 unsigned int *result)
9096 rtx tem;
9097 reg_stat_type *rsp;
9099 rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
9100 if (rsp->last_set_value != 0
9101 && rsp->last_set_mode == mode
9102 && ((rsp->last_set_label >= label_tick_ebb_start
9103 && rsp->last_set_label < label_tick)
9104 || (rsp->last_set_label == label_tick
9105 && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
9106 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
9107 && REG_N_SETS (REGNO (x)) == 1
9108 && !REGNO_REG_SET_P
9109 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
9111 *result = rsp->last_set_sign_bit_copies;
9112 return NULL;
9115 tem = get_last_value (x);
9116 if (tem != 0)
9117 return tem;
9119 if (nonzero_sign_valid && rsp->sign_bit_copies != 0
9120 && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
9121 *result = rsp->sign_bit_copies;
9123 return NULL;
9126 /* Return the number of "extended" bits there are in X, when interpreted
9127 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
9128 unsigned quantities, this is the number of high-order zero bits.
9129 For signed quantities, this is the number of copies of the sign bit
9130 minus 1. In both cases, this function returns the number of "spare"
9131 bits. For example, if two quantities for which this function returns
9132 at least 1 are added, the addition is known not to overflow.
9134 This function will always return 0 unless called during combine, which
9135 implies that it must be called from a define_split. */
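/* For instance (a hypothetical SImode case), if nonzero_bits shows only
   the low 8 bits of X may be set, the unsigned count is
   32 - 1 - floor_log2 (0xff) == 24 spare high-order zero bits.  */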
9137 unsigned int
9138 extended_count (const_rtx x, enum machine_mode mode, int unsignedp)
9140 if (nonzero_sign_valid == 0)
9141 return 0;
9143 return (unsignedp
9144 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9145 ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
9146 - floor_log2 (nonzero_bits (x, mode)))
9147 : 0)
9148 : num_sign_bit_copies (x, mode) - 1);
9151 /* This function is called from `simplify_shift_const' to merge two
9152 outer operations. Specifically, we have already found that we need
9153 to perform operation *POP0 with constant *PCONST0 at the outermost
9154 position. We would now like to also perform OP1 with constant CONST1
9155 (with *POP0 being done last).
9157 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
9158 the resulting operation. *PCOMP_P is set to 1 if we would need to
9159 complement the innermost operand, otherwise it is unchanged.
9161 MODE is the mode in which the operation will be done. No bits outside
9162 the width of this mode matter. It is assumed that the width of this mode
9163 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
9165 If *POP0 or OP1 is UNKNOWN, it means no operation is required. Only NEG, PLUS,
9166 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
9167 result is simply *PCONST0.
9169 If the resulting operation cannot be expressed as one operation, we
9170 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
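/* For example (a sketch): if *POP0 is XOR and OP1 is AND with the same
   constant B, then (a & B) ^ B == (~a) & B, so *POP0 becomes AND and
   *PCOMP_P is set; e.g. with B == 0b1100 and a == 0b1010 both sides
   evaluate to 0b0100.  */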
9172 static int
9173 merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
9175 enum rtx_code op0 = *pop0;
9176 HOST_WIDE_INT const0 = *pconst0;
9178 const0 &= GET_MODE_MASK (mode);
9179 const1 &= GET_MODE_MASK (mode);
9181 /* If OP0 is an AND, clear unimportant bits in CONST1. */
9182 if (op0 == AND)
9183 const1 &= const0;
9185 /* If OP0 or OP1 is UNKNOWN, this is easy. Similarly if they are the same or
9186 if OP0 is SET. */
9188 if (op1 == UNKNOWN || op0 == SET)
9189 return 1;
9191 else if (op0 == UNKNOWN)
9192 op0 = op1, const0 = const1;
9194 else if (op0 == op1)
9196 switch (op0)
9198 case AND:
9199 const0 &= const1;
9200 break;
9201 case IOR:
9202 const0 |= const1;
9203 break;
9204 case XOR:
9205 const0 ^= const1;
9206 break;
9207 case PLUS:
9208 const0 += const1;
9209 break;
9210 case NEG:
9211 op0 = UNKNOWN;
9212 break;
9213 default:
9214 break;
9218 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
9219 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
9220 return 0;
9222 /* If the two constants aren't the same, we can't do anything. The
9223 remaining six cases can all be done. */
9224 else if (const0 != const1)
9225 return 0;
9227 else
9228 switch (op0)
9230 case IOR:
9231 if (op1 == AND)
9232 /* (a & b) | b == b */
9233 op0 = SET;
9234 else /* op1 == XOR */
9235 /* (a ^ b) | b == a | b */
9237 break;
9239 case XOR:
9240 if (op1 == AND)
9241 /* (a & b) ^ b == (~a) & b */
9242 op0 = AND, *pcomp_p = 1;
9243 else /* op1 == IOR */
9244 /* (a | b) ^ b == a & ~b */
9245 op0 = AND, const0 = ~const0;
9246 break;
9248 case AND:
9249 if (op1 == IOR)
9250 /* (a | b) & b == b */
9251 op0 = SET;
9252 else /* op1 == XOR */
9253 /* (a ^ b) & b == (~a) & b */
9254 *pcomp_p = 1;
9255 break;
9256 default:
9257 break;
9260 /* Check for NO-OP cases. */
9261 const0 &= GET_MODE_MASK (mode);
9262 if (const0 == 0
9263 && (op0 == IOR || op0 == XOR || op0 == PLUS))
9264 op0 = UNKNOWN;
9265 else if (const0 == 0 && op0 == AND)
9266 op0 = SET;
9267 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
9268 && op0 == AND)
9269 op0 = UNKNOWN;
9271 *pop0 = op0;
9273 /* ??? Slightly redundant with the above mask, but not entirely.
9274 Moving this above means we'd have to sign-extend the mode mask
9275 for the final test. */
9276 if (op0 != UNKNOWN && op0 != NEG)
9277 *pconst0 = trunc_int_for_mode (const0, mode);
9279 return 1;
9282 /* A helper to simplify_shift_const_1 to determine the mode we can perform
9283 the shift in. The original shift operation CODE is performed on OP in
9284 ORIG_MODE. Return the wider mode MODE if we can perform the operation
9285 in that mode. Return ORIG_MODE otherwise. We can also assume that the
9286 result of the shift is subject to operation OUTER_CODE with operand
9287 OUTER_CONST. */
9289 static enum machine_mode
9290 try_widen_shift_mode (enum rtx_code code, rtx op, int count,
9291 enum machine_mode orig_mode, enum machine_mode mode,
9292 enum rtx_code outer_code, HOST_WIDE_INT outer_const)
9294 if (orig_mode == mode)
9295 return mode;
9296 gcc_assert (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (orig_mode));
9298 /* In general we can't perform the shift in a wider mode for right shifts and rotates. */
9299 switch (code)
9301 case ASHIFTRT:
9302 /* We can still widen if the bits brought in from the left are identical
9303 to the sign bit of ORIG_MODE. */
9304 if (num_sign_bit_copies (op, mode)
9305 > (unsigned) (GET_MODE_BITSIZE (mode)
9306 - GET_MODE_BITSIZE (orig_mode)))
9307 return mode;
9308 return orig_mode;
9310 case LSHIFTRT:
9311 /* Similarly here but with zero bits. */
9312 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9313 && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
9314 return mode;
9316 /* We can also widen if the bits brought in will be masked off. This
9317 operation is performed in ORIG_MODE. */
9318 if (outer_code == AND)
9320 int care_bits = low_bitmask_len (orig_mode, outer_const);
9322 if (care_bits >= 0
9323 && GET_MODE_BITSIZE (orig_mode) - care_bits >= count)
9324 return mode;
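      /* A worked instance (a sketch): widening a QImode LSHIFTRT by 3
	 whose result feeds (and ... 0x1f) is safe, since
	 low_bitmask_len gives 5 care bits and 8 - 5 >= 3, so every bit
	 brought in from the left is masked off afterwards.  */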
9326 /* fall through */
9328 case ROTATE:
9329 return orig_mode;
9331 case ROTATERT:
9332 gcc_unreachable ();
9334 default:
9335 return mode;
9339 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
9340 The result of the shift is RESULT_MODE. Return NULL_RTX if we cannot
9341 simplify it. Otherwise, return a simplified value.
9343 The shift is normally computed in the widest mode we find in VAROP, as
9344 long as it isn't a different number of words than RESULT_MODE. Exceptions
9345 are ASHIFTRT and ROTATE, which are always done in their original mode. */
9347 static rtx
9348 simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
9349 rtx varop, int orig_count)
9351 enum rtx_code orig_code = code;
9352 rtx orig_varop = varop;
9353 int count;
9354 enum machine_mode mode = result_mode;
9355 enum machine_mode shift_mode, tmode;
9356 unsigned int mode_words
9357 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
9358 /* We form (outer_op (code varop count) (outer_const)). */
9359 enum rtx_code outer_op = UNKNOWN;
9360 HOST_WIDE_INT outer_const = 0;
9361 int complement_p = 0;
9362 rtx new_rtx, x;
9364 /* Make sure to truncate the "natural" shift on the way in. We don't
9365 want to do this inside the loop as it makes it more difficult to
9366 combine shifts. */
9367 if (SHIFT_COUNT_TRUNCATED)
9368 orig_count &= GET_MODE_BITSIZE (mode) - 1;
9370 /* If we were given an invalid count, don't do anything except exactly
9371 what was requested. */
9373 if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
9374 return NULL_RTX;
9376 count = orig_count;
9378 /* Unless one of the branches of the `if' in this loop does a `continue',
9379 we will `break' the loop after the `if'. */
9381 while (count != 0)
9383 /* If we have an operand of (clobber (const_int 0)), fail. */
9384 if (GET_CODE (varop) == CLOBBER)
9385 return NULL_RTX;
9387 /* Convert ROTATERT to ROTATE. */
9388 if (code == ROTATERT)
9390 unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
9391 code = ROTATE;
9392 if (VECTOR_MODE_P (result_mode))
9393 count = bitsize / GET_MODE_NUNITS (result_mode) - count;
9394 else
9395 count = bitsize - count;
9398 shift_mode = try_widen_shift_mode (code, varop, count, result_mode,
9399 mode, outer_op, outer_const);
9401 /* Handle cases where the count is greater than the size of the mode
9402 minus 1. For ASHIFT, use the size minus one as the count (this can
9403 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
9404 take the count modulo the size. For other shifts, the result is
9405 zero.
9407 Since these shifts are being produced by the compiler by combining
9408 multiple operations, each of which are defined, we know what the
9409 result is supposed to be. */
9411 if (count > (GET_MODE_BITSIZE (shift_mode) - 1))
9413 if (code == ASHIFTRT)
9414 count = GET_MODE_BITSIZE (shift_mode) - 1;
9415 else if (code == ROTATE || code == ROTATERT)
9416 count %= GET_MODE_BITSIZE (shift_mode);
9417 else
9419 /* We can't simply return zero because there may be an
9420 outer op. */
9421 varop = const0_rtx;
9422 count = 0;
9423 break;
9427 /* If we discovered we had to complement VAROP, leave. Making a NOT
9428 here would cause an infinite loop. */
9429 if (complement_p)
9430 break;
9432 /* An arithmetic right shift of a quantity known to be -1 or 0
9433 is a no-op. */
9434 if (code == ASHIFTRT
9435 && (num_sign_bit_copies (varop, shift_mode)
9436 == GET_MODE_BITSIZE (shift_mode)))
9438 count = 0;
9439 break;
9442 /* If we are doing an arithmetic right shift and discarding all but
9443 the sign bit copies, this is equivalent to doing a shift by the
9444 bitsize minus one. Convert it into that shift because it will often
9445 allow other simplifications. */
9447 if (code == ASHIFTRT
9448 && (count + num_sign_bit_copies (varop, shift_mode)
9449 >= GET_MODE_BITSIZE (shift_mode)))
9450 count = GET_MODE_BITSIZE (shift_mode) - 1;
9452 /* We simplify the tests below and elsewhere by converting
9453 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9454 `make_compound_operation' will convert it to an ASHIFTRT for
9455 those machines (such as VAX) that don't have an LSHIFTRT. */
9456 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9457 && code == ASHIFTRT
9458 && ((nonzero_bits (varop, shift_mode)
9459 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9460 == 0))
9461 code = LSHIFTRT;
9463 if (((code == LSHIFTRT
9464 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9465 && !(nonzero_bits (varop, shift_mode) >> count))
9466 || (code == ASHIFT
9467 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9468 && !((nonzero_bits (varop, shift_mode) << count)
9469 & GET_MODE_MASK (shift_mode))))
9470 && !side_effects_p (varop))
9471 varop = const0_rtx;
9473 switch (GET_CODE (varop))
9475 case SIGN_EXTEND:
9476 case ZERO_EXTEND:
9477 case SIGN_EXTRACT:
9478 case ZERO_EXTRACT:
9479 new_rtx = expand_compound_operation (varop);
9480 if (new_rtx != varop)
9482 varop = new_rtx;
9483 continue;
9485 break;
9487 case MEM:
9488 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9489 minus the width of a smaller mode, we can do this with a
9490 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
9491 if ((code == ASHIFTRT || code == LSHIFTRT)
9492 && ! mode_dependent_address_p (XEXP (varop, 0))
9493 && ! MEM_VOLATILE_P (varop)
9494 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9495 MODE_INT, 1)) != BLKmode)
9497 new_rtx = adjust_address_nv (varop, tmode,
9498 BYTES_BIG_ENDIAN ? 0
9499 : count / BITS_PER_UNIT);
9501 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9502 : ZERO_EXTEND, mode, new_rtx);
9503 count = 0;
9504 continue;
9506 break;
9508 case SUBREG:
9509 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9510 the same number of words as what we've seen so far. Then store
9511 the widest mode in MODE. */
9512 if (subreg_lowpart_p (varop)
9513 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9514 > GET_MODE_SIZE (GET_MODE (varop)))
9515 && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9516 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9517 == mode_words)
9519 varop = SUBREG_REG (varop);
9520 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9521 mode = GET_MODE (varop);
9522 continue;
9524 break;
9526 case MULT:
9527 /* Some machines use MULT instead of ASHIFT because MULT
9528 is cheaper. But it is still better on those machines to
9529 merge two shifts into one. */
9530 if (CONST_INT_P (XEXP (varop, 1))
9531 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9533 varop
9534 = simplify_gen_binary (ASHIFT, GET_MODE (varop),
9535 XEXP (varop, 0),
9536 GEN_INT (exact_log2 (
9537 INTVAL (XEXP (varop, 1)))));
9538 continue;
9540 break;
9542 case UDIV:
9543 /* Similar, for when divides are cheaper. */
9544 if (CONST_INT_P (XEXP (varop, 1))
9545 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9547 varop
9548 = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
9549 XEXP (varop, 0),
9550 GEN_INT (exact_log2 (
9551 INTVAL (XEXP (varop, 1)))));
9552 continue;
9554 break;
9556 case ASHIFTRT:
9557 /* If we are extracting just the sign bit of an arithmetic
9558 right shift, that shift is not needed. However, the sign
9559 bit of a wider mode may be different from what would be
9560 interpreted as the sign bit in a narrower mode, so, if
9561 the result is narrower, don't discard the shift. */
9562 if (code == LSHIFTRT
9563 && count == (GET_MODE_BITSIZE (result_mode) - 1)
9564 && (GET_MODE_BITSIZE (result_mode)
9565 >= GET_MODE_BITSIZE (GET_MODE (varop))))
9567 varop = XEXP (varop, 0);
9568 continue;
9571 /* ... fall through ... */
9573 case LSHIFTRT:
9574 case ASHIFT:
9575 case ROTATE:
9576 /* Here we have two nested shifts. The result is usually the
9577 AND of a new shift with a mask. We compute the result below. */
9578 if (CONST_INT_P (XEXP (varop, 1))
9579 && INTVAL (XEXP (varop, 1)) >= 0
9580 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9581 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9582 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9583 && !VECTOR_MODE_P (result_mode))
9585 enum rtx_code first_code = GET_CODE (varop);
9586 unsigned int first_count = INTVAL (XEXP (varop, 1));
9587 unsigned HOST_WIDE_INT mask;
9588 rtx mask_rtx;
9590 /* We have one common special case. We can't do any merging if
9591 the inner code is an ASHIFTRT of a smaller mode. However, if
9592 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9593 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9594 we can convert it to
9595 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9596 This simplifies certain SIGN_EXTEND operations. */
9597 if (code == ASHIFT && first_code == ASHIFTRT
9598 && count == (GET_MODE_BITSIZE (result_mode)
9599 - GET_MODE_BITSIZE (GET_MODE (varop))))
9601 /* C3 has the low-order C1 bits zero. */
9603 mask = (GET_MODE_MASK (mode)
9604 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9606 varop = simplify_and_const_int (NULL_RTX, result_mode,
9607 XEXP (varop, 0), mask);
9608 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9609 varop, count);
9610 count = first_count;
9611 code = ASHIFTRT;
9612 continue;
9615 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9616 than C1 high-order bits equal to the sign bit, we can convert
9617 this to either an ASHIFT or an ASHIFTRT depending on the
9618 two counts.
9620 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9622 if (code == ASHIFTRT && first_code == ASHIFT
9623 && GET_MODE (varop) == shift_mode
9624 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9625 > first_count))
9627 varop = XEXP (varop, 0);
9628 count -= first_count;
9629 if (count < 0)
9631 count = -count;
9632 code = ASHIFT;
9635 continue;
9638 /* There are some cases we can't do. If CODE is ASHIFTRT,
9639 we can only do this if FIRST_CODE is also ASHIFTRT.
9641 We can't do the case when CODE is ROTATE and FIRST_CODE is
9642 ASHIFTRT.
9644 If the mode of this shift is not the mode of the outer shift,
9645 we can't do this if either shift is a right shift or ROTATE.
9647 Finally, we can't do any of these if the mode is too wide
9648 unless the codes are the same.
9650 Handle the case where the shift codes are the same
9651 first. */
9653 if (code == first_code)
9655 if (GET_MODE (varop) != result_mode
9656 && (code == ASHIFTRT || code == LSHIFTRT
9657 || code == ROTATE))
9658 break;
9660 count += first_count;
9661 varop = XEXP (varop, 0);
9662 continue;
9665 if (code == ASHIFTRT
9666 || (code == ROTATE && first_code == ASHIFTRT)
9667 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9668 || (GET_MODE (varop) != result_mode
9669 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9670 || first_code == ROTATE
9671 || code == ROTATE)))
9672 break;
9674 /* To compute the mask to apply after the shift, shift the
9675 nonzero bits of the inner shift the same way the
9676 outer shift will. */
9678 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9680 mask_rtx
9681 = simplify_const_binary_operation (code, result_mode, mask_rtx,
9682 GEN_INT (count));
9684 /* Give up if we can't compute an outer operation to use. */
9685 if (mask_rtx == 0
9686 || !CONST_INT_P (mask_rtx)
9687 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9688 INTVAL (mask_rtx),
9689 result_mode, &complement_p))
9690 break;
9692 /* If the shifts are in the same direction, we add the
9693 counts. Otherwise, we subtract them. */
9694 if ((code == ASHIFTRT || code == LSHIFTRT)
9695 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9696 count += first_count;
9697 else
9698 count -= first_count;
9700 /* If COUNT is positive, the new shift is usually CODE,
9701 except for the two exceptions below, in which case it is
9702 FIRST_CODE. If the count is negative, FIRST_CODE should
9703 always be used. */
9704 if (count > 0
9705 && ((first_code == ROTATE && code == ASHIFT)
9706 || (first_code == ASHIFTRT && code == LSHIFTRT)))
9707 code = first_code;
9708 else if (count < 0)
9709 code = first_code, count = -count;
9711 varop = XEXP (varop, 0);
9712 continue;
9715 /* If we have (A << B << C) for any shift, we can convert this to
9716 (A << C << B). This wins if A is a constant. Only try this if
9717 B is not a constant. */
9719 else if (GET_CODE (varop) == code
9720 && CONST_INT_P (XEXP (varop, 0))
9721 && !CONST_INT_P (XEXP (varop, 1)))
9723 rtx new_rtx = simplify_const_binary_operation (code, mode,
9724 XEXP (varop, 0),
9725 GEN_INT (count));
9726 varop = gen_rtx_fmt_ee (code, mode, new_rtx, XEXP (varop, 1));
9727 count = 0;
9728 continue;
9730 break;
9732 case NOT:
9733 if (VECTOR_MODE_P (mode))
9734 break;
9736 /* Make this fit the case below. */
9737 varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9738 GEN_INT (GET_MODE_MASK (mode)));
9739 continue;
9741 case IOR:
9742 case AND:
9743 case XOR:
9744 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9745 with C the size of VAROP - 1 and the shift is logical if
9746 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9747 we have an (le X 0) operation. If we have an arithmetic shift
9748 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9749 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9751 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9752 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9753 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9754 && (code == LSHIFTRT || code == ASHIFTRT)
9755 && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9756 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9758 count = 0;
9759 varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9760 const0_rtx);
9762 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9763 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9765 continue;
9768 /* If we have (shift (logical)), move the logical to the outside
9769 to allow it to possibly combine with another logical and the
9770 shift to combine with another shift. This also canonicalizes to
9771 what a ZERO_EXTRACT looks like. Also, some machines have
9772 (and (shift)) insns. */
9774 if (CONST_INT_P (XEXP (varop, 1))
9775 /* We can't do this if we have (ashiftrt (xor)) and the
9776 constant has its sign bit set in shift_mode. */
9777 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9778 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9779 shift_mode))
9780 && (new_rtx = simplify_const_binary_operation (code, result_mode,
9781 XEXP (varop, 1),
9782 GEN_INT (count))) != 0
9783 && CONST_INT_P (new_rtx)
9784 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9785 INTVAL (new_rtx), result_mode, &complement_p))
9787 varop = XEXP (varop, 0);
9788 continue;
9791 /* If we can't do that, try to simplify the shift in each arm of the
9792 logical expression, make a new logical expression, and apply
9793 the inverse distributive law. This also can't be done
9794 for some (ashiftrt (xor)). */
9795 if (CONST_INT_P (XEXP (varop, 1))
9796 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9797 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9798 shift_mode)))
9800 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9801 XEXP (varop, 0), count);
9802 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9803 XEXP (varop, 1), count);
9805 varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
9806 lhs, rhs);
9807 varop = apply_distributive_law (varop);
9809 count = 0;
9810 continue;
9812 break;
9814 case EQ:
9815 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9816 says that the sign bit can be tested, FOO has mode MODE, C is
9817 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9818 that may be nonzero. */
9819 if (code == LSHIFTRT
9820 && XEXP (varop, 1) == const0_rtx
9821 && GET_MODE (XEXP (varop, 0)) == result_mode
9822 && count == (GET_MODE_BITSIZE (result_mode) - 1)
9823 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9824 && STORE_FLAG_VALUE == -1
9825 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9826 && merge_outer_ops (&outer_op, &outer_const, XOR,
9827 (HOST_WIDE_INT) 1, result_mode,
9828 &complement_p))
9830 varop = XEXP (varop, 0);
9831 count = 0;
9832 continue;
9834 break;
9836 case NEG:
9837 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9838 than the number of bits in the mode is equivalent to A. */
9839 if (code == LSHIFTRT
9840 && count == (GET_MODE_BITSIZE (result_mode) - 1)
9841 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9843 varop = XEXP (varop, 0);
9844 count = 0;
9845 continue;
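	  /* E.g. (a sketch), in SImode with A known to be 0 or 1,
	     (lshiftrt (neg A) 31) yields A directly: NEG gives 0 or
	     all ones, and the logical shift keeps only the top bit.  */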
9848 /* NEG commutes with ASHIFT since it is multiplication. Move the
9849 NEG outside to allow shifts to combine. */
9850 if (code == ASHIFT
9851 && merge_outer_ops (&outer_op, &outer_const, NEG,
9852 (HOST_WIDE_INT) 0, result_mode,
9853 &complement_p))
9855 varop = XEXP (varop, 0);
9856 continue;
9858 break;
9860 case PLUS:
9861 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9862 is one less than the number of bits in the mode is
9863 equivalent to (xor A 1). */
9864 if (code == LSHIFTRT
9865 && count == (GET_MODE_BITSIZE (result_mode) - 1)
9866 && XEXP (varop, 1) == constm1_rtx
9867 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9868 && merge_outer_ops (&outer_op, &outer_const, XOR,
9869 (HOST_WIDE_INT) 1, result_mode,
9870 &complement_p))
9872 count = 0;
9873 varop = XEXP (varop, 0);
9874 continue;
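	  /* E.g. (a sketch), in SImode with A 0 or 1,
	     (lshiftrt (plus A -1) 31) is (xor A 1): A == 0 gives
	     all ones whose top bit is 1, and A == 1 gives zero.  */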
9877 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9878 that might be nonzero in BAR are those being shifted out and those
9879 bits are known zero in FOO, we can replace the PLUS with FOO.
9880 Similarly in the other operand order. This code occurs when
9881 we are computing the size of a variable-size array. */
9883 if ((code == ASHIFTRT || code == LSHIFTRT)
9884 && count < HOST_BITS_PER_WIDE_INT
9885 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9886 && (nonzero_bits (XEXP (varop, 1), result_mode)
9887 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9889 varop = XEXP (varop, 0);
9890 continue;
9892 else if ((code == ASHIFTRT || code == LSHIFTRT)
9893 && count < HOST_BITS_PER_WIDE_INT
9894 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9895 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9896 >> count)
9897 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9898 & nonzero_bits (XEXP (varop, 1),
9899 result_mode)))
9901 varop = XEXP (varop, 1);
9902 continue;
9905 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9906 if (code == ASHIFT
9907 && CONST_INT_P (XEXP (varop, 1))
9908 && (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode,
9909 XEXP (varop, 1),
9910 GEN_INT (count))) != 0
9911 && CONST_INT_P (new_rtx)
9912 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9913 INTVAL (new_rtx), result_mode, &complement_p))
9915 varop = XEXP (varop, 0);
9916 continue;
9919 /* Check for 'PLUS signbit', which is the canonical form of 'XOR
9920 signbit', and attempt to change the PLUS to an XOR and move it to
9921 the outer operation, as is done above in the AND/IOR/XOR case
9922 for logical shifts. See the logical handling above for the
9923 reasoning behind doing so. */
9924 if (code == LSHIFTRT
9925 && CONST_INT_P (XEXP (varop, 1))
9926 && mode_signbit_p (result_mode, XEXP (varop, 1))
9927 && (new_rtx = simplify_const_binary_operation (code, result_mode,
9928 XEXP (varop, 1),
9929 GEN_INT (count))) != 0
9930 && CONST_INT_P (new_rtx)
9931 && merge_outer_ops (&outer_op, &outer_const, XOR,
9932 INTVAL (new_rtx), result_mode, &complement_p))
9934 varop = XEXP (varop, 0);
9935 continue;
9938 break;
9940 case MINUS:
9941 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9942 with C the size of VAROP - 1 and the shift is logical if
9943 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9944 we have a (gt X 0) operation. If the shift is arithmetic with
9945 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9946 we have a (neg (gt X 0)) operation. */
9948 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9949 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9950 && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9951 && (code == LSHIFTRT || code == ASHIFTRT)
9952 && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
9953 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9954 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9956 count = 0;
9957 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9958 const0_rtx);
9960 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9961 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9963 continue;
9965 break;
9967 case TRUNCATE:
9968 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9969 if the truncate does not affect the value. */
9970 if (code == LSHIFTRT
9971 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9972 && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
9973 && (INTVAL (XEXP (XEXP (varop, 0), 1))
9974 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9975 - GET_MODE_BITSIZE (GET_MODE (varop)))))
9977 rtx varop_inner = XEXP (varop, 0);
9979 varop_inner
9980 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9981 XEXP (varop_inner, 0),
9982 GEN_INT
9983 (count + INTVAL (XEXP (varop_inner, 1))));
9984 varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9985 count = 0;
9986 continue;
9988 break;
9990 default:
9991 break;
9994 break;
9997 shift_mode = try_widen_shift_mode (code, varop, count, result_mode, mode,
9998 outer_op, outer_const);
10000 /* We have now finished analyzing the shift. The result should be
10001 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
10002 OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
10003 to the result of the shift. OUTER_CONST is the relevant constant,
10004 but we must turn off all bits turned off in the shift. */
10006 if (outer_op == UNKNOWN
10007 && orig_code == code && orig_count == count
10008 && varop == orig_varop
10009 && shift_mode == GET_MODE (varop))
10010 return NULL_RTX;
10012 /* Make a SUBREG if necessary. If we can't make it, fail. */
10013 varop = gen_lowpart (shift_mode, varop);
10014 if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
10015 return NULL_RTX;
10017 /* If we have an outer operation and we just made a shift, it is
10018 possible that we could have simplified the shift were it not
10019 for the outer operation. So try to do the simplification
10020 recursively. */
10022 if (outer_op != UNKNOWN)
10023 x = simplify_shift_const_1 (code, shift_mode, varop, count);
10024 else
10025 x = NULL_RTX;
10027 if (x == NULL_RTX)
10028 x = simplify_gen_binary (code, shift_mode, varop, GEN_INT (count));
10030 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
10031 turn off all the bits that the shift would have turned off. */
10032 if (orig_code == LSHIFTRT && result_mode != shift_mode)
10033 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
10034 GET_MODE_MASK (result_mode) >> orig_count);
10036 /* Do the remainder of the processing in RESULT_MODE. */
10037 x = gen_lowpart_or_truncate (result_mode, x);
10039 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
10040 operation. */
10041 if (complement_p)
10042 x = simplify_gen_unary (NOT, result_mode, x, result_mode);
10044 if (outer_op != UNKNOWN)
10046 if (GET_RTX_CLASS (outer_op) != RTX_UNARY
10047 && GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
10048 outer_const = trunc_int_for_mode (outer_const, result_mode);
10050 if (outer_op == AND)
10051 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
10052 else if (outer_op == SET)
10054 /* This means that we have determined that the result is
10055 equivalent to a constant. This should be rare. */
10056 if (!side_effects_p (x))
10057 x = GEN_INT (outer_const);
10059 else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
10060 x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
10061 else
10062 x = simplify_gen_binary (outer_op, result_mode, x,
10063 GEN_INT (outer_const));
10066 return x;
10069 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
10070 The result of the shift is RESULT_MODE. If we cannot simplify it,
10071 return X or, if it is NULL, synthesize the expression with
10072 simplify_gen_binary. Otherwise, return a simplified value.
10074 The shift is normally computed in the widest mode we find in VAROP, as
10075 long as it isn't a different number of words than RESULT_MODE. Exceptions
10076 are ASHIFTRT and ROTATE, which are always done in their original mode. */
10078 static rtx
10079 simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
10080 rtx varop, int count)
10082 rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
10083 if (tem)
10084 return tem;
10086 if (!x)
10087 x = simplify_gen_binary (code, GET_MODE (varop), varop, GEN_INT (count));
10088 if (GET_MODE (x) != result_mode)
10089 x = gen_lowpart (result_mode, x);
10090 return x;
10094 /* Like recog, but we receive the address of a pointer to a new pattern.
10095 We try to match the rtx that the pointer points to.
10096 If that fails, we may try to modify or replace the pattern,
10097 storing the replacement into the same pointer object.
10099 Modifications include deletion or addition of CLOBBERs.
10101 PNOTES is a pointer to a location where any REG_UNUSED notes added for
10102 the CLOBBERs are placed.
10104 The value is the final insn code from the pattern ultimately matched,
10105 or -1. */
10107 static int
10108 recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
10110 rtx pat = *pnewpat;
10111 int insn_code_number;
10112 int num_clobbers_to_add = 0;
10113 int i;
10114 rtx notes = 0;
10115 rtx old_notes, old_pat;
10117 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
10118 we use to indicate that something didn't match. If we find such a
10119 thing, force rejection. */
10120 if (GET_CODE (pat) == PARALLEL)
10121 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
10122 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
10123 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
10124 return -1;
10126 old_pat = PATTERN (insn);
10127 old_notes = REG_NOTES (insn);
10128 PATTERN (insn) = pat;
10129 REG_NOTES (insn) = 0;
10131 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
10132 if (dump_file && (dump_flags & TDF_DETAILS))
10134 if (insn_code_number < 0)
10135 fputs ("Failed to match this instruction:\n", dump_file);
10136 else
10137 fputs ("Successfully matched this instruction:\n", dump_file);
10138 print_rtl_single (dump_file, pat);
10141 /* If the pattern isn't recognized, it may be that we previously had an insn
10142 that clobbered some register as a side effect, but the combined
10143 insn doesn't need to do that. So try once more without the clobbers
10144 unless this represents an ASM insn. */
10146 if (insn_code_number < 0 && ! check_asm_operands (pat)
10147 && GET_CODE (pat) == PARALLEL)
10149 int pos;
10151 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
10152 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
10154 if (i != pos)
10155 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
10156 pos++;
10159 SUBST_INT (XVECLEN (pat, 0), pos);
10161 if (pos == 1)
10162 pat = XVECEXP (pat, 0, 0);
10164 PATTERN (insn) = pat;
10165 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
10166 if (dump_file && (dump_flags & TDF_DETAILS))
10168 if (insn_code_number < 0)
10169 fputs ("Failed to match this instruction:\n", dump_file);
10170 else
10171 fputs ("Successfully matched this instruction:\n", dump_file);
10172 print_rtl_single (dump_file, pat);
10175 PATTERN (insn) = old_pat;
10176 REG_NOTES (insn) = old_notes;
10178 /* Recognize all noop sets, these will be killed by followup pass. */
10179 if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
10180 insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
10182 /* If we had any clobbers to add, make a new pattern that contains
10183 them. Then check to make sure that all of them are dead. */
10184 if (num_clobbers_to_add)
10186 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
10187 rtvec_alloc (GET_CODE (pat) == PARALLEL
10188 ? (XVECLEN (pat, 0)
10189 + num_clobbers_to_add)
10190 : num_clobbers_to_add + 1));
10192 if (GET_CODE (pat) == PARALLEL)
10193 for (i = 0; i < XVECLEN (pat, 0); i++)
10194 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
10195 else
10196 XVECEXP (newpat, 0, 0) = pat;
10198 add_clobbers (newpat, insn_code_number);
10200 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
10201 i < XVECLEN (newpat, 0); i++)
10203 if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
10204 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
10205 return -1;
10206 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
10208 gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
10209 notes = alloc_reg_note (REG_UNUSED,
10210 XEXP (XVECEXP (newpat, 0, i), 0), notes);
10213 pat = newpat;
10216 *pnewpat = pat;
10217 *pnotes = notes;
10219 return insn_code_number;
10222 /* Like gen_lowpart_general but for use by combine. In combine it
10223 is not possible to create any new pseudoregs. However, it is
10224 safe to create invalid memory addresses, because combine will
10225 try to recognize them and all they will do is make the combine
10226 attempt fail.
10228 If for some reason this cannot do its job, an rtx
10229 (clobber (const_int 0)) is returned.
10230 An insn containing that will not be recognized. */
10232 static rtx
10233 gen_lowpart_for_combine (enum machine_mode omode, rtx x)
10235 enum machine_mode imode = GET_MODE (x);
10236 unsigned int osize = GET_MODE_SIZE (omode);
10237 unsigned int isize = GET_MODE_SIZE (imode);
10238 rtx result;
10240 if (omode == imode)
10241 return x;
10243 /* Return identity if this is a CONST or symbolic reference. */
10244 if (omode == Pmode
10245 && (GET_CODE (x) == CONST
10246 || GET_CODE (x) == SYMBOL_REF
10247 || GET_CODE (x) == LABEL_REF))
10248 return x;
10250 /* We can only support OMODE being wider than a word if X is a
10251 constant integer or has a mode of the same size. */
10252 if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
10253 && ! ((imode == VOIDmode
10254 && (CONST_INT_P (x)
10255 || GET_CODE (x) == CONST_DOUBLE))
10256 || isize == osize))
10257 goto fail;
10259 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
10260 won't know what to do. So we will strip off the SUBREG here and
10261 process normally. */
10262 if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
10264 x = SUBREG_REG (x);
10266 /* For use in case we fall down into the address adjustments
10267 further below, we need to adjust the known mode and size of
10268 x (imode and isize), since we just adjusted x. */
10269 imode = GET_MODE (x);
10271 if (imode == omode)
10272 return x;
10274 isize = GET_MODE_SIZE (imode);
10277 result = gen_lowpart_common (omode, x);
10279 if (result)
10280 return result;
10282 if (MEM_P (x))
10284 int offset = 0;
10286 /* Refuse to work on a volatile memory ref or one with a mode-dependent
10287 address. */
10288 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
10289 goto fail;
10291 /* If we want to refer to something bigger than the original memref,
10292 generate a paradoxical subreg instead. That will force a reload
10293 of the original memref X. */
10294 if (isize < osize)
10295 return gen_rtx_SUBREG (omode, x, 0);
10297 if (WORDS_BIG_ENDIAN)
10298 offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
10300 /* Adjust the address so that the address-after-the-data is
10301 unchanged. */
10302 if (BYTES_BIG_ENDIAN)
10303 offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
10305 return adjust_address_nv (x, omode, offset);
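      /* A worked offset computation (a sketch): reading the SImode
	 lowpart of a DImode MEM with 4-byte words on a
	 WORDS_BIG_ENDIAN target gives offset = MAX (8, 4) - MAX (4, 4)
	 = 4, i.e. the low-order word sits at the higher address.  */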
10308 /* If X is a comparison operator, rewrite it in a new mode. This
10309 probably won't match, but may allow further simplifications. */
10310 else if (COMPARISON_P (x))
10311 return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
10313 /* If we couldn't simplify X any other way, just enclose it in a
10314 SUBREG. Normally, this SUBREG won't match, but some patterns may
10315 include an explicit SUBREG or we may simplify it further in combine. */
10316 else
10318 int offset = 0;
10319 rtx res;
10321 offset = subreg_lowpart_offset (omode, imode);
10322 if (imode == VOIDmode)
10324 imode = int_mode_for_mode (omode);
10325 x = gen_lowpart_common (imode, x);
10326 if (x == NULL)
10327 goto fail;
10329 res = simplify_gen_subreg (omode, x, imode, offset);
10330 if (res)
10331 return res;
10334 fail:
10335 return gen_rtx_CLOBBER (omode, const0_rtx);
10338 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
10339 comparison code that will be tested.
10341 The result is a possibly different comparison code to use. *POP0 and
10342 *POP1 may be updated.
10344 It is possible that we might detect that a comparison is either always
10345 true or always false. However, we do not perform general constant
10346 folding in combine, so this knowledge isn't useful. Such tautologies
10347 should have been detected earlier. Hence we ignore all such cases. */
10349 static enum rtx_code
10350 simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
10352 rtx op0 = *pop0;
10353 rtx op1 = *pop1;
10354 rtx tem, tem1;
10355 int i;
10356 enum machine_mode mode, tmode;
10358 /* Try a few ways of applying the same transformation to both operands. */
10359 while (1)
10361 #ifndef WORD_REGISTER_OPERATIONS
10362 /* The test below this one won't handle SIGN_EXTENDs on these machines,
10363 so check specially. */
10364 if (code != GTU && code != GEU && code != LTU && code != LEU
10365 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10366 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10367 && GET_CODE (XEXP (op1, 0)) == ASHIFT
10368 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10369 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10370 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10371 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10372 && CONST_INT_P (XEXP (op0, 1))
10373 && XEXP (op0, 1) == XEXP (op1, 1)
10374 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10375 && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
10376 && (INTVAL (XEXP (op0, 1))
10377 == (GET_MODE_BITSIZE (GET_MODE (op0))
10378 - (GET_MODE_BITSIZE
10379 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10381 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10382 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10384 #endif
10386 /* If both operands are the same constant shift, see if we can ignore the
10387 shift. We can if the shift is a rotate or if the bits shifted out of
10388 this shift are known to be zero for both inputs and if the type of
10389 comparison is compatible with the shift. */
10390 if (GET_CODE (op0) == GET_CODE (op1)
10391 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10392 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10393 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10394 && (code != GT && code != LT && code != GE && code != LE))
10395 || (GET_CODE (op0) == ASHIFTRT
10396 && (code != GTU && code != LTU
10397 && code != GEU && code != LEU)))
10398 && CONST_INT_P (XEXP (op0, 1))
10399 && INTVAL (XEXP (op0, 1)) >= 0
10400 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10401 && XEXP (op0, 1) == XEXP (op1, 1))
10403 enum machine_mode mode = GET_MODE (op0);
10404 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10405 int shift_count = INTVAL (XEXP (op0, 1));
10407 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10408 mask &= (mask >> shift_count) << shift_count;
10409 else if (GET_CODE (op0) == ASHIFT)
10410 mask = (mask & (mask << shift_count)) >> shift_count;
10412 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10413 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10414 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10415 else
10416 break;
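/* E.g., for (eq (ashift:SI X 8) (ashift:SI Y 8)) the mask computed
above is 0xffffff; if the nonzero bits of both X and Y lie within that
mask, the bits discarded by the shifts are known zero and we can simply
compare X with Y. */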
10419 /* If both operands are AND's of a paradoxical SUBREG by constant, the
10420 SUBREGs are of the same mode, and, in both cases, the AND would
10421 be redundant if the comparison was done in the narrower mode,
10422 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10423 and the operand's possibly nonzero bits are 0xffffff01; in that case
10424 if we only care about QImode, we don't need the AND). This case
10425 occurs if the output mode of an scc insn is not SImode and
10426 STORE_FLAG_VALUE == 1 (e.g., the 386).
10428 Similarly, check for a case where the AND's are ZERO_EXTEND
10429 operations from some narrower mode even though a SUBREG is not
10430 present. */
10432 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10433 && CONST_INT_P (XEXP (op0, 1))
10434 && CONST_INT_P (XEXP (op1, 1)))
10436 rtx inner_op0 = XEXP (op0, 0);
10437 rtx inner_op1 = XEXP (op1, 0);
10438 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10439 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10440 int changed = 0;
10442 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10443 && (GET_MODE_SIZE (GET_MODE (inner_op0))
10444 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10445 && (GET_MODE (SUBREG_REG (inner_op0))
10446 == GET_MODE (SUBREG_REG (inner_op1)))
10447 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10448 <= HOST_BITS_PER_WIDE_INT)
10449 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10450 GET_MODE (SUBREG_REG (inner_op0)))))
10451 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10452 GET_MODE (SUBREG_REG (inner_op1))))))
10454 op0 = SUBREG_REG (inner_op0);
10455 op1 = SUBREG_REG (inner_op1);
10457 /* The resulting comparison is always unsigned since we masked
10458 off the original sign bit. */
10459 code = unsigned_condition (code);
10461 changed = 1;
10464 else if (c0 == c1)
10465 for (tmode = GET_CLASS_NARROWEST_MODE
10466 (GET_MODE_CLASS (GET_MODE (op0)));
10467 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10468 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10470 op0 = gen_lowpart (tmode, inner_op0);
10471 op1 = gen_lowpart (tmode, inner_op1);
10472 code = unsigned_condition (code);
10473 changed = 1;
10474 break;
10477 if (! changed)
10478 break;
10481 /* If both operands are NOT, we can strip off the outer operation
10482 and adjust the comparison code for swapped operands; similarly for
10483 NEG, except that this must be an equality comparison. */
10484 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10485 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10486 && (code == EQ || code == NE)))
10487 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10489 else
10490 break;
10493 /* If the first operand is a constant, swap the operands and adjust the
10494 comparison code appropriately, but don't do this if the second operand
10495 is already a constant integer. */
10496 if (swap_commutative_operands_p (op0, op1))
10498 tem = op0, op0 = op1, op1 = tem;
10499 code = swap_condition (code);
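/* E.g., (lt (const_int 4) X) becomes (gt X (const_int 4)). */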
10502 /* We now enter a loop during which we will try to simplify the comparison.
10503 For the most part, we are only concerned with comparisons with zero,
10504 but some things may really be comparisons with zero that do not start
10505 out looking that way. */
10507 while (CONST_INT_P (op1))
10509 enum machine_mode mode = GET_MODE (op0);
10510 unsigned int mode_width = GET_MODE_BITSIZE (mode);
10511 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10512 int equality_comparison_p;
10513 int sign_bit_comparison_p;
10514 int unsigned_comparison_p;
10515 HOST_WIDE_INT const_op;
10517 /* We only want to handle integral modes. This catches VOIDmode,
10518 CCmode, and the floating-point modes. An exception is that we
10519 can handle VOIDmode if OP0 is a COMPARE or a comparison
10520 operation. */
10522 if (GET_MODE_CLASS (mode) != MODE_INT
10523 && ! (mode == VOIDmode
10524 && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
10525 break;
10527 /* Get the constant we are comparing against and turn off all bits
10528 not on in our mode. */
10529 const_op = INTVAL (op1);
10530 if (mode != VOIDmode)
10531 const_op = trunc_int_for_mode (const_op, mode);
10532 op1 = GEN_INT (const_op);
10534 /* If we are comparing against a constant power of two and the value
10535 being compared can only have that single bit nonzero (e.g., it was
10536 `and'ed with that bit), we can replace this with a comparison
10537 with zero. */
10538 if (const_op
10539 && (code == EQ || code == NE || code == GE || code == GEU
10540 || code == LT || code == LTU)
10541 && mode_width <= HOST_BITS_PER_WIDE_INT
10542 && exact_log2 (const_op) >= 0
10543 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10545 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10546 op1 = const0_rtx, const_op = 0;
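/* E.g., if OP0 is (and X (const_int 8)), its value is either 0 or 8,
so (eq OP0 (const_int 8)) is equivalent to (ne OP0 (const_int 0)). */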
10549 /* Similarly, if we are comparing a value known to be either -1 or
10550 0 with -1, change it to the opposite comparison against zero. */
10552 if (const_op == -1
10553 && (code == EQ || code == NE || code == GT || code == LE
10554 || code == GEU || code == LTU)
10555 && num_sign_bit_copies (op0, mode) == mode_width)
10557 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10558 op1 = const0_rtx, const_op = 0;
10561 /* Do some canonicalizations based on the comparison code. We prefer
10562 comparisons against zero and then prefer equality comparisons.
10563 If we can reduce the size of a constant, we will do that too. */
10565 switch (code)
10567 case LT:
10568 /* < C is equivalent to <= (C - 1). */
10569 if (const_op > 0)
10571 const_op -= 1;
10572 op1 = GEN_INT (const_op);
10573 code = LE;
10574 /* ... fall through to LE case below. */
10576 else
10577 break;
10579 case LE:
10580 /* <= C is equivalent to < (C + 1); we do this for C < 0. */
10581 if (const_op < 0)
10583 const_op += 1;
10584 op1 = GEN_INT (const_op);
10585 code = LT;
10588 /* If we are doing a <= 0 comparison on a value known to have
10589 a zero sign bit, we can replace this with == 0. */
10590 else if (const_op == 0
10591 && mode_width <= HOST_BITS_PER_WIDE_INT
10592 && (nonzero_bits (op0, mode)
10593 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10594 code = EQ;
10595 break;
10597 case GE:
10598 /* >= C is equivalent to > (C - 1). */
10599 if (const_op > 0)
10601 const_op -= 1;
10602 op1 = GEN_INT (const_op);
10603 code = GT;
10604 /* ... fall through to GT below. */
10606 else
10607 break;
10609 case GT:
10610 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
10611 if (const_op < 0)
10613 const_op += 1;
10614 op1 = GEN_INT (const_op);
10615 code = GE;
10618 /* If we are doing a > 0 comparison on a value known to have
10619 a zero sign bit, we can replace this with != 0. */
10620 else if (const_op == 0
10621 && mode_width <= HOST_BITS_PER_WIDE_INT
10622 && (nonzero_bits (op0, mode)
10623 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10624 code = NE;
10625 break;
10627 case LTU:
10628 /* < C is equivalent to <= (C - 1). */
10629 if (const_op > 0)
10631 const_op -= 1;
10632 op1 = GEN_INT (const_op);
10633 code = LEU;
10634 /* ... fall through ... */
10637 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
10638 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10639 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10641 const_op = 0, op1 = const0_rtx;
10642 code = GE;
10643 break;
10645 else
10646 break;
10648 case LEU:
10649 /* unsigned <= 0 is equivalent to == 0 */
10650 if (const_op == 0)
10651 code = EQ;
10653 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
10654 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10655 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10657 const_op = 0, op1 = const0_rtx;
10658 code = GE;
10660 break;
10662 case GEU:
10663 /* >= C is equivalent to > (C - 1). */
10664 if (const_op > 1)
10666 const_op -= 1;
10667 op1 = GEN_INT (const_op);
10668 code = GTU;
10669 /* ... fall through ... */
10672 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
10673 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10674 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10676 const_op = 0, op1 = const0_rtx;
10677 code = LT;
10678 break;
10680 else
10681 break;
10683 case GTU:
10684 /* unsigned > 0 is equivalent to != 0 */
10685 if (const_op == 0)
10686 code = NE;
10688 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
10689 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10690 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10692 const_op = 0, op1 = const0_rtx;
10693 code = LT;
10695 break;
10697 default:
10698 break;
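/* E.g., (lt X (const_int 1)) becomes (le X (const_int 0)),
(geu X (const_int 2)) becomes (gtu X (const_int 1)), and, in SImode,
(ltu X (const_int 0x80000000)) becomes (ge X (const_int 0)), since an
unsigned value is below 0x80000000 exactly when its sign bit is clear. */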
10701 /* Compute some predicates to simplify code below. */
10703 equality_comparison_p = (code == EQ || code == NE);
10704 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10705 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10706 || code == GEU);
10708 /* If this is a sign bit comparison and we can do arithmetic in
10709 MODE, say that we will only need the sign bit of OP0. */
10710 if (sign_bit_comparison_p
10711 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10712 op0 = force_to_mode (op0, mode,
10713 ((HOST_WIDE_INT) 1
10714 << (GET_MODE_BITSIZE (mode) - 1)), 0);
10717 /* Now try cases based on the opcode of OP0. If none of the cases
10718 does a "continue", we exit this loop immediately after the
10719 switch. */
10721 switch (GET_CODE (op0))
10723 case ZERO_EXTRACT:
10724 /* If we are extracting a single bit from a variable position in
10725 a constant that has only a single bit set and are comparing it
10726 with zero, we can convert this into an equality comparison
10727 between the position and the location of the single bit. */
10728 /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10729 have already reduced the shift count modulo the word size. */
10730 if (!SHIFT_COUNT_TRUNCATED
10731 && CONST_INT_P (XEXP (op0, 0))
10732 && XEXP (op0, 1) == const1_rtx
10733 && equality_comparison_p && const_op == 0
10734 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10736 if (BITS_BIG_ENDIAN)
10738 enum machine_mode new_mode
10739 = mode_for_extraction (EP_extzv, 1);
10740 if (new_mode == MAX_MACHINE_MODE)
10741 i = BITS_PER_WORD - 1 - i;
10742 else
10744 mode = new_mode;
10745 i = (GET_MODE_BITSIZE (mode) - 1 - i);
10749 op0 = XEXP (op0, 2);
10750 op1 = GEN_INT (i);
10751 const_op = i;
10753 /* Result is nonzero iff shift count is equal to I. */
10754 code = reverse_condition (code);
10755 continue;
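/* E.g., with little-endian bit numbering, (zero_extract (const_int 8)
(const_int 1) POS) selects bit POS of the constant 8, which is nonzero
only for POS == 3, so comparing the extraction with zero becomes
comparing POS with 3. */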
10758 /* ... fall through ... */
10760 case SIGN_EXTRACT:
10761 tem = expand_compound_operation (op0);
10762 if (tem != op0)
10764 op0 = tem;
10765 continue;
10767 break;
10769 case NOT:
10770 /* If testing for equality, we can take the NOT of the constant. */
10771 if (equality_comparison_p
10772 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10774 op0 = XEXP (op0, 0);
10775 op1 = tem;
10776 continue;
10779 /* If just looking at the sign bit, reverse the sense of the
10780 comparison. */
10781 if (sign_bit_comparison_p)
10783 op0 = XEXP (op0, 0);
10784 code = (code == GE ? LT : GE);
10785 continue;
10787 break;
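/* E.g., (eq (not X) (const_int 5)) becomes (eq X (const_int -6)), and
(lt (not X) (const_int 0)) becomes (ge X (const_int 0)) because the
sign bit of (not X) is the complement of the sign bit of X. */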
10789 case NEG:
10790 /* If testing for equality, we can take the NEG of the constant. */
10791 if (equality_comparison_p
10792 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10794 op0 = XEXP (op0, 0);
10795 op1 = tem;
10796 continue;
10799 /* The remaining cases only apply to comparisons with zero. */
10800 if (const_op != 0)
10801 break;
10803 /* When X is ABS or is known positive,
10804 (neg X) is < 0 if and only if X != 0. */
10806 if (sign_bit_comparison_p
10807 && (GET_CODE (XEXP (op0, 0)) == ABS
10808 || (mode_width <= HOST_BITS_PER_WIDE_INT
10809 && (nonzero_bits (XEXP (op0, 0), mode)
10810 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10812 op0 = XEXP (op0, 0);
10813 code = (code == LT ? NE : EQ);
10814 continue;
10817 /* If we have NEG of something whose two high-order bits are the
10818 same, we know that "(-a) < 0" is equivalent to "a > 0". */
10819 if (num_sign_bit_copies (op0, mode) >= 2)
10821 op0 = XEXP (op0, 0);
10822 code = swap_condition (code);
10823 continue;
10825 break;
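/* E.g., (lt (neg (abs X)) (const_int 0)) becomes (ne X (const_int 0)).
And when the two high-order bits of (neg X) are known equal, X cannot
be the most negative value, so the negation cannot wrap and
(lt (neg X) (const_int 0)) becomes (gt X (const_int 0)). */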
10827 case ROTATE:
10828 /* If we are testing equality and our count is a constant, we
10829 can perform the inverse operation on our RHS. */
10830 if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
10831 && (tem = simplify_binary_operation (ROTATERT, mode,
10832 op1, XEXP (op0, 1))) != 0)
10834 op0 = XEXP (op0, 0);
10835 op1 = tem;
10836 continue;
10839 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10840 a particular bit. Convert it to an AND of a constant of that
10841 bit. This will be converted into a ZERO_EXTRACT. */
10842 if (const_op == 0 && sign_bit_comparison_p
10843 && CONST_INT_P (XEXP (op0, 1))
10844 && mode_width <= HOST_BITS_PER_WIDE_INT)
10846 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10847 ((HOST_WIDE_INT) 1
10848 << (mode_width - 1
10849 - INTVAL (XEXP (op0, 1)))));
10850 code = (code == LT ? NE : EQ);
10851 continue;
10854 /* Fall through. */
10856 case ABS:
10857 /* ABS is ignorable inside an equality comparison with zero. */
10858 if (const_op == 0 && equality_comparison_p)
10860 op0 = XEXP (op0, 0);
10861 continue;
10863 break;
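/* E.g., (eq (abs X) (const_int 0)) becomes (eq X (const_int 0)),
since (abs X) is zero exactly when X is. */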
10865 case SIGN_EXTEND:
10866 /* Can simplify (compare (zero/sign_extend FOO) CONST) to
10867 (compare FOO CONST) if CONST fits in FOO's mode and we
10868 are either testing inequality or have an unsigned
10869 comparison with ZERO_EXTEND or a signed comparison with
10870 SIGN_EXTEND. But don't do it if we don't have a compare
10871 insn of the given mode, since we'd have to revert it
10872 later on, and then we wouldn't know whether to sign- or
10873 zero-extend. */
10874 mode = GET_MODE (XEXP (op0, 0));
10875 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10876 && ! unsigned_comparison_p
10877 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10878 && ((unsigned HOST_WIDE_INT) const_op
10879 < (((unsigned HOST_WIDE_INT) 1
10880 << (GET_MODE_BITSIZE (mode) - 1))))
10881 && have_insn_for (COMPARE, mode))
10883 op0 = XEXP (op0, 0);
10884 continue;
10886 break;
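/* E.g., (eq (sign_extend:SI X:QI) (const_int 5)) becomes
(eq X (const_int 5)), provided the target has a QImode compare insn,
since 5 is representable in QImode. */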
10888 case SUBREG:
10889 /* Check for the case where we are comparing A - C1 with C2, that is
10891 (subreg:MODE (plus (A) (-C1))) op (C2)
10893 with C1 a constant, and try to lift the SUBREG, i.e. to do the
10894 comparison in the wider mode. One of the following two conditions
10895 must be true in order for this to be valid:
10897 1. The mode extension results in the same bit pattern being added
10898 on both sides and the comparison is equality or unsigned. As
10899 C2 has been truncated to fit in MODE, the pattern can only be
10900 all 0s or all 1s.
10902 2. The mode extension results in the sign bit being copied on
10903 each side.
10905 The difficulty here is that we have predicates for A but not for
10906 (A - C1) so we need to check that C1 is within proper bounds so
10907 as to perturb A as little as possible. */
10909 if (mode_width <= HOST_BITS_PER_WIDE_INT
10910 && subreg_lowpart_p (op0)
10911 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
10912 && GET_CODE (SUBREG_REG (op0)) == PLUS
10913 && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
10915 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
10916 rtx a = XEXP (SUBREG_REG (op0), 0);
10917 HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
10919 if ((c1 > 0
10920 && (unsigned HOST_WIDE_INT) c1
10921 < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
10922 && (equality_comparison_p || unsigned_comparison_p)
10923 /* (A - C1) zero-extends if it is positive and sign-extends
10924 if it is negative, C2 both zero- and sign-extends. */
10925 && ((0 == (nonzero_bits (a, inner_mode)
10926 & ~GET_MODE_MASK (mode))
10927 && const_op >= 0)
10928 /* (A - C1) sign-extends if it is positive and 1-extends
10929 if it is negative, C2 both sign- and 1-extends. */
10930 || (num_sign_bit_copies (a, inner_mode)
10931 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10932 - mode_width)
10933 && const_op < 0)))
10934 || ((unsigned HOST_WIDE_INT) c1
10935 < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
10936 /* (A - C1) always sign-extends, like C2. */
10937 && num_sign_bit_copies (a, inner_mode)
10938 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10939 - (mode_width - 1))))
10941 op0 = SUBREG_REG (op0);
10942 continue;
10946 /* If the inner mode is narrower and we are extracting the low part,
10947 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10948 if (subreg_lowpart_p (op0)
10949 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10950 /* Fall through */ ;
10951 else
10952 break;
10954 /* ... fall through ... */
10956 case ZERO_EXTEND:
10957 mode = GET_MODE (XEXP (op0, 0));
10958 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10959 && (unsigned_comparison_p || equality_comparison_p)
10960 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10961 && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
10962 && have_insn_for (COMPARE, mode))
10964 op0 = XEXP (op0, 0);
10965 continue;
10967 break;
10969 case PLUS:
10970 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10971 this for equality comparisons due to pathological cases involving
10972 overflows. */
10973 if (equality_comparison_p
10974 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10975 op1, XEXP (op0, 1))))
10977 op0 = XEXP (op0, 0);
10978 op1 = tem;
10979 continue;
10982 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10983 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10984 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10986 op0 = XEXP (XEXP (op0, 0), 0);
10987 code = (code == LT ? EQ : NE);
10988 continue;
10990 break;
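/* E.g., (eq (plus X (const_int 3)) (const_int 7)) becomes
(eq X (const_int 4)); the same rewrite would be wrong for an ordered
comparison such as LT because the addition may wrap around. */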
10992 case MINUS:
10993 /* We used to optimize signed comparisons against zero, but that
10994 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10995 arrive here as equality comparisons, or (GEU, LTU) are
10996 optimized away. No need to special-case them. */
10998 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10999 (eq B (minus A C)), whichever simplifies. We can only do
11000 this for equality comparisons due to pathological cases involving
11001 overflows. */
11002 if (equality_comparison_p
11003 && 0 != (tem = simplify_binary_operation (PLUS, mode,
11004 XEXP (op0, 1), op1)))
11006 op0 = XEXP (op0, 0);
11007 op1 = tem;
11008 continue;
11011 if (equality_comparison_p
11012 && 0 != (tem = simplify_binary_operation (MINUS, mode,
11013 XEXP (op0, 0), op1)))
11015 op0 = XEXP (op0, 1);
11016 op1 = tem;
11017 continue;
11020 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
11021 of bits in X minus 1, is one iff X > 0. */
11022 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
11023 && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11024 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
11025 == mode_width - 1
11026 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11028 op0 = XEXP (op0, 1);
11029 code = (code == GE ? LE : GT);
11030 continue;
11032 break;
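/* To see the sign bit trick above in SImode with C == 31:
(ashiftrt X 31) is 0 when X >= 0 and -1 when X < 0, so the MINUS
computes -X (negative exactly when X > 0) in the first case and -1 - X
(never negative) in the second; hence (lt (minus ...) (const_int 0))
becomes (gt X (const_int 0)). */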
11034 case XOR:
11035 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
11036 if C is zero or B is a constant. */
11037 if (equality_comparison_p
11038 && 0 != (tem = simplify_binary_operation (XOR, mode,
11039 XEXP (op0, 1), op1)))
11041 op0 = XEXP (op0, 0);
11042 op1 = tem;
11043 continue;
11045 break;
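/* E.g., (eq (xor X (const_int 5)) (const_int 3)) becomes
(eq X (const_int 6)), since X ^ 5 == 3 exactly when X == (3 ^ 5). */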
11047 case EQ: case NE:
11048 case UNEQ: case LTGT:
11049 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
11050 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
11051 case UNORDERED: case ORDERED:
11052 /* We can't do anything if OP0 is a condition code value, rather
11053 than an actual data value. */
11054 if (const_op != 0
11055 || CC0_P (XEXP (op0, 0))
11056 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
11057 break;
11059 /* Get the two operands being compared. */
11060 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
11061 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
11062 else
11063 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
11065 /* Check for the cases where we simply want the result of the
11066 earlier test or the opposite of that result. */
11067 if (code == NE || code == EQ
11068 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
11069 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11070 && (STORE_FLAG_VALUE
11071 & (((HOST_WIDE_INT) 1
11072 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
11073 && (code == LT || code == GE)))
11075 enum rtx_code new_code;
11076 if (code == LT || code == NE)
11077 new_code = GET_CODE (op0);
11078 else
11079 new_code = reversed_comparison_code (op0, NULL);
11081 if (new_code != UNKNOWN)
11083 code = new_code;
11084 op0 = tem;
11085 op1 = tem1;
11086 continue;
11089 break;
11091 case IOR:
11092 /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
11093 iff X <= 0. */
11094 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
11095 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
11096 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11098 op0 = XEXP (op0, 1);
11099 code = (code == GE ? GT : LE);
11100 continue;
11102 break;
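/* To see why: when X > 0 both X and X - 1 have a clear sign bit, when
X == 0 the PLUS yields -1, and when X < 0 the sign bit of X itself is
set. So (lt (ior (plus X (const_int -1)) X) (const_int 0)) becomes
(le X (const_int 0)). */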
11104 case AND:
11105 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
11106 will be converted to a ZERO_EXTRACT later. */
11107 if (const_op == 0 && equality_comparison_p
11108 && GET_CODE (XEXP (op0, 0)) == ASHIFT
11109 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
11111 op0 = simplify_and_const_int
11112 (NULL_RTX, mode, gen_rtx_LSHIFTRT (mode,
11113 XEXP (op0, 1),
11114 XEXP (XEXP (op0, 0), 1)),
11115 (HOST_WIDE_INT) 1);
11116 continue;
11119 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
11120 zero and X is a comparison and C1 and C2 describe only bits set
11121 in STORE_FLAG_VALUE, we can compare with X. */
11122 if (const_op == 0 && equality_comparison_p
11123 && mode_width <= HOST_BITS_PER_WIDE_INT
11124 && CONST_INT_P (XEXP (op0, 1))
11125 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
11126 && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11127 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
11128 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
11130 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11131 << INTVAL (XEXP (XEXP (op0, 0), 1)));
11132 if ((~STORE_FLAG_VALUE & mask) == 0
11133 && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
11134 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
11135 && COMPARISON_P (tem))))
11137 op0 = XEXP (XEXP (op0, 0), 0);
11138 continue;
11142 /* If we are doing an equality comparison of an AND of a bit equal
11143 to the sign bit, replace this with a LT or GE comparison of
11144 the underlying value. */
11145 if (equality_comparison_p
11146 && const_op == 0
11147 && CONST_INT_P (XEXP (op0, 1))
11148 && mode_width <= HOST_BITS_PER_WIDE_INT
11149 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11150 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
11152 op0 = XEXP (op0, 0);
11153 code = (code == EQ ? GE : LT);
11154 continue;
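/* E.g., in SImode, (eq (and X (const_int 0x80000000)) (const_int 0))
becomes (ge X (const_int 0)) and the NE form becomes
(lt X (const_int 0)). */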
11157 /* If this AND operation is really a ZERO_EXTEND from a narrower
11158 mode, the constant fits within that mode, and this is either an
11159 equality or unsigned comparison, try to do this comparison in
11160 the narrower mode.
11162 Note that in:
11164 (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
11165 -> (ne:DI (reg:SI 4) (const_int 0))
11167 the transformation is invalid unless TRULY_NOOP_TRUNCATION
11168 allows it or the register is known to hold a value of the
11169 required mode. */
11170 if ((equality_comparison_p || unsigned_comparison_p)
11171 && CONST_INT_P (XEXP (op0, 1))
11172 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
11173 & GET_MODE_MASK (mode))
11174 + 1)) >= 0
11175 && const_op >> i == 0
11176 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode
11177 && (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
11178 GET_MODE_BITSIZE (GET_MODE (op0)))
11179 || (REG_P (XEXP (op0, 0))
11180 && reg_truncated_to_mode (tmode, XEXP (op0, 0)))))
11182 op0 = gen_lowpart (tmode, XEXP (op0, 0));
11183 continue;
11186 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
11187 fits in both M1 and M2 and the SUBREG is either paradoxical
11188 or represents the low part, permute the SUBREG and the AND
11189 and try again. */
11190 if (GET_CODE (XEXP (op0, 0)) == SUBREG)
11192 unsigned HOST_WIDE_INT c1;
11193 tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
11194 /* Require an integral mode, to avoid creating something like
11195 (AND:SF ...). */
11196 if (SCALAR_INT_MODE_P (tmode)
11197 /* It is unsafe to commute the AND into the SUBREG if the
11198 SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
11199 not defined. As originally written the upper bits
11200 have a defined value due to the AND operation.
11201 However, if we commute the AND inside the SUBREG then
11202 they no longer have defined values and the meaning of
11203 the code has been changed. */
11204 && (0
11205 #ifdef WORD_REGISTER_OPERATIONS
11206 || (mode_width > GET_MODE_BITSIZE (tmode)
11207 && mode_width <= BITS_PER_WORD)
11208 #endif
11209 || (mode_width <= GET_MODE_BITSIZE (tmode)
11210 && subreg_lowpart_p (XEXP (op0, 0))))
11211 && CONST_INT_P (XEXP (op0, 1))
11212 && mode_width <= HOST_BITS_PER_WIDE_INT
11213 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
11214 && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
11215 && (c1 & ~GET_MODE_MASK (tmode)) == 0
11216 && c1 != mask
11217 && c1 != GET_MODE_MASK (tmode))
11219 op0 = simplify_gen_binary (AND, tmode,
11220 SUBREG_REG (XEXP (op0, 0)),
11221 gen_int_mode (c1, tmode));
11222 op0 = gen_lowpart (mode, op0);
11223 continue;
11227 /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */
11228 if (const_op == 0 && equality_comparison_p
11229 && XEXP (op0, 1) == const1_rtx
11230 && GET_CODE (XEXP (op0, 0)) == NOT)
11232 op0 = simplify_and_const_int
11233 (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
11234 code = (code == NE ? EQ : NE);
11235 continue;
11238 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
11239 (eq (and (lshiftrt X) 1) 0).
11240 Also handle the case where (not X) is expressed using xor. */
11241 if (const_op == 0 && equality_comparison_p
11242 && XEXP (op0, 1) == const1_rtx
11243 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
11245 rtx shift_op = XEXP (XEXP (op0, 0), 0);
11246 rtx shift_count = XEXP (XEXP (op0, 0), 1);
11248 if (GET_CODE (shift_op) == NOT
11249 || (GET_CODE (shift_op) == XOR
11250 && CONST_INT_P (XEXP (shift_op, 1))
11251 && CONST_INT_P (shift_count)
11252 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
11253 && (INTVAL (XEXP (shift_op, 1))
11254 == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
11256 op0 = simplify_and_const_int
11257 (NULL_RTX, mode,
11258 gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
11259 (HOST_WIDE_INT) 1);
11260 code = (code == NE ? EQ : NE);
11261 continue;
11264 break;
11266 case ASHIFT:
11267 /* If we have (compare (ashift FOO N) (const_int C)) and
11268 the high order N bits of FOO (N+1 if an inequality comparison)
11269 are known to be zero, we can do this by comparing FOO with C
11270 shifted right N bits so long as the low-order N bits of C are
11271 zero. */
11272 if (CONST_INT_P (XEXP (op0, 1))
11273 && INTVAL (XEXP (op0, 1)) >= 0
11274 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
11275 < HOST_BITS_PER_WIDE_INT)
11276 && ((const_op
11277 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
11278 && mode_width <= HOST_BITS_PER_WIDE_INT
11279 && (nonzero_bits (XEXP (op0, 0), mode)
11280 & ~(mask >> (INTVAL (XEXP (op0, 1))
11281 + ! equality_comparison_p))) == 0)
11283 /* We must perform a logical shift, not an arithmetic one,
11284 as we want the top N bits of C to be zero. */
11285 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
11287 temp >>= INTVAL (XEXP (op0, 1));
11288 op1 = gen_int_mode (temp, mode);
11289 op0 = XEXP (op0, 0);
11290 continue;
11293 /* If we are doing a sign bit comparison, it means we are testing
11294 a particular bit. Convert it to the appropriate AND. */
11295 if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
11296 && mode_width <= HOST_BITS_PER_WIDE_INT)
11298 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11299 ((HOST_WIDE_INT) 1
11300 << (mode_width - 1
11301 - INTVAL (XEXP (op0, 1)))));
11302 code = (code == LT ? NE : EQ);
11303 continue;
11306 /* If this is an equality comparison with zero and we are shifting
11307 the low bit to the sign bit, we can convert this to an AND of the
11308 low-order bit. */
11309 if (const_op == 0 && equality_comparison_p
11310 && CONST_INT_P (XEXP (op0, 1))
11311 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11312 == mode_width - 1)
11314 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11315 (HOST_WIDE_INT) 1);
11316 continue;
11318 break;
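/* E.g., (eq (ashift:SI X (const_int 31)) (const_int 0)): only the low
bit of X survives the shift, so this becomes
(eq (and X (const_int 1)) (const_int 0)). */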
11320 case ASHIFTRT:
11321 /* If this is an equality comparison with zero, we can do this
11322 as a logical shift, which might be much simpler. */
11323 if (equality_comparison_p && const_op == 0
11324 && CONST_INT_P (XEXP (op0, 1)))
11326 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11327 XEXP (op0, 0),
11328 INTVAL (XEXP (op0, 1)));
11329 continue;
11332 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11333 do the comparison in a narrower mode. */
11334 if (! unsigned_comparison_p
11335 && CONST_INT_P (XEXP (op0, 1))
11336 && GET_CODE (XEXP (op0, 0)) == ASHIFT
11337 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11338 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11339 MODE_INT, 1)) != BLKmode
11340 && (((unsigned HOST_WIDE_INT) const_op
11341 + (GET_MODE_MASK (tmode) >> 1) + 1)
11342 <= GET_MODE_MASK (tmode)))
11344 op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
11345 continue;
11348 /* Likewise if OP0 is a PLUS of a sign extension with a
11349 constant, which is usually represented with the PLUS
11350 between the shifts. */
11351 if (! unsigned_comparison_p
11352 && CONST_INT_P (XEXP (op0, 1))
11353 && GET_CODE (XEXP (op0, 0)) == PLUS
11354 && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11355 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11356 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11357 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11358 MODE_INT, 1)) != BLKmode
11359 && (((unsigned HOST_WIDE_INT) const_op
11360 + (GET_MODE_MASK (tmode) >> 1) + 1)
11361 <= GET_MODE_MASK (tmode)))
11363 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11364 rtx add_const = XEXP (XEXP (op0, 0), 1);
11365 rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
11366 add_const, XEXP (op0, 1));
11368 op0 = simplify_gen_binary (PLUS, tmode,
11369 gen_lowpart (tmode, inner),
11370 new_const);
11371 continue;
11374 /* ... fall through ... */
11375 case LSHIFTRT:
11376 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
11377 the low order N bits of FOO are known to be zero, we can do this
11378 by comparing FOO with C shifted left N bits so long as no
11379 overflow occurs. */
11380 if (CONST_INT_P (XEXP (op0, 1))
11381 && INTVAL (XEXP (op0, 1)) >= 0
11382 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11383 && mode_width <= HOST_BITS_PER_WIDE_INT
11384 && (nonzero_bits (XEXP (op0, 0), mode)
11385 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
11386 && (((unsigned HOST_WIDE_INT) const_op
11387 + (GET_CODE (op0) != LSHIFTRT
11388 ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11389 + 1)
11390 : 0))
11391 <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
11393 /* If the shift was logical, then we must make the condition
11394 unsigned. */
11395 if (GET_CODE (op0) == LSHIFTRT)
11396 code = unsigned_condition (code);
11398 const_op <<= INTVAL (XEXP (op0, 1));
11399 op1 = GEN_INT (const_op);
11400 op0 = XEXP (op0, 0);
11401 continue;
11404 /* If we are using this shift to extract just the sign bit, we
11405 can replace this with an LT or GE comparison. */
11406 if (const_op == 0
11407 && (equality_comparison_p || sign_bit_comparison_p)
11408 && CONST_INT_P (XEXP (op0, 1))
11409 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11410 == mode_width - 1)
11412 op0 = XEXP (op0, 0);
11413 code = (code == NE || code == GT ? LT : GE);
11414 continue;
11416 break;
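/* E.g., (ne (lshiftrt:SI X (const_int 31)) (const_int 0)) tests
exactly the sign bit of X and becomes (lt X (const_int 0)); the EQ
form becomes (ge X (const_int 0)). */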
11418 default:
11419 break;
11422 break;
11425 /* Now make any compound operations involved in this comparison. Then,
11426 check for an outermost SUBREG on OP0 that is not doing anything or is
11427 paradoxical. The latter transformation must only be performed when
11428 it is known that the "extra" bits will be the same in op0 and op1 or
11429 that they don't matter. There are three cases to consider:
11431 1. SUBREG_REG (op0) is a register. In this case the bits are don't
11432 care bits and we can assume they have any convenient value. So
11433 making the transformation is safe.
11435 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11436 In this case the upper bits of op0 are undefined. We should not make
11437 the simplification in that case as we do not know the contents of
11438 those bits.
11440 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
11441 UNKNOWN. In that case we know those bits are zeros or ones. We must
11442 also be sure that they are the same as the upper bits of op1.
11444 We can never remove a SUBREG for a non-equality comparison because
11445 the sign bit is in a different place in the underlying object. */
11447 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11448 op1 = make_compound_operation (op1, SET);
11450 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11451 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11452 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
11453 && (code == NE || code == EQ))
11455 if (GET_MODE_SIZE (GET_MODE (op0))
11456 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11458 /* For paradoxical subregs, allow case 1 as above. Case 3 isn't
11459 implemented. */
11460 if (REG_P (SUBREG_REG (op0)))
11462 op0 = SUBREG_REG (op0);
11463 op1 = gen_lowpart (GET_MODE (op0), op1);
11466 else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11467 <= HOST_BITS_PER_WIDE_INT)
11468 && (nonzero_bits (SUBREG_REG (op0),
11469 GET_MODE (SUBREG_REG (op0)))
11470 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11472 tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
11474 if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11475 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11476 op0 = SUBREG_REG (op0), op1 = tem;
11480 /* We now do the opposite procedure: Some machines don't have compare
11481 insns in all modes. If OP0's mode is an integer mode smaller than a
11482 word and we can't do a compare in that mode, see if there is a larger
11483 mode for which we can do the compare. There are a number of cases in
11484 which we can use the wider mode. */
11486 mode = GET_MODE (op0);
11487 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11488 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11489 && ! have_insn_for (COMPARE, mode))
11490 for (tmode = GET_MODE_WIDER_MODE (mode);
11491 (tmode != VOIDmode
11492 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11493 tmode = GET_MODE_WIDER_MODE (tmode))
11494 if (have_insn_for (COMPARE, tmode))
11496 int zero_extended;
11498 /* If this is a test for negative, we can make an explicit
11499 test of the sign bit. Test this first so we can use
11500 a paradoxical subreg to extend OP0. */
11502 if (op1 == const0_rtx && (code == LT || code == GE)
11503 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11505 op0 = simplify_gen_binary (AND, tmode,
11506 gen_lowpart (tmode, op0),
11507 GEN_INT ((HOST_WIDE_INT) 1
11508 << (GET_MODE_BITSIZE (mode)
11509 - 1)));
11510 code = (code == LT) ? NE : EQ;
11511 break;
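/* E.g., if OP0 has QImode and the target has a compare insn for
SImode but not QImode, (lt OP0 (const_int 0)) becomes
(ne (and:SI (subreg:SI OP0 0) (const_int 0x80)) (const_int 0)). */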
11514 /* If the only nonzero bits in OP0 and OP1 are those in the
11515 narrower mode and this is an equality or unsigned comparison,
11516 we can use the wider mode. Similarly for sign-extended
11517 values, in which case it is true for all comparisons. */
11518 zero_extended = ((code == EQ || code == NE
11519 || code == GEU || code == GTU
11520 || code == LEU || code == LTU)
11521 && (nonzero_bits (op0, tmode)
11522 & ~GET_MODE_MASK (mode)) == 0
11523 && ((CONST_INT_P (op1)
11524 || (nonzero_bits (op1, tmode)
11525 & ~GET_MODE_MASK (mode)) == 0)));
11527 if (zero_extended
11528 || ((num_sign_bit_copies (op0, tmode)
11529 > (unsigned int) (GET_MODE_BITSIZE (tmode)
11530 - GET_MODE_BITSIZE (mode)))
11531 && (num_sign_bit_copies (op1, tmode)
11532 > (unsigned int) (GET_MODE_BITSIZE (tmode)
11533 - GET_MODE_BITSIZE (mode)))))
11535 /* If OP0 is an AND and we don't have an AND in MODE either,
11536 make a new AND in the proper mode. */
11537 if (GET_CODE (op0) == AND
11538 && !have_insn_for (AND, mode))
11539 op0 = simplify_gen_binary (AND, tmode,
11540 gen_lowpart (tmode,
11541 XEXP (op0, 0)),
11542 gen_lowpart (tmode,
11543 XEXP (op0, 1)));
11544 else
11546 if (zero_extended)
11548 op0 = simplify_gen_unary (ZERO_EXTEND, tmode, op0, mode);
11549 op1 = simplify_gen_unary (ZERO_EXTEND, tmode, op1, mode);
11551 else
11553 op0 = simplify_gen_unary (SIGN_EXTEND, tmode, op0, mode);
11554 op1 = simplify_gen_unary (SIGN_EXTEND, tmode, op1, mode);
11556 break;
11561 #ifdef CANONICALIZE_COMPARISON
11562 /* If this machine only supports a subset of valid comparisons, see if we
11563 can convert an unsupported one into a supported one. */
11564 CANONICALIZE_COMPARISON (code, op0, op1);
11565 #endif
11567 *pop0 = op0;
11568 *pop1 = op1;
11570 return code;
11573 /* Utility function for record_value_for_reg. Count number of
11574 rtxs in X. */
11575 static int
11576 count_rtxs (rtx x)
11578 enum rtx_code code = GET_CODE (x);
11579 const char *fmt;
11580 int i, j, ret = 1;
11582 if (ARITHMETIC_P (x))
11585 rtx x0 = XEXP (x, 0);
11586 rtx x1 = XEXP (x, 1);
11588 if (x0 == x1)
11589 return 1 + 2 * count_rtxs (x0);
11591 if (ARITHMETIC_P (x1)
11593 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11594 return 2 + 2 * count_rtxs (x0)
11595 + count_rtxs (x0 == XEXP (x1, 0)
11596 ? XEXP (x1, 1) : XEXP (x1, 0));
11598 if (ARITHMETIC_P (x0)
11600 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11601 return 2 + 2 * count_rtxs (x1)
11602 + count_rtxs (x1 == XEXP (x0, 0)
11603 ? XEXP (x0, 1) : XEXP (x0, 0));
11606 fmt = GET_RTX_FORMAT (code);
11607 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11608 if (fmt[i] == 'e')
11609 ret += count_rtxs (XEXP (x, i));
11610 else if (fmt[i] == 'E')
11611 for (j = 0; j < XVECLEN (x, i); j++)
11612 ret += count_rtxs (XVECEXP (x, i, j));
11614 return ret;
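/* E.g., for (plus:SI R R) with R a REG this returns 3, traversing the
shared operand R only once; the special cases above keep the traversal
linear even for deeply shared expressions. */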
11617 /* Utility function for the following routine. Called when X is part of a
11618 value being stored into last_set_value. Sets last_set_table_tick
11619 for each register mentioned. Similar to mention_regs in cse.c. */
11621 static void
11622 update_table_tick (rtx x)
11624 enum rtx_code code = GET_CODE (x);
11625 const char *fmt = GET_RTX_FORMAT (code);
11626 int i, j;
11628 if (code == REG)
11630 unsigned int regno = REGNO (x);
11631 unsigned int endregno = END_REGNO (x);
11632 unsigned int r;
11634 for (r = regno; r < endregno; r++)
11636 reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, r);
11637 rsp->last_set_table_tick = label_tick;
11640 return;
11643 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11644 if (fmt[i] == 'e')
11646 /* Check for identical subexpressions. If x contains
11647 identical subexpression we only have to traverse one of
11648 them. */
11649 if (i == 0 && ARITHMETIC_P (x))
11651 /* Note that at this point x1 has already been
11652 processed. */
11653 rtx x0 = XEXP (x, 0);
11654 rtx x1 = XEXP (x, 1);
11656 /* If x0 and x1 are identical then there is no need to
11657 process x0. */
11658 if (x0 == x1)
11659 break;
11661 /* If x0 is identical to a subexpression of x1 then while
11662 processing x1, x0 has already been processed. Thus we
11663 are done with x. */
11664 if (ARITHMETIC_P (x1)
11665 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11666 break;
11668 /* If x1 is identical to a subexpression of x0 then we
11669 still have to process the rest of x0. */
11670 if (ARITHMETIC_P (x0)
11671 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11673 update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11674 break;
11678 update_table_tick (XEXP (x, i));
11680 else if (fmt[i] == 'E')
11681 for (j = 0; j < XVECLEN (x, i); j++)
11682 update_table_tick (XVECEXP (x, i, j));
11685 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11686 are saying that the register is clobbered and we no longer know its
11687 value. If INSN is zero, don't update reg_stat[].last_set; this is
11688 only permitted with VALUE also zero and is used to invalidate the
11689 register. */
11691 static void
11692 record_value_for_reg (rtx reg, rtx insn, rtx value)
11694 unsigned int regno = REGNO (reg);
11695 unsigned int endregno = END_REGNO (reg);
11696 unsigned int i;
11697 reg_stat_type *rsp;
11699 /* If VALUE contains REG and we have a previous value for REG, substitute
11700 the previous value. */
11701 if (value && insn && reg_overlap_mentioned_p (reg, value))
11703 rtx tem;
11705 /* Set things up so get_last_value is allowed to see anything set up to
11706 our insn. */
11707 subst_low_luid = DF_INSN_LUID (insn);
11708 tem = get_last_value (reg);
11710 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11711 it isn't going to be useful and will take a lot of time to process,
11712 so just use the CLOBBER. */
11714 if (tem)
11716 if (ARITHMETIC_P (tem)
11717 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11718 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11719 tem = XEXP (tem, 0);
11720 else if (count_occurrences (value, reg, 1) >= 2)
11722 /* If there are two or more occurrences of REG in VALUE,
11723 prevent the value from growing too much. */
11724 if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
11725 tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
11728 value = replace_rtx (copy_rtx (value), reg, tem);
11732 /* For each register modified, show we don't know its value, that
11733 we don't know about its bitwise content, that its value has been
11734 updated, and that we don't know the location of the death of the
11735 register. */
11736 for (i = regno; i < endregno; i++)
11738 rsp = VEC_index (reg_stat_type, reg_stat, i);
11740 if (insn)
11741 rsp->last_set = insn;
11743 rsp->last_set_value = 0;
11744 rsp->last_set_mode = VOIDmode;
11745 rsp->last_set_nonzero_bits = 0;
11746 rsp->last_set_sign_bit_copies = 0;
11747 rsp->last_death = 0;
11748 rsp->truncated_to_mode = VOIDmode;
11751 /* Mark registers that are being referenced in this value. */
11752 if (value)
11753 update_table_tick (value);
11755 /* Now update the status of each register being set.
11756 If someone is using this register in this block, set this register
11757 to invalid since we will get confused between the two lives in this
11758 basic block. This makes using this register always invalid. In cse, we
11759 scan the table to invalidate all entries using this register, but this
11760 is too much work for us. */
11762 for (i = regno; i < endregno; i++)
11764 rsp = VEC_index (reg_stat_type, reg_stat, i);
11765 rsp->last_set_label = label_tick;
11766 if (!insn
11767 || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
11768 rsp->last_set_invalid = 1;
11769 else
11770 rsp->last_set_invalid = 0;
11773 /* The value being assigned might refer to X (like in "x++;"). In that
11774 case, we must replace it with (clobber (const_int 0)) to prevent
11775 infinite loops. */
11776 rsp = VEC_index (reg_stat_type, reg_stat, regno);
11777 if (value && !get_last_value_validate (&value, insn, label_tick, 0))
11779 value = copy_rtx (value);
11780 if (!get_last_value_validate (&value, insn, label_tick, 1))
11781 value = 0;
11784 /* For the main register being modified, update the value, the mode, the
11785 nonzero bits, and the number of sign bit copies. */
11787 rsp->last_set_value = value;
11789 if (value)
11791 enum machine_mode mode = GET_MODE (reg);
11792 subst_low_luid = DF_INSN_LUID (insn);
11793 rsp->last_set_mode = mode;
11794 if (GET_MODE_CLASS (mode) == MODE_INT
11795 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11796 mode = nonzero_bits_mode;
11797 rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
11798 rsp->last_set_sign_bit_copies
11799 = num_sign_bit_copies (value, GET_MODE (reg));
11803 /* Called via note_stores from record_dead_and_set_regs to handle one
11804 SET or CLOBBER in an insn. DATA is the instruction in which the
11805 set is occurring. */
11807 static void
11808 record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
11810 rtx record_dead_insn = (rtx) data;
11812 if (GET_CODE (dest) == SUBREG)
11813 dest = SUBREG_REG (dest);
11815 if (!record_dead_insn)
11817 if (REG_P (dest))
11818 record_value_for_reg (dest, NULL_RTX, NULL_RTX);
11819 return;
11822 if (REG_P (dest))
11824 /* If we are setting the whole register, we know its value. Otherwise
11825 show that we don't know the value. We can handle SUBREG in
11826 some cases. */
11827 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11828 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11829 else if (GET_CODE (setter) == SET
11830 && GET_CODE (SET_DEST (setter)) == SUBREG
11831 && SUBREG_REG (SET_DEST (setter)) == dest
11832 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11833 && subreg_lowpart_p (SET_DEST (setter)))
11834 record_value_for_reg (dest, record_dead_insn,
11835 gen_lowpart (GET_MODE (dest),
11836 SET_SRC (setter)));
11837 else
11838 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11840 else if (MEM_P (dest)
11841 /* Ignore pushes, they clobber nothing. */
11842 && ! push_operand (dest, GET_MODE (dest)))
11843 mem_last_set = DF_INSN_LUID (record_dead_insn);
11846 /* Update the records of when each REG was most recently set or killed
11847 for the things done by INSN. This is the last thing done in processing
11848 INSN in the combiner loop.
11850 We update reg_stat[], in particular fields last_set, last_set_value,
11851 last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
11852 last_death, and also the similar information mem_last_set (which insn
11853 most recently modified memory) and last_call_luid (which insn was the
11854 most recent subroutine call). */
11856 static void
11857 record_dead_and_set_regs (rtx insn)
11859 rtx link;
11860 unsigned int i;
11862 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11864 if (REG_NOTE_KIND (link) == REG_DEAD
11865 && REG_P (XEXP (link, 0)))
11867 unsigned int regno = REGNO (XEXP (link, 0));
11868 unsigned int endregno = END_REGNO (XEXP (link, 0));
11870 for (i = regno; i < endregno; i++)
11872 reg_stat_type *rsp;
11874 rsp = VEC_index (reg_stat_type, reg_stat, i);
11875 rsp->last_death = insn;
11878 else if (REG_NOTE_KIND (link) == REG_INC)
11879 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11882 if (CALL_P (insn))
11884 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11885 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11887 reg_stat_type *rsp;
11889 rsp = VEC_index (reg_stat_type, reg_stat, i);
11890 rsp->last_set_invalid = 1;
11891 rsp->last_set = insn;
11892 rsp->last_set_value = 0;
11893 rsp->last_set_mode = VOIDmode;
11894 rsp->last_set_nonzero_bits = 0;
11895 rsp->last_set_sign_bit_copies = 0;
11896 rsp->last_death = 0;
11897 rsp->truncated_to_mode = VOIDmode;
11900 last_call_luid = mem_last_set = DF_INSN_LUID (insn);
11902 /* We can't combine into a call pattern. Remember, though, that
11903 the return value register is set at this LUID. We could
11904 still replace a register with the return value from the
11905 wrong subroutine call! */
11906 note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
11908 else
11909 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11912 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11913 register present in the SUBREG, so for each such SUBREG go back and
11914 adjust nonzero and sign bit information of the registers that are
11915 known to have some zero/sign bits set.
11917 This is needed because when combine blows the SUBREGs away, the
11918 information on zero/sign bits is lost and further combines can be
11919 missed because of that. */
11921 static void
11922 record_promoted_value (rtx insn, rtx subreg)
11924 rtx links, set;
11925 unsigned int regno = REGNO (SUBREG_REG (subreg));
11926 enum machine_mode mode = GET_MODE (subreg);
11928 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11929 return;
11931 for (links = LOG_LINKS (insn); links;)
11933 reg_stat_type *rsp;
11935 insn = XEXP (links, 0);
11936 set = single_set (insn);
11938 if (! set || !REG_P (SET_DEST (set))
11939 || REGNO (SET_DEST (set)) != regno
11940 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11942 links = XEXP (links, 1);
11943 continue;
11946 rsp = VEC_index (reg_stat_type, reg_stat, regno);
11947 if (rsp->last_set == insn)
11949 if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11950 rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
11953 if (REG_P (SET_SRC (set)))
11955 regno = REGNO (SET_SRC (set));
11956 links = LOG_LINKS (insn);
11958 else
11959 break;
11963 /* Check if X, a register, is known to contain a value already
11964 truncated to MODE. In this case we can use a subreg to refer to
11965 the truncated value even though in the generic case we would need
11966 an explicit truncation. */
11968 static bool
11969 reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
11971 reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
11972 enum machine_mode truncated = rsp->truncated_to_mode;
11974 if (truncated == 0
11975 || rsp->truncation_label < label_tick_ebb_start)
11976 return false;
11977 if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
11978 return true;
11979 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
11980 GET_MODE_BITSIZE (truncated)))
11981 return true;
11982 return false;
11985 /* Callback for for_each_rtx. If *P is a hard reg or a subreg, record the mode
11986 that the register is accessed in. For non-TRULY_NOOP_TRUNCATION targets we
11987 might be able to turn a truncate into a subreg using this information.
11988 Return -1 if traversing *P is complete or 0 otherwise. */
11990 static int
11991 record_truncated_value (rtx *p, void *data ATTRIBUTE_UNUSED)
11993 rtx x = *p;
11994 enum machine_mode truncated_mode;
11995 reg_stat_type *rsp;
11997 if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
11999 enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
12000 truncated_mode = GET_MODE (x);
12002 if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
12003 return -1;
12005 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (truncated_mode),
12006 GET_MODE_BITSIZE (original_mode)))
12007 return -1;
12009 x = SUBREG_REG (x);
12011 /* ??? For hard-regs we now record everything. We might be able to
12012 optimize this using last_set_mode. */
12013 else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
12014 truncated_mode = GET_MODE (x);
12015 else
12016 return 0;
12018 rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
12019 if (rsp->truncated_to_mode == 0
12020 || rsp->truncation_label < label_tick_ebb_start
12021 || (GET_MODE_SIZE (truncated_mode)
12022 < GET_MODE_SIZE (rsp->truncated_to_mode)))
12024 rsp->truncated_to_mode = truncated_mode;
12025 rsp->truncation_label = label_tick;
12028 return -1;
12031 /* Callback for note_uses. Find hardregs and subregs of pseudos and
12032 the modes they are used in. This can help turning TRUNCATEs into
12033 SUBREGs. */
12035 static void
12036 record_truncated_values (rtx *x, void *data ATTRIBUTE_UNUSED)
12038 for_each_rtx (x, record_truncated_value, NULL);
12041 /* Scan X for promoted SUBREGs. For each one found,
12042 note what it implies to the registers used in it. */
12044 static void
12045 check_promoted_subreg (rtx insn, rtx x)
12047 if (GET_CODE (x) == SUBREG
12048 && SUBREG_PROMOTED_VAR_P (x)
12049 && REG_P (SUBREG_REG (x)))
12050 record_promoted_value (insn, x);
12051 else
12053 const char *format = GET_RTX_FORMAT (GET_CODE (x));
12054 int i, j;
12056 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
12057 switch (format[i])
12059 case 'e':
12060 check_promoted_subreg (insn, XEXP (x, i));
12061 break;
12062 case 'V':
12063 case 'E':
12064 if (XVEC (x, i) != 0)
12065 for (j = 0; j < XVECLEN (x, i); j++)
12066 check_promoted_subreg (insn, XVECEXP (x, i, j));
12067 break;
/* Verify that all the registers and memory references mentioned in *LOC are
   still valid.  *LOC was part of a value set in INSN when label_tick was
   equal to TICK.  Return 0 if some are not.  If REPLACE is nonzero, replace
   the invalid references with (clobber (const_int 0)) and return 1.  This
   replacement is useful because we often can get useful information about
   the form of a value (e.g., if it was produced by a shift that always
   produces -1 or 0) even though we don't know exactly what registers it
   was produced from.  */

static int
get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
{
  rtx x = *loc;
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i, j;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int j;

      for (j = regno; j < endregno; j++)
	{
	  reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, j);
	  if (rsp->last_set_invalid
	      /* If this is a pseudo-register that was only set once and not
		 live at the beginning of the function, it is always valid.  */
	      || (! (regno >= FIRST_PSEUDO_REGISTER
		     && REG_N_SETS (regno) == 1
		     && (!REGNO_REG_SET_P
			 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno)))
		  && rsp->last_set_label > tick))
	    {
	      if (replace)
		*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	      return replace;
	    }
	}

      return 1;
    }
  /* If this is a memory reference, make sure that there were no stores after
     it that might have clobbered the value.  We don't have alias info, so we
     assume any store invalidates it.  Moreover, we only have local UIDs, so
     we also assume that there were stores in the intervening basic blocks.  */
  else if (MEM_P (x) && !MEM_READONLY_P (x)
	   && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
    {
      if (replace)
	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
	{
	  /* Check for identical subexpressions.  If x contains
	     identical subexpressions we only have to traverse one of
	     them.  */
	  if (i == 1 && ARITHMETIC_P (x))
	    {
	      /* Note that at this point x0 has already been checked
		 and found valid.  */
	      rtx x0 = XEXP (x, 0);
	      rtx x1 = XEXP (x, 1);

	      /* If x0 and x1 are identical then x is also valid.  */
	      if (x0 == x1)
		return 1;

	      /* If x1 is identical to a subexpression of x0 then
		 while checking x0, x1 has already been checked.  Thus
		 it is valid and so is x.  */
	      if (ARITHMETIC_P (x0)
		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
		return 1;

	      /* If x0 is identical to a subexpression of x1 then x is
		 valid iff the rest of x1 is valid.  */
	      if (ARITHMETIC_P (x1)
		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
		return
		  get_last_value_validate (&XEXP (x1,
						  x0 == XEXP (x1, 0) ? 1 : 0),
					   insn, tick, replace);
	    }

	  if (get_last_value_validate (&XEXP (x, i), insn, tick,
				       replace) == 0)
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (get_last_value_validate (&XVECEXP (x, i, j),
				       insn, tick, replace) == 0)
	    return 0;
    }

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}
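
/* As an illustration (hypothetical RTL): if reg 100 was last set by

       (set (reg:SI 100) (ashiftrt:SI (reg:SI 101) (const_int 31)))

   but reg 101 has been reassigned since, validating with REPLACE
   nonzero rewrites the recorded value to

       (ashiftrt:SI (clobber:SI (const_int 0)) (const_int 31))

   which still shows that the result is either -1 or 0.  */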
/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (const_rtx x)
{
  unsigned int regno;
  rtx value;
  reg_stat_type *rsp;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart (GET_MODE (x), value);

  if (!REG_P (x))
    return 0;

  regno = REGNO (x);
  rsp = VEC_index (reg_stat_type, reg_stat, regno);
  value = rsp->last_set_value;

  /* If we don't have a value, or if it isn't for this basic block and
     it's either a hard register, set more than once, or live
     at the beginning of the function, return 0.

     Because if it's not live at the beginning of the function then the reg
     is always set before being used (is never used without being set).
     And, if it's set only once, and it's always set before use, then all
     uses must have the same last value, even if it's not from this basic
     block.  */

  if (value == 0
      || (rsp->last_set_label < label_tick_ebb_start
	  && (regno < FIRST_PSEUDO_REGISTER
	      || REG_N_SETS (regno) != 1
	      || REGNO_REG_SET_P
		 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno))))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once.  */
  if (rsp->last_set_label == label_tick
      && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
    return 0;

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 1))
    return value;

  return 0;
}
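
/* Typical use (an assumed sketch, not a quote of any particular caller):

       rtx val = get_last_value (op);
       if (val != 0)
	 ... reason about VAL in place of OP ...

   Any (clobber (const_int 0)) inside VAL marks a subexpression whose
   origin is no longer trustworthy, while the surrounding structure
   remains usable for reasoning such as nonzero-bits analysis.  */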
/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_LUID.  */

static int
use_crosses_set_p (const_rtx x, int from_luid)
{
  const char *fmt;
  int i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned endreg = END_REGNO (x);

#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
	 because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
	return 1;
#endif
      for (; regno < endreg; regno++)
	{
	  reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
	  if (rsp->last_set
	      && rsp->last_set_label == label_tick
	      && DF_INSN_LUID (rsp->last_set) > from_luid)
	    return 1;
	}
      return 0;
    }

  if (code == MEM && mem_last_set > from_luid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (use_crosses_set_p (XVECEXP (x, i, j), from_luid))
	      return 1;
	}
      else if (fmt[i] == 'e'
	       && use_crosses_set_p (XEXP (x, i), from_luid))
	return 1;
    }

  return 0;
}
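
/* Example (hypothetical insns): given

       A: (set (reg 100) (reg 101))
       B: (set (reg 101) ...)
       C: ... (reg 100) ...

   substituting (reg 101) for (reg 100) in C must be rejected;
   use_crosses_set_p on (reg 101) with A's luid returns 1 because
   reg 101's last set, B, is more recent than A.  */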
/* Define three variables used for communication between the following
   routines.  */

static unsigned int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  unsigned int regno, endregno;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}

/* Return nonzero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (rtx reg, rtx insn)
{
  basic_block block;
  unsigned int i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = END_REGNO (reg);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
     we allow the machine description to decide whether use-and-clobber
     patterns are OK.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
	if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
	  return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
     beginning of basic block.  */
  block = BLOCK_FOR_INSN (insn);
  for (;;)
    {
      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
	  if (reg_dead_flag)
	    return reg_dead_flag == 1 ? 1 : 0;

	  if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
	    return 1;
	}

      if (insn == BB_HEAD (block))
	break;

      insn = PREV_INSN (insn);
    }

  /* Look at live-in sets for the basic block that we were in.  */
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (df_get_live_in (block), i))
      return 0;

  return 1;
}
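
/* Illustration (hypothetical scan): reg_dead_at_p for hard reg 1 walks
   backwards from INSN; hitting (clobber (reg:SI 1)) answers "dead",
   hitting (set (reg:SI 1) ...) answers "live", and reaching the head
   of the block falls back on the dataflow live-in set.  */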
/* Note hard registers in X that are used.  */

static void
mark_used_regs_combine (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (MEM_P (XEXP (x, 0)))
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* None of this applies to the stack, frame or arg pointers.  */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  */
	rtx testreg = SET_DEST (x);

	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (MEM_P (testreg))
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
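
/* For example (hypothetical pattern): in

       (set (mem:SI (reg:SI 1)) (plus:SI (reg:SI 2) (const_int 4)))

   hard regs 1 and 2 both end up in newpat_used_regs, while pseudos and
   the stack/frame/arg pointers are deliberately ignored.  */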
/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (unsigned int regno, rtx insn)
{
  rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    remove_note (insn, note);

  return note;
}
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with luid between FROM_LUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx to_insn,
	     rtx *pnotes)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno)->last_death;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && ! reg_referenced_p (x, maybe_kill_insn))
	return;

      if (where_dead
	  && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
	  && DF_INSN_LUID (where_dead) >= from_luid
	  && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      unsigned int deadregno = REGNO (XEXP (note, 0));
	      unsigned int deadend = END_HARD_REGNO (XEXP (note, 0));
	      unsigned int ourend = END_HARD_REGNO (x);
	      unsigned int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
	    }

	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
	    {
	      unsigned int ourend = END_HARD_REGNO (x);
	      unsigned int i, offset;
	      rtx oldnotes = 0;

	      if (note)
		offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (regno_reg_rtx[i],
			     maybe_kill_insn, from_luid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of every register in the expression is used by
	 this insn, so remove any old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (MEM_P (dest))
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
    }
}
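
/* Sketch of the common case (hypothetical insns): if reg 90 formerly
   died in

       I2: (set (reg 100) (plus (reg 90) (const_int 1)))

   and I2's source is merged into I3, calling move_deaths on the merged
   expression with I2's luid pulls the REG_DEAD note for reg 90 off I2
   and queues it on *PNOTES, for distribute_notes to re-place on I3.  */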
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (rtx x, rtx body)
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      unsigned int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
	target = SUBREG_REG (XEXP (dest, 0));
      else
	return 0;

      if (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (!REG_P (target))
	return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
	return target == x;

      endtregno = end_hard_regno (GET_MODE (target), tregno);
      endregno = end_hard_regno (GET_MODE (x), regno);

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
	return 1;

  return 0;
}
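
/* Example (hypothetical pattern): for

       (set (zero_extract:SI (reg:SI 100) (const_int 8) (const_int 0))
	    ...)

   reg_bitfield_target_p on (reg:SI 100) returns 1: only part of the
   register is written, so its REG_DEAD note must be treated more
   carefully than for a full definition.  */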
/* Return the next insn after INSN that is neither a NOTE nor a
   DEBUG_INSN.  This routine does not look inside SEQUENCEs.  */

static rtx
next_nonnote_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0)
	break;
      if (NOTE_P (insn))
	continue;
      if (DEBUG_INSN_P (insn))
	continue;
      break;
    }

  return insn;
}
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   of insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
		  rtx elim_i1)
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_BR_PROB:
	case REG_BR_PRED:
	  /* Doesn't matter much where we put this, as long as it's somewhere.
	     It is preferable to keep these notes on branches, which is most
	     likely to be i3.  */
	  place = i3;
	  break;

	case REG_VALUE_PROFILE:
	  /* Just get rid of this note, as it is unused later anyway.  */
	  break;

	case REG_NON_LOCAL_GOTO:
	  if (JUMP_P (i3))
	    place = i3;
	  else
	    {
	      gcc_assert (i2 && JUMP_P (i2));
	      place = i2;
	    }
	  break;

	case REG_EH_REGION:
	  /* These notes must remain with the call or trapping instruction.  */
	  if (CALL_P (i3))
	    place = i3;
	  else if (i2 && CALL_P (i2))
	    place = i2;
	  else
	    {
	      gcc_assert (flag_non_call_exceptions);
	      if (may_trap_p (i3))
		place = i3;
	      else if (i2 && may_trap_p (i2))
		place = i2;
	      /* ??? Otherwise assume we've combined things such that we
		 can now prove that the instructions can't trap.  Drop the
		 note in this case.  */
	    }
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	  /* These notes must remain with the call.  It should not be
	     possible for both I2 and I3 to be a call.  */
	  if (CALL_P (i3))
	    place = i3;
	  else
	    {
	      gcc_assert (i2 && CALL_P (i2));
	      place = i2;
	    }
	  break;

	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for registers which were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (REG_P (XEXP (note, 0))
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (REG_P (XEXP (note, 0))
			 ? find_regno_note (i3, REG_DEAD,
					    REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NOALIAS:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_LABEL_TARGET:
	case REG_LABEL_OPERAND:
	  /* This can show up in several ways -- either directly in the
	     pattern, or hidden off in the constant pool with (or without?)
	     a REG_EQUAL note.  */
	  /* ??? Ignore the without-reg_equal-note problem for now.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
	    place = i3;

	  if (i2
	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }

	  /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
	     as a JUMP_LABEL or decrement LABEL_NUSES if it's already
	     there.  */
	  if (place && JUMP_P (place)
	      && REG_NOTE_KIND (note) == REG_LABEL_TARGET
	      && (JUMP_LABEL (place) == NULL
		  || JUMP_LABEL (place) == XEXP (note, 0)))
	    {
	      rtx label = JUMP_LABEL (place);

	      if (!label)
		JUMP_LABEL (place) = XEXP (note, 0);
	      else if (LABEL_P (label))
		LABEL_NUSES (label)--;
	    }

	  if (place2 && JUMP_P (place2)
	      && REG_NOTE_KIND (note) == REG_LABEL_TARGET
	      && (JUMP_LABEL (place2) == NULL
		  || JUMP_LABEL (place2) == XEXP (note, 0)))
	    {
	      rtx label = JUMP_LABEL (place2);

	      if (!label)
		JUMP_LABEL (place2) = XEXP (note, 0);
	      else if (LABEL_P (label))
		LABEL_NUSES (label)--;
	      place2 = 0;
	    }
	  break;

	case REG_NONNEG:
	  /* This note says something about the value of a register prior
	     to the execution of an insn.  It is too much trouble to see
	     if the note is still correct in all situations.  It is better
	     to simply delete it.  */
	  break;

	case REG_DEAD:
	  /* If we replaced the right hand side of FROM_INSN with a
	     REG_EQUAL note, the original use of the dying register
	     will not have been combined into I3 and I2.  In such cases,
	     FROM_INSN is guaranteed to be the first of the combined
	     instructions, so we simply need to search back before
	     FROM_INSN for the previous use or set of this register,
	     then alter the notes there appropriately.

	     If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is nonzero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && from_insn == i2mod
	      && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
	    tem = from_insn;
	  else
	    {
	      if (from_insn
		  && CALL_P (from_insn)
		  && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
		place = from_insn;
	      else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
		place = i3;
	      else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
		       && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
		place = i2;
	      else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
			&& !(i2mod
			     && reg_overlap_mentioned_p (XEXP (note, 0),
							 i2mod_old_rhs)))
		       || rtx_equal_p (XEXP (note, 0), elim_i1))
		break;
	      tem = i3;
	    }

	  if (place == 0)
	    {
	      basic_block bb = this_basic_block;

	      for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem))
		{
		  if (!NONDEBUG_INSN_P (tem))
		    {
		      if (tem == BB_HEAD (bb))
			break;
		      continue;
		    }

		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  Don't delete sets to
		     global register vars.  */
		  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
		       || !global_regs[REGNO (XEXP (note, 0))])
		      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);
		      rtx inner_dest = 0;
#ifdef HAVE_cc0
		      rtx cc0_setter = NULL_RTX;
#endif

		      if (set != 0)
			for (inner_dest = SET_DEST (set);
			     (GET_CODE (inner_dest) == STRICT_LOW_PART
			      || GET_CODE (inner_dest) == SUBREG
			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
			     inner_dest = XEXP (inner_dest, 0))
			  ;

		      /* Verify that it was the set, and not a clobber that
			 modified the register.

			 CC0 targets must be careful to maintain setter/user
			 pairs.  If we cannot delete the setter due to side
			 effects, mark the user with an UNUSED note instead
			 of deleting it.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
			  )
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */
			  rtx old_notes = REG_NOTES (tem);

			  PATTERN (tem) = pc_rtx;
			  REG_NOTES (tem) = NULL;

			  distribute_notes (old_notes, tem, tem, NULL_RTX,
					    NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  SET_INSN_DELETED (tem);
			  if (tem == i2)
			    i2 = NULL_RTX;

#ifdef HAVE_cc0
			  /* Delete the setter too.  */
			  if (cc0_setter)
			    {
			      PATTERN (cc0_setter) = pc_rtx;
			      old_notes = REG_NOTES (cc0_setter);
			      REG_NOTES (cc0_setter) = NULL;

			      distribute_notes (old_notes, cc0_setter,
						cc0_setter, NULL_RTX,
						NULL_RTX, NULL_RTX);
			      distribute_links (LOG_LINKS (cc0_setter));

			      SET_INSN_DELETED (cc0_setter);
			      if (cc0_setter == i2)
				i2 = NULL_RTX;
			    }
#endif
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  Do not place a REG_DEAD note, even if
			     the register is also used here; that would not
			     match the algorithm used in lifetime analysis
			     and can cause the consistency check in the
			     scheduler to fail.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (CALL_P (tem)
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
			  && from_insn
			  && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }

		  if (tem == BB_HEAD (bb))
		    break;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.
	     We check here if it is set at all, not if it is totally replaced,
	     which is what `dead_or_set_p' checks, so we also check for it
	     being set partially.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      unsigned int regno = REGNO (XEXP (note, 0));
	      reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     last_death.  [I no longer understand why this is
		     being done.]  */
		  if (rsp->last_death != place)
		    rsp->last_death = 0;
		  place = 0;
		}
	      else
		rsp->last_death = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must arrange for an appropriate REG_DEAD
		 note to be added for it.  However, we can't just emit a USE
		 and tag the note to it, since the register might actually
		 be dead; so we recurse, and the recursive call then finds
		 the previous insn that used this register.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
		{
		  unsigned int endregno = END_HARD_REGNO (XEXP (note, 0));
		  int all_used = 1;
		  unsigned int i;

		  for (i = regno; i < endregno; i++)
		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			 && ! find_regno_fusage (place, USE, i))
			|| dead_or_set_regno_p (place, i))
		      all_used = 0;

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 not already dead or set.  */

		      for (i = regno; i < endregno;
			   i += hard_regno_nregs[i][reg_raw_mode[i]])
			{
			  rtx piece = regno_reg_rtx[i];
			  basic_block bb = this_basic_block;

			  if (! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    {
			      rtx new_note = alloc_reg_note (REG_DEAD, piece,
							     NULL_RTX);

			      distribute_notes (new_note, place, place,
						NULL_RTX, NULL_RTX, NULL_RTX);
			    }
			  else if (! refers_to_regno_p (i, i + 1,
							PATTERN (place), 0)
				   && ! find_regno_fusage (place, USE, i))
			    for (tem = PREV_INSN (place); ;
				 tem = PREV_INSN (tem))
			      {
				if (!NONDEBUG_INSN_P (tem))
				  {
				    if (tem == BB_HEAD (bb))
				      break;
				    continue;
				  }
				if (dead_or_set_p (tem, piece)
				    || reg_bitfield_target_p (piece,
							      PATTERN (tem)))
				  {
				    add_reg_note (tem, REG_UNUSED, piece);
				    break;
				  }
			      }
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  gcc_unreachable ();
	}

      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}

      if (place2)
	add_reg_note (place2, REG_NOTE_KIND (note), XEXP (note, 0));
    }
}
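
/* A typical flow (assumed example): after a two-insn combination in
   which reg 90 formerly died in I2 and I2 was absorbed into I3, the
   REG_DEAD note for reg 90 arrives here; the REG_DEAD case finds
   reg 90 referenced in PATTERN (i3) and re-attaches the note to I3,
   keeping the recorded death consistent with the new insn stream.  */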
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called to add a link
   pointing at I3 when I3's destination is changed.  */

static void
distribute_links (rtx links)
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only way that could happen is if
	 we replace I3, I2, and I1 by I3 and I2.  But in that case the
	 destination of I2 also remains unchanged.  */

      if (NOTE_P (XEXP (link, 0))
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
		     || BB_HEAD (this_basic_block->next_bb) != insn));
	   insn = NEXT_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  continue;
	else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (CALL_P (insn)
		 && find_reg_fusage (insn, USE, reg))
	  {
	    place = insn;
	    break;
	  }
	else if (INSN_P (insn) && reg_set_p (reg, insn))
	  break;

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
		added_links_insn = place;
	    }
	}
    }
}
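
/* Illustration (assumed scenario): if a link on insn C recorded "reg
   100 was set by insn A" and combination rewrote C so that it no
   longer uses reg 100, the link is re-homed by scanning forward from
   A to the next insn in the block that uses reg 100, and is dropped
   if a new set of reg 100 or the end of the block comes first.  */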
/* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
   Check whether the expression pointed to by LOC is a register or
   memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
   Otherwise return zero.  */

static int
unmentioned_reg_p_1 (rtx *loc, void *expr)
{
  rtx x = *loc;

  if (x != NULL_RTX
      && (REG_P (x) || MEM_P (x))
      && ! reg_mentioned_p (x, (rtx) expr))
    return 1;
  return 0;
}

/* Check for any register or memory mentioned in EQUIV that is not
   mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
   of EXPR where some registers may have been replaced by constants.  */

static bool
unmentioned_reg_p (rtx equiv, rtx expr)
{
  return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
}
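
/* Example (hypothetical operands): with

       EXPR  = (plus (reg 100) (reg 101))
       EQUIV = (plus (reg 100) (const_int 8))

   unmentioned_reg_p returns false, since EQUIV merely specializes
   EXPR; with EQUIV = (plus (reg 102) (const_int 8)) it returns true,
   because reg 102 never appears in EXPR.  */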
void
dump_combine_stats (FILE *file)
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (FILE *file)
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}

static bool
gate_handle_combine (void)
{
  return (optimize > 0);
}

/* Try combining insns through substitution.  */
static unsigned int
rest_of_handle_combine (void)
{
  int rebuild_jump_labels_after_combine;

  df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
  df_note_add_problem ();
  df_analyze ();

  regstat_init_n_sets_and_refs ();

  rebuild_jump_labels_after_combine
    = combine_instructions (get_insns (), max_reg_num ());

  /* Combining insns may have turned an indirect jump into a
     direct jump.  Rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_jump_labels_after_combine)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }

  regstat_free_n_sets_and_refs ();
  return 0;
}

struct rtl_opt_pass pass_combine =
{
 {
  RTL_PASS,
  "combine",                            /* name */
  gate_handle_combine,                  /* gate */
  rest_of_handle_combine,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE,                           /* tv_id */
  PROP_cfglayout,                       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,                     /* todo_flags_finish */
 }
};