/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
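/* The encoding uses four primitive bits: LT == 1, EQ == 2, GT == 4 and
   UNORD == 8; every other code is the bitwise OR of the outcomes it
   accepts.  For example, COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ == 3
   and COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD == 13,
   so the AND and OR of two comparisons can be computed as the bitwise
   AND and OR of their codes.  */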
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
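/* Worked example (illustration only, using 8-bit values for brevity):
   0x70 + 0x70 wraps to 0xE0, so A and B agree in sign while A and SUM
   differ; ~(a ^ b) and (a ^ sum) both have the sign bit set and the
   macro yields nonzero.  For 0x70 + 0x90 the operands differ in sign,
   ~(a ^ b) has a clear sign bit, and the macro yields zero: adding
   values of opposite signs can never overflow.  */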
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
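/* For illustration, assume HOST_BITS_PER_WIDE_INT == 32.  Then
   BASE == 0x10000 and the word 0x12345678 splits into LOWPART == 0x5678
   and HIGHPART == 0x1234; a two-word integer thus becomes four
   base-65536 digits, each small enough that a digit-by-digit product
   fits in a host word.  */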
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
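/* Example (illustrative values, assuming a 64-bit HOST_WIDE_INT): forcing
   l1 == 0x1F0, h1 == 0 into an 8-bit signed type first masks off the bits
   beyond the precision, leaving 0xF0; bit 7 is then set, so the value is
   sign extended to h1 == -1, l1 == 0xFFFFFFFFFFFFFFF0, i.e. -16.  The
   result differs from the argument, so overflow is reported.  */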
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates whether we are interested in overflow of the
   value: when >0 we are only interested in signed overflow, for <0 we
   are interested in any overflow.  OVERFLOWED indicates whether
   overflow has already occurred.  We force the value to be within
   range of the type (by setting to 0 or 1 all the bits outside the
   type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
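/* The low words are added modulo 2^HOST_BITS_PER_WIDE_INT; the sum wraps
   exactly when l < l1, and (l < l1) is therefore the carry into the high
   word.  For illustration with 32-bit host words: 0xFFFFFFFF + 0x2 gives
   l == 1, so l < l1 and a carry of 1 is added into h.  */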
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
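/* Note on the split shift above: when COUNT can be zero,
   l1 >> (HOST_BITS_PER_WIDE_INT - count) would shift by the full word
   width, which C leaves undefined.  Shifting by
   (HOST_BITS_PER_WIDE_INT - count - 1) and then by 1 more keeps each
   individual shift amount within 0 .. HOST_BITS_PER_WIDE_INT - 1 while
   producing the intended combined result (rshift_double below uses the
   same trick in the other direction).  */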
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
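/* Both rotate routines rely on the identity
   rotate_left (x, count) == (x << count) | (x >> (prec - count))
   within a PREC-bit field.  For example, with prec == 8, rotating
   0x96 (10010110) left by 3 gives 0xB0 | 0x04 == 0xB4 (10110100).  */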
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
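/* Example of the rounding modes (illustration only): dividing 7 by 2
   gives quotient 3 for TRUNC_DIV_EXPR and FLOOR_DIV_EXPR but 4 for
   CEIL_DIV_EXPR and ROUND_DIV_EXPR; dividing -7 by 2 gives -3 for
   TRUNC_DIV_EXPR and CEIL_DIV_EXPR but -4 for FLOOR_DIV_EXPR and
   ROUND_DIV_EXPR, since 2 * |rem| == 2 >= |den| rounds the halfway
   case away from zero.  */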
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
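/* For a 32-bit signed type this rejects exactly INT_MIN: its only set
   bit is the sign bit, so VAL equals (unsigned HOST_WIDE_INT) 1 << 31
   and the function returns false, matching the fact that -INT_MIN is
   not representable in the type.  Every other value negates safely.  */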
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
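/* Example (illustration only): splitting IN == a + 5 with CODE ==
   PLUS_EXPR returns VAR == a and sets *LITP to 5; splitting a - 5 sets
   *MINUS_LITP to 5 instead.  An operand that is TREE_CONSTANT but not a
   literal, e.g. the address of a static variable, lands in *CONP.  */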
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
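/* Example (illustrative, assuming a 64-bit HOST_WIDE_INT and a 32-bit
   signed type): folding PLUS_EXPR on INT_MAX and 1 produces the
   double-word value 0x80000000 with no double-word overflow, but with
   NOTRUNC == 0 force_fit_type_double notices the value no longer fits
   the 32-bit signed type and returns INT_MIN with TREE_OVERFLOW set.  */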
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
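/* The complex RDIV_EXPR case above is the textbook formula
     (r1 + i1*I) / (r2 + i2*I)
       == ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*I) / (r2*r2 + i2*i2),
   computed as T1 / MAGSQUARED and T2 / MAGSQUARED.  Note that the
   straightforward formula can overflow in the intermediate products
   for operands of extreme magnitude; no rescaling is attempted here.  */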
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = lang_hooks.types.signed_type (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
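/* Example (illustration only): for the sizetype constants 2 and 5 the
   unsigned subtraction 2 - 5 would wrap, so the code above computes
   5 - 2 == 3 instead, converts it to ssizetype, and subtracts it from
   zero, returning -3.  */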
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
                                converting a pointer.  */
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
1920 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1921 to an integer type. */
1923 static tree
1924 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1926 int overflow = 0;
1927 tree t;
1929 /* The following code implements the floating point to integer
1930 conversion rules required by the Java Language Specification,
1931 that IEEE NaNs are mapped to zero and values that overflow
1932 the target precision saturate, i.e. values greater than
1933 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1934 are mapped to INT_MIN. These semantics are allowed by the
1935 C and C++ standards that simply state that the behavior of
1936 FP-to-integer conversion is unspecified upon overflow. */
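/* For example, converting the REAL_CST 3.5e9 to a 32-bit signed type
   saturates to the type's maximum 2147483647, -3.5e9 saturates to the
   minimum, and a NaN converts to 0; TREE_OVERFLOW is set on the result
   in each of these cases. */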
1938 HOST_WIDE_INT high, low;
1939 REAL_VALUE_TYPE r;
1940 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1942 switch (code)
1944 case FIX_TRUNC_EXPR:
1945 real_trunc (&r, VOIDmode, &x);
1946 break;
1948 default:
1949 gcc_unreachable ();
1952 /* If R is NaN, return zero and show we have an overflow. */
1953 if (REAL_VALUE_ISNAN (r))
1955 overflow = 1;
1956 high = 0;
1957 low = 0;
1960 /* See if R is less than the lower bound or greater than the
1961 upper bound. */
1963 if (! overflow)
1965 tree lt = TYPE_MIN_VALUE (type);
1966 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1967 if (REAL_VALUES_LESS (r, l))
1969 overflow = 1;
1970 high = TREE_INT_CST_HIGH (lt);
1971 low = TREE_INT_CST_LOW (lt);
1975 if (! overflow)
1977 tree ut = TYPE_MAX_VALUE (type);
1978 if (ut)
1980 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1981 if (REAL_VALUES_LESS (u, r))
1983 overflow = 1;
1984 high = TREE_INT_CST_HIGH (ut);
1985 low = TREE_INT_CST_LOW (ut);
1990 if (! overflow)
1991 REAL_VALUE_TO_INT (&low, &high, r);
1993 t = force_fit_type_double (type, low, high, -1,
1994 overflow | TREE_OVERFLOW (arg1));
1995 return t;
1998 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1999 to another floating-point type. */
2001 static tree
2002 fold_convert_const_real_from_real (tree type, tree arg1)
2004 REAL_VALUE_TYPE value;
2005 tree t;
2007 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2008 t = build_real (type, value);
2010 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2011 return t;
2014 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2015 type TYPE. If no simplification can be done return NULL_TREE. */
2017 static tree
2018 fold_convert_const (enum tree_code code, tree type, tree arg1)
2020 if (TREE_TYPE (arg1) == type)
2021 return arg1;
2023 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2025 if (TREE_CODE (arg1) == INTEGER_CST)
2026 return fold_convert_const_int_from_int (type, arg1);
2027 else if (TREE_CODE (arg1) == REAL_CST)
2028 return fold_convert_const_int_from_real (code, type, arg1);
2030 else if (TREE_CODE (type) == REAL_TYPE)
2032 if (TREE_CODE (arg1) == INTEGER_CST)
2033 return build_real_from_int_cst (type, arg1);
2034 if (TREE_CODE (arg1) == REAL_CST)
2035 return fold_convert_const_real_from_real (type, arg1);
2037 return NULL_TREE;
2040 /* Construct a vector of zero elements of vector type TYPE. */
2042 static tree
2043 build_zero_vector (tree type)
2045 tree elem, list;
2046 int i, units;
2048 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2049 units = TYPE_VECTOR_SUBPARTS (type);
2051 list = NULL_TREE;
2052 for (i = 0; i < units; i++)
2053 list = tree_cons (NULL_TREE, elem, list);
2054 return build_vector (type, list);
2057 /* Convert expression ARG to type TYPE. Used by the middle-end for
2058 simple conversions in preference to calling the front-end's convert. */
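/* For example, fold_convert from a COMPLEX_TYPE to a REAL_TYPE keeps
   just the real part via a REALPART_EXPR, while fold_convert of
   integer_zero_node to a VECTOR_TYPE builds a vector of zero
   elements. */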
2060 tree
2061 fold_convert (tree type, tree arg)
2063 tree orig = TREE_TYPE (arg);
2064 tree tem;
2066 if (type == orig)
2067 return arg;
2069 if (TREE_CODE (arg) == ERROR_MARK
2070 || TREE_CODE (type) == ERROR_MARK
2071 || TREE_CODE (orig) == ERROR_MARK)
2072 return error_mark_node;
2074 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2075 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2076 TYPE_MAIN_VARIANT (orig)))
2077 return fold_build1 (NOP_EXPR, type, arg);
2079 switch (TREE_CODE (type))
2081 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2082 case POINTER_TYPE: case REFERENCE_TYPE:
2083 case OFFSET_TYPE:
2084 if (TREE_CODE (arg) == INTEGER_CST)
2086 tem = fold_convert_const (NOP_EXPR, type, arg);
2087 if (tem != NULL_TREE)
2088 return tem;
2090 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2091 || TREE_CODE (orig) == OFFSET_TYPE)
2092 return fold_build1 (NOP_EXPR, type, arg);
2093 if (TREE_CODE (orig) == COMPLEX_TYPE)
2095 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2096 return fold_convert (type, tem);
2098 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2099 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2100 return fold_build1 (NOP_EXPR, type, arg);
2102 case REAL_TYPE:
2103 if (TREE_CODE (arg) == INTEGER_CST)
2105 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2106 if (tem != NULL_TREE)
2107 return tem;
2109 else if (TREE_CODE (arg) == REAL_CST)
2111 tem = fold_convert_const (NOP_EXPR, type, arg);
2112 if (tem != NULL_TREE)
2113 return tem;
2116 switch (TREE_CODE (orig))
2118 case INTEGER_TYPE:
2119 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2120 case POINTER_TYPE: case REFERENCE_TYPE:
2121 return fold_build1 (FLOAT_EXPR, type, arg);
2123 case REAL_TYPE:
2124 return fold_build1 (NOP_EXPR, type, arg);
2126 case COMPLEX_TYPE:
2127 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2128 return fold_convert (type, tem);
2130 default:
2131 gcc_unreachable ();
2134 case COMPLEX_TYPE:
2135 switch (TREE_CODE (orig))
2137 case INTEGER_TYPE:
2138 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2139 case POINTER_TYPE: case REFERENCE_TYPE:
2140 case REAL_TYPE:
2141 return build2 (COMPLEX_EXPR, type,
2142 fold_convert (TREE_TYPE (type), arg),
2143 fold_convert (TREE_TYPE (type), integer_zero_node));
2144 case COMPLEX_TYPE:
2146 tree rpart, ipart;
2148 if (TREE_CODE (arg) == COMPLEX_EXPR)
2150 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2151 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2152 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2155 arg = save_expr (arg);
2156 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2157 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2158 rpart = fold_convert (TREE_TYPE (type), rpart);
2159 ipart = fold_convert (TREE_TYPE (type), ipart);
2160 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2163 default:
2164 gcc_unreachable ();
2167 case VECTOR_TYPE:
2168 if (integer_zerop (arg))
2169 return build_zero_vector (type);
2170 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2171 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2172 || TREE_CODE (orig) == VECTOR_TYPE);
2173 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2175 case VOID_TYPE:
2176 tem = fold_ignored_result (arg);
2177 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2178 return tem;
2179 return fold_build1 (NOP_EXPR, type, tem);
2181 default:
2182 gcc_unreachable ();
2186 /* Return false if X can be assumed not to be an lvalue, true
2187 otherwise. */
2189 static bool
2190 maybe_lvalue_p (tree x)
2192 /* We only need to wrap lvalue tree codes. */
2193 switch (TREE_CODE (x))
2195 case VAR_DECL:
2196 case PARM_DECL:
2197 case RESULT_DECL:
2198 case LABEL_DECL:
2199 case FUNCTION_DECL:
2200 case SSA_NAME:
2202 case COMPONENT_REF:
2203 case INDIRECT_REF:
2204 case ALIGN_INDIRECT_REF:
2205 case MISALIGNED_INDIRECT_REF:
2206 case ARRAY_REF:
2207 case ARRAY_RANGE_REF:
2208 case BIT_FIELD_REF:
2209 case OBJ_TYPE_REF:
2211 case REALPART_EXPR:
2212 case IMAGPART_EXPR:
2213 case PREINCREMENT_EXPR:
2214 case PREDECREMENT_EXPR:
2215 case SAVE_EXPR:
2216 case TRY_CATCH_EXPR:
2217 case WITH_CLEANUP_EXPR:
2218 case COMPOUND_EXPR:
2219 case MODIFY_EXPR:
2220 case GIMPLE_MODIFY_STMT:
2221 case TARGET_EXPR:
2222 case COND_EXPR:
2223 case BIND_EXPR:
2224 case MIN_EXPR:
2225 case MAX_EXPR:
2226 break;
2228 default:
2229 /* Assume the worst for front-end tree codes. */
2230 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2231 break;
2232 return false;
2235 return true;
2238 /* Return an expr equal to X but certainly not valid as an lvalue. */
2240 tree
2241 non_lvalue (tree x)
2243 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2244 us. */
2245 if (in_gimple_form)
2246 return x;
2248 if (! maybe_lvalue_p (x))
2249 return x;
2250 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2253 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2254 Zero means allow extended lvalues. */
2256 int pedantic_lvalues;
2258 /* When pedantic, return an expr equal to X but certainly not valid as a
2259 pedantic lvalue. Otherwise, return X. */
2261 static tree
2262 pedantic_non_lvalue (tree x)
2264 if (pedantic_lvalues)
2265 return non_lvalue (x);
2266 else
2267 return x;
2270 /* Given a tree comparison code, return the code that is the logical inverse
2271 of the given code. It is not safe to do this for floating-point
2272 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2273 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
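/* For example, with NaNs honored the inverse of LT_EXPR is UNGE_EXPR,
   since !(a < b) also holds when either operand is a NaN; if
   flag_trapping_math is set we refuse (ERROR_MARK) because the
   unordered form would not trap where the original did. */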
2275 enum tree_code
2276 invert_tree_comparison (enum tree_code code, bool honor_nans)
2278 if (honor_nans && flag_trapping_math)
2279 return ERROR_MARK;
2281 switch (code)
2283 case EQ_EXPR:
2284 return NE_EXPR;
2285 case NE_EXPR:
2286 return EQ_EXPR;
2287 case GT_EXPR:
2288 return honor_nans ? UNLE_EXPR : LE_EXPR;
2289 case GE_EXPR:
2290 return honor_nans ? UNLT_EXPR : LT_EXPR;
2291 case LT_EXPR:
2292 return honor_nans ? UNGE_EXPR : GE_EXPR;
2293 case LE_EXPR:
2294 return honor_nans ? UNGT_EXPR : GT_EXPR;
2295 case LTGT_EXPR:
2296 return UNEQ_EXPR;
2297 case UNEQ_EXPR:
2298 return LTGT_EXPR;
2299 case UNGT_EXPR:
2300 return LE_EXPR;
2301 case UNGE_EXPR:
2302 return LT_EXPR;
2303 case UNLT_EXPR:
2304 return GE_EXPR;
2305 case UNLE_EXPR:
2306 return GT_EXPR;
2307 case ORDERED_EXPR:
2308 return UNORDERED_EXPR;
2309 case UNORDERED_EXPR:
2310 return ORDERED_EXPR;
2311 default:
2312 gcc_unreachable ();
2316 /* Similar, but return the comparison that results if the operands are
2317 swapped. This is safe for floating-point. */
2319 enum tree_code
2320 swap_tree_comparison (enum tree_code code)
2322 switch (code)
2324 case EQ_EXPR:
2325 case NE_EXPR:
2326 case ORDERED_EXPR:
2327 case UNORDERED_EXPR:
2328 case LTGT_EXPR:
2329 case UNEQ_EXPR:
2330 return code;
2331 case GT_EXPR:
2332 return LT_EXPR;
2333 case GE_EXPR:
2334 return LE_EXPR;
2335 case LT_EXPR:
2336 return GT_EXPR;
2337 case LE_EXPR:
2338 return GE_EXPR;
2339 case UNGT_EXPR:
2340 return UNLT_EXPR;
2341 case UNGE_EXPR:
2342 return UNLE_EXPR;
2343 case UNLT_EXPR:
2344 return UNGT_EXPR;
2345 case UNLE_EXPR:
2346 return UNGE_EXPR;
2347 default:
2348 gcc_unreachable ();
2353 /* Convert a comparison tree code from an enum tree_code representation
2354 into a compcode bit-based encoding. This function is the inverse of
2355 compcode_to_comparison. */
2357 static enum comparison_code
2358 comparison_to_compcode (enum tree_code code)
2360 switch (code)
2362 case LT_EXPR:
2363 return COMPCODE_LT;
2364 case EQ_EXPR:
2365 return COMPCODE_EQ;
2366 case LE_EXPR:
2367 return COMPCODE_LE;
2368 case GT_EXPR:
2369 return COMPCODE_GT;
2370 case NE_EXPR:
2371 return COMPCODE_NE;
2372 case GE_EXPR:
2373 return COMPCODE_GE;
2374 case ORDERED_EXPR:
2375 return COMPCODE_ORD;
2376 case UNORDERED_EXPR:
2377 return COMPCODE_UNORD;
2378 case UNLT_EXPR:
2379 return COMPCODE_UNLT;
2380 case UNEQ_EXPR:
2381 return COMPCODE_UNEQ;
2382 case UNLE_EXPR:
2383 return COMPCODE_UNLE;
2384 case UNGT_EXPR:
2385 return COMPCODE_UNGT;
2386 case LTGT_EXPR:
2387 return COMPCODE_LTGT;
2388 case UNGE_EXPR:
2389 return COMPCODE_UNGE;
2390 default:
2391 gcc_unreachable ();
2395 /* Convert a compcode bit-based encoding of a comparison operator back
2396 to GCC's enum tree_code representation. This function is the
2397 inverse of comparison_to_compcode. */
2399 static enum tree_code
2400 compcode_to_comparison (enum comparison_code code)
2402 switch (code)
2404 case COMPCODE_LT:
2405 return LT_EXPR;
2406 case COMPCODE_EQ:
2407 return EQ_EXPR;
2408 case COMPCODE_LE:
2409 return LE_EXPR;
2410 case COMPCODE_GT:
2411 return GT_EXPR;
2412 case COMPCODE_NE:
2413 return NE_EXPR;
2414 case COMPCODE_GE:
2415 return GE_EXPR;
2416 case COMPCODE_ORD:
2417 return ORDERED_EXPR;
2418 case COMPCODE_UNORD:
2419 return UNORDERED_EXPR;
2420 case COMPCODE_UNLT:
2421 return UNLT_EXPR;
2422 case COMPCODE_UNEQ:
2423 return UNEQ_EXPR;
2424 case COMPCODE_UNLE:
2425 return UNLE_EXPR;
2426 case COMPCODE_UNGT:
2427 return UNGT_EXPR;
2428 case COMPCODE_LTGT:
2429 return LTGT_EXPR;
2430 case COMPCODE_UNGE:
2431 return UNGE_EXPR;
2432 default:
2433 gcc_unreachable ();
2437 /* Return a tree for the comparison which is the combination of
2438 doing the AND or OR (depending on CODE) of the two operations LCODE
2439 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2440 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2441 if this makes the transformation invalid. */
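/* For example, for (a < b) || (a == b) the encodings COMPCODE_LT and
   COMPCODE_EQ are ORed into COMPCODE_LE, so when NaNs need not be
   honored the combination folds to a <= b. */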
2443 tree
2444 combine_comparisons (enum tree_code code, enum tree_code lcode,
2445 enum tree_code rcode, tree truth_type,
2446 tree ll_arg, tree lr_arg)
2448 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2449 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2450 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2451 enum comparison_code compcode;
2453 switch (code)
2455 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2456 compcode = lcompcode & rcompcode;
2457 break;
2459 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2460 compcode = lcompcode | rcompcode;
2461 break;
2463 default:
2464 return NULL_TREE;
2467 if (!honor_nans)
2469 /* Eliminate unordered comparisons, as well as LTGT and ORD
2470 which are not used unless the mode has NaNs. */
2471 compcode &= ~COMPCODE_UNORD;
2472 if (compcode == COMPCODE_LTGT)
2473 compcode = COMPCODE_NE;
2474 else if (compcode == COMPCODE_ORD)
2475 compcode = COMPCODE_TRUE;
2477 else if (flag_trapping_math)
2479 /* Check that the original operation and the optimized ones will trap
2480 under the same condition. */
2481 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2482 && (lcompcode != COMPCODE_EQ)
2483 && (lcompcode != COMPCODE_ORD);
2484 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2485 && (rcompcode != COMPCODE_EQ)
2486 && (rcompcode != COMPCODE_ORD);
2487 bool trap = (compcode & COMPCODE_UNORD) == 0
2488 && (compcode != COMPCODE_EQ)
2489 && (compcode != COMPCODE_ORD);
2491 /* In a short-circuited boolean expression the LHS might be
2492 such that the RHS, if evaluated, will never trap. For
2493 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2494 if neither x nor y is NaN. (This is a mixed blessing: for
2495 example, the expression above will never trap, hence
2496 optimizing it to x < y would be invalid). */
2497 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2498 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2499 rtrap = false;
2501 /* If the comparison was short-circuited, and only the RHS
2502 trapped, we may now generate a spurious trap. */
2503 if (rtrap && !ltrap
2504 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2505 return NULL_TREE;
2507 /* If we changed the conditions that cause a trap, we lose. */
2508 if ((ltrap || rtrap) != trap)
2509 return NULL_TREE;
2512 if (compcode == COMPCODE_TRUE)
2513 return constant_boolean_node (true, truth_type);
2514 else if (compcode == COMPCODE_FALSE)
2515 return constant_boolean_node (false, truth_type);
2516 else
2517 return fold_build2 (compcode_to_comparison (compcode),
2518 truth_type, ll_arg, lr_arg);
2521 /* Return nonzero if CODE is a tree code that represents a truth value. */
2523 static int
2524 truth_value_p (enum tree_code code)
2526 return (TREE_CODE_CLASS (code) == tcc_comparison
2527 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2528 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2529 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2532 /* Return nonzero if two operands (typically of the same tree node)
2533 are necessarily equal. If either argument has side-effects this
2534 function returns zero. FLAGS modifies behavior as follows:
2536 If OEP_ONLY_CONST is set, only return nonzero for constants.
2537 This function tests whether the operands are indistinguishable;
2538 it does not test whether they are equal using C's == operation.
2539 The distinction is important for IEEE floating point, because
2540 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2541 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2543 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2544 even though it may hold multiple values during a function.
2545 This is because a GCC tree node guarantees that nothing else is
2546 executed between the evaluation of its "operands" (which may often
2547 be evaluated in arbitrary order). Hence if the operands themselves
2548 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2549 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2550 unset means assuming isochronic (or instantaneous) tree equivalence.
2551 Unless comparing arbitrary expression trees, such as from different
2552 statements, this flag can usually be left unset.
2554 If OEP_PURE_SAME is set, then pure functions with identical arguments
2555 are considered the same. It is used when the caller has other ways
2556 to ensure that global memory is unchanged in between. */
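/* For example, a + b and b + a are equal here because PLUS_EXPR is
   commutative, while the REAL_CSTs -0.0 and 0.0 compare equal only
   when the mode does not honor signed zeros. */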
2558 int
2559 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2561 /* If either is ERROR_MARK, they aren't equal. */
2562 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2563 return 0;
2565 /* If the types don't have the same signedness, then we can't consider
2566 them equal. We must check this before the STRIP_NOPS calls
2567 because they may change the signedness of the arguments. */
2568 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2569 return 0;
2571 /* If the types don't have the same precision, then it is not safe
2572 to strip NOPs. */
2573 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2574 return 0;
2576 STRIP_NOPS (arg0);
2577 STRIP_NOPS (arg1);
2579 /* In case both args are comparisons but with different comparison
2580 code, try to swap the comparison operands of one arg to produce
2581 a match and compare that variant. */
2582 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2583 && COMPARISON_CLASS_P (arg0)
2584 && COMPARISON_CLASS_P (arg1))
2586 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2588 if (TREE_CODE (arg0) == swap_code)
2589 return operand_equal_p (TREE_OPERAND (arg0, 0),
2590 TREE_OPERAND (arg1, 1), flags)
2591 && operand_equal_p (TREE_OPERAND (arg0, 1),
2592 TREE_OPERAND (arg1, 0), flags);
2595 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2596 /* This is needed for conversions and for COMPONENT_REF.
2597 Might as well play it safe and always test this. */
2598 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2599 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2600 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2601 return 0;
2603 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2604 We don't care about side effects in that case because the SAVE_EXPR
2605 takes care of that for us. In all other cases, two expressions are
2606 equal if they have no side effects. If we have two identical
2607 expressions with side effects that should be treated the same due
2608 to the only side effects being identical SAVE_EXPR's, that will
2609 be detected in the recursive calls below. */
2610 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2611 && (TREE_CODE (arg0) == SAVE_EXPR
2612 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2613 return 1;
2615 /* Next handle constant cases, those for which we can return 1 even
2616 if ONLY_CONST is set. */
2617 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2618 switch (TREE_CODE (arg0))
2620 case INTEGER_CST:
2621 return tree_int_cst_equal (arg0, arg1);
2623 case REAL_CST:
2624 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2625 TREE_REAL_CST (arg1)))
2626 return 1;
2629 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2631 /* If we do not distinguish between signed and unsigned zero,
2632 consider them equal. */
2633 if (real_zerop (arg0) && real_zerop (arg1))
2634 return 1;
2636 return 0;
2638 case VECTOR_CST:
2640 tree v1, v2;
2642 v1 = TREE_VECTOR_CST_ELTS (arg0);
2643 v2 = TREE_VECTOR_CST_ELTS (arg1);
2644 while (v1 && v2)
2646 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2647 flags))
2648 return 0;
2649 v1 = TREE_CHAIN (v1);
2650 v2 = TREE_CHAIN (v2);
2653 return v1 == v2;
2656 case COMPLEX_CST:
2657 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2658 flags)
2659 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2660 flags));
2662 case STRING_CST:
2663 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2664 && ! memcmp (TREE_STRING_POINTER (arg0),
2665 TREE_STRING_POINTER (arg1),
2666 TREE_STRING_LENGTH (arg0)));
2668 case ADDR_EXPR:
2669 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2670 0);
2671 default:
2672 break;
2675 if (flags & OEP_ONLY_CONST)
2676 return 0;
2678 /* Define macros to test an operand from arg0 and arg1 for equality and a
2679 variant that allows null and views null as being different from any
2680 non-null value. In the latter case, if either is null, they both
2681 must be; otherwise, do the normal comparison. */
2682 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2683 TREE_OPERAND (arg1, N), flags)
2685 #define OP_SAME_WITH_NULL(N) \
2686 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2687 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2689 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2691 case tcc_unary:
2692 /* Two conversions are equal only if signedness and modes match. */
2693 switch (TREE_CODE (arg0))
2695 case NOP_EXPR:
2696 case CONVERT_EXPR:
2697 case FIX_TRUNC_EXPR:
2698 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2699 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2700 return 0;
2701 break;
2702 default:
2703 break;
2706 return OP_SAME (0);
2709 case tcc_comparison:
2710 case tcc_binary:
2711 if (OP_SAME (0) && OP_SAME (1))
2712 return 1;
2714 /* For commutative ops, allow the other order. */
2715 return (commutative_tree_code (TREE_CODE (arg0))
2716 && operand_equal_p (TREE_OPERAND (arg0, 0),
2717 TREE_OPERAND (arg1, 1), flags)
2718 && operand_equal_p (TREE_OPERAND (arg0, 1),
2719 TREE_OPERAND (arg1, 0), flags));
2721 case tcc_reference:
2722 /* If either of the pointer (or reference) expressions we are
2723 dereferencing contain a side effect, these cannot be equal. */
2724 if (TREE_SIDE_EFFECTS (arg0)
2725 || TREE_SIDE_EFFECTS (arg1))
2726 return 0;
2728 switch (TREE_CODE (arg0))
2730 case INDIRECT_REF:
2731 case ALIGN_INDIRECT_REF:
2732 case MISALIGNED_INDIRECT_REF:
2733 case REALPART_EXPR:
2734 case IMAGPART_EXPR:
2735 return OP_SAME (0);
2737 case ARRAY_REF:
2738 case ARRAY_RANGE_REF:
2739 /* Operands 2 and 3 may be null. */
2740 return (OP_SAME (0)
2741 && OP_SAME (1)
2742 && OP_SAME_WITH_NULL (2)
2743 && OP_SAME_WITH_NULL (3));
2745 case COMPONENT_REF:
2746 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2747 may be NULL when we're called to compare MEM_EXPRs. */
2748 return OP_SAME_WITH_NULL (0)
2749 && OP_SAME (1)
2750 && OP_SAME_WITH_NULL (2);
2752 case BIT_FIELD_REF:
2753 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2755 default:
2756 return 0;
2759 case tcc_expression:
2760 switch (TREE_CODE (arg0))
2762 case ADDR_EXPR:
2763 case TRUTH_NOT_EXPR:
2764 return OP_SAME (0);
2766 case TRUTH_ANDIF_EXPR:
2767 case TRUTH_ORIF_EXPR:
2768 return OP_SAME (0) && OP_SAME (1);
2770 case TRUTH_AND_EXPR:
2771 case TRUTH_OR_EXPR:
2772 case TRUTH_XOR_EXPR:
2773 if (OP_SAME (0) && OP_SAME (1))
2774 return 1;
2776 /* Otherwise take into account this is a commutative operation. */
2777 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2778 TREE_OPERAND (arg1, 1), flags)
2779 && operand_equal_p (TREE_OPERAND (arg0, 1),
2780 TREE_OPERAND (arg1, 0), flags));
2782 case CALL_EXPR:
2783 /* If the CALL_EXPRs call different functions, then they
2784 clearly cannot be equal. */
2785 if (!OP_SAME (0))
2786 return 0;
2789 unsigned int cef = call_expr_flags (arg0);
2790 if (flags & OEP_PURE_SAME)
2791 cef &= ECF_CONST | ECF_PURE;
2792 else
2793 cef &= ECF_CONST;
2794 if (!cef)
2795 return 0;
2798 /* Now see if all the arguments are the same. operand_equal_p
2799 does not handle TREE_LIST, so we walk the operands here
2800 feeding them to operand_equal_p. */
2801 arg0 = TREE_OPERAND (arg0, 1);
2802 arg1 = TREE_OPERAND (arg1, 1);
2803 while (arg0 && arg1)
2805 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2806 flags))
2807 return 0;
2809 arg0 = TREE_CHAIN (arg0);
2810 arg1 = TREE_CHAIN (arg1);
2813 /* If we get here and both argument lists are exhausted
2814 then the CALL_EXPRs are equal. */
2815 return ! (arg0 || arg1);
2817 default:
2818 return 0;
2821 case tcc_declaration:
2822 /* Consider __builtin_sqrt equal to sqrt. */
2823 return (TREE_CODE (arg0) == FUNCTION_DECL
2824 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2825 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2826 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2828 default:
2829 return 0;
2832 #undef OP_SAME
2833 #undef OP_SAME_WITH_NULL
2836 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2837 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2839 When in doubt, return 0. */
2841 static int
2842 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2844 int unsignedp1, unsignedpo;
2845 tree primarg0, primarg1, primother;
2846 unsigned int correct_width;
2848 if (operand_equal_p (arg0, arg1, 0))
2849 return 1;
2851 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2852 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2853 return 0;
2855 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2856 and see if the inner values are the same. This removes any
2857 signedness comparison, which doesn't matter here. */
2858 primarg0 = arg0, primarg1 = arg1;
2859 STRIP_NOPS (primarg0);
2860 STRIP_NOPS (primarg1);
2861 if (operand_equal_p (primarg0, primarg1, 0))
2862 return 1;
2864 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2865 actual comparison operand, ARG0.
2867 First throw away any conversions to wider types
2868 already present in the operands. */
2870 primarg1 = get_narrower (arg1, &unsignedp1);
2871 primother = get_narrower (other, &unsignedpo);
2873 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2874 if (unsignedp1 == unsignedpo
2875 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2876 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2878 tree type = TREE_TYPE (arg0);
2880 /* Make sure shorter operand is extended the right way
2881 to match the longer operand. */
2882 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2883 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2885 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2886 return 1;
2889 return 0;
2892 /* See if ARG is an expression that is either a comparison or is performing
2893 arithmetic on comparisons. The comparisons must only be comparing
2894 two different values, which will be stored in *CVAL1 and *CVAL2; if
2895 they are nonzero it means that some operands have already been found.
2896 No variables may be used anywhere else in the expression except in the
2897 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2898 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2900 If this is true, return 1. Otherwise, return zero. */
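/* For example, for (x == y) || (x < y) this returns 1 with *CVAL1 == x
   and *CVAL2 == y, whereas (x < y) && (y < z) fails because three
   distinct values appear in the comparisons. */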
2902 static int
2903 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2905 enum tree_code code = TREE_CODE (arg);
2906 enum tree_code_class class = TREE_CODE_CLASS (code);
2908 /* We can handle some of the tcc_expression cases here. */
2909 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2910 class = tcc_unary;
2911 else if (class == tcc_expression
2912 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2913 || code == COMPOUND_EXPR))
2914 class = tcc_binary;
2916 else if (class == tcc_expression && code == SAVE_EXPR
2917 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2919 /* If we've already found a CVAL1 or CVAL2, this expression is
2920 too complex to handle. */
2921 if (*cval1 || *cval2)
2922 return 0;
2924 class = tcc_unary;
2925 *save_p = 1;
2928 switch (class)
2930 case tcc_unary:
2931 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2933 case tcc_binary:
2934 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2935 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2936 cval1, cval2, save_p));
2938 case tcc_constant:
2939 return 1;
2941 case tcc_expression:
2942 if (code == COND_EXPR)
2943 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2944 cval1, cval2, save_p)
2945 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2946 cval1, cval2, save_p)
2947 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2948 cval1, cval2, save_p));
2949 return 0;
2951 case tcc_comparison:
2952 /* First see if we can handle the first operand, then the second. For
2953 the second operand, we know *CVAL1 can't be zero. It must be that
2954 one side of the comparison is each of the values; test for the
2955 case where this isn't true by failing if the two operands
2956 are the same. */
2958 if (operand_equal_p (TREE_OPERAND (arg, 0),
2959 TREE_OPERAND (arg, 1), 0))
2960 return 0;
2962 if (*cval1 == 0)
2963 *cval1 = TREE_OPERAND (arg, 0);
2964 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2965 ;
2966 else if (*cval2 == 0)
2967 *cval2 = TREE_OPERAND (arg, 0);
2968 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2969 ;
2970 else
2971 return 0;
2973 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2974 ;
2975 else if (*cval2 == 0)
2976 *cval2 = TREE_OPERAND (arg, 1);
2977 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2978 ;
2979 else
2980 return 0;
2982 return 1;
2984 default:
2985 return 0;
2989 /* ARG is a tree that is known to contain just arithmetic operations and
2990 comparisons. Evaluate the operations in the tree substituting NEW0 for
2991 any occurrence of OLD0 as an operand of a comparison and likewise for
2992 NEW1 and OLD1. */
2994 static tree
2995 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2997 tree type = TREE_TYPE (arg);
2998 enum tree_code code = TREE_CODE (arg);
2999 enum tree_code_class class = TREE_CODE_CLASS (code);
3001 /* We can handle some of the tcc_expression cases here. */
3002 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3003 class = tcc_unary;
3004 else if (class == tcc_expression
3005 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3006 class = tcc_binary;
3008 switch (class)
3010 case tcc_unary:
3011 return fold_build1 (code, type,
3012 eval_subst (TREE_OPERAND (arg, 0),
3013 old0, new0, old1, new1));
3015 case tcc_binary:
3016 return fold_build2 (code, type,
3017 eval_subst (TREE_OPERAND (arg, 0),
3018 old0, new0, old1, new1),
3019 eval_subst (TREE_OPERAND (arg, 1),
3020 old0, new0, old1, new1));
3022 case tcc_expression:
3023 switch (code)
3025 case SAVE_EXPR:
3026 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3028 case COMPOUND_EXPR:
3029 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3031 case COND_EXPR:
3032 return fold_build3 (code, type,
3033 eval_subst (TREE_OPERAND (arg, 0),
3034 old0, new0, old1, new1),
3035 eval_subst (TREE_OPERAND (arg, 1),
3036 old0, new0, old1, new1),
3037 eval_subst (TREE_OPERAND (arg, 2),
3038 old0, new0, old1, new1));
3039 default:
3040 break;
3042 /* Fall through - ??? */
3044 case tcc_comparison:
3046 tree arg0 = TREE_OPERAND (arg, 0);
3047 tree arg1 = TREE_OPERAND (arg, 1);
3049 /* We need to check both for exact equality and tree equality. The
3050 former will be true if the operand has a side-effect. In that
3051 case, we know the operand occurred exactly once. */
3053 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3054 arg0 = new0;
3055 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3056 arg0 = new1;
3058 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3059 arg1 = new0;
3060 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3061 arg1 = new1;
3063 return fold_build2 (code, type, arg0, arg1);
3066 default:
3067 return arg;
3071 /* Return a tree for the case when the result of an expression is RESULT
3072 converted to TYPE and OMITTED was previously an operand of the expression
3073 but is now not needed (e.g., we folded OMITTED * 0).
3075 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3076 the conversion of RESULT to TYPE. */
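/* For example, if x * 0 was folded to 0 but x contains a call with
   side effects, the result is COMPOUND_EXPR <x, 0> so that x is still
   evaluated; a side-effect-free x is simply dropped. */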
3078 tree
3079 omit_one_operand (tree type, tree result, tree omitted)
3081 tree t = fold_convert (type, result);
3083 if (TREE_SIDE_EFFECTS (omitted))
3084 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3086 return non_lvalue (t);
3089 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3091 static tree
3092 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3094 tree t = fold_convert (type, result);
3096 if (TREE_SIDE_EFFECTS (omitted))
3097 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3099 return pedantic_non_lvalue (t);
3102 /* Return a tree for the case when the result of an expression is RESULT
3103 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3104 of the expression but are now not needed.
3106 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3107 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3108 evaluated before OMITTED2. Otherwise, if neither has side effects,
3109 just do the conversion of RESULT to TYPE. */
3111 tree
3112 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3114 tree t = fold_convert (type, result);
3116 if (TREE_SIDE_EFFECTS (omitted2))
3117 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3118 if (TREE_SIDE_EFFECTS (omitted1))
3119 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3121 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3125 /* Return a simplified tree node for the truth-negation of ARG. This
3126 never alters ARG itself. We assume that ARG is an operation that
3127 returns a truth value (0 or 1).
3129 FIXME: one would think we would fold the result, but it causes
3130 problems with the dominator optimizer. */
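/* For example, !(a && b) becomes !a || !b, and an integer !(a <= b)
   becomes a > b; a trapping floating-point !(a < b) is left for the
   caller, since its unordered inverse would not trap on NaNs. */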
3132 tree
3133 fold_truth_not_expr (tree arg)
3135 tree type = TREE_TYPE (arg);
3136 enum tree_code code = TREE_CODE (arg);
3138 /* If this is a comparison, we can simply invert it, except for
3139 floating-point non-equality comparisons, in which case we just
3140 enclose a TRUTH_NOT_EXPR around what we have. */
3142 if (TREE_CODE_CLASS (code) == tcc_comparison)
3144 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3145 if (FLOAT_TYPE_P (op_type)
3146 && flag_trapping_math
3147 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3148 && code != NE_EXPR && code != EQ_EXPR)
3149 return NULL_TREE;
3150 else
3152 code = invert_tree_comparison (code,
3153 HONOR_NANS (TYPE_MODE (op_type)));
3154 if (code == ERROR_MARK)
3155 return NULL_TREE;
3156 else
3157 return build2 (code, type,
3158 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3162 switch (code)
3164 case INTEGER_CST:
3165 return constant_boolean_node (integer_zerop (arg), type);
3167 case TRUTH_AND_EXPR:
3168 return build2 (TRUTH_OR_EXPR, type,
3169 invert_truthvalue (TREE_OPERAND (arg, 0)),
3170 invert_truthvalue (TREE_OPERAND (arg, 1)));
3172 case TRUTH_OR_EXPR:
3173 return build2 (TRUTH_AND_EXPR, type,
3174 invert_truthvalue (TREE_OPERAND (arg, 0)),
3175 invert_truthvalue (TREE_OPERAND (arg, 1)));
3177 case TRUTH_XOR_EXPR:
3178 /* Here we can invert either operand. We invert the first operand
3179 unless the second operand is a TRUTH_NOT_EXPR in which case our
3180 result is the XOR of the first operand with the inside of the
3181 negation of the second operand. */
3183 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3184 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3185 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3186 else
3187 return build2 (TRUTH_XOR_EXPR, type,
3188 invert_truthvalue (TREE_OPERAND (arg, 0)),
3189 TREE_OPERAND (arg, 1));
3191 case TRUTH_ANDIF_EXPR:
3192 return build2 (TRUTH_ORIF_EXPR, type,
3193 invert_truthvalue (TREE_OPERAND (arg, 0)),
3194 invert_truthvalue (TREE_OPERAND (arg, 1)));
3196 case TRUTH_ORIF_EXPR:
3197 return build2 (TRUTH_ANDIF_EXPR, type,
3198 invert_truthvalue (TREE_OPERAND (arg, 0)),
3199 invert_truthvalue (TREE_OPERAND (arg, 1)));
3201 case TRUTH_NOT_EXPR:
3202 return TREE_OPERAND (arg, 0);
3204 case COND_EXPR:
3206 tree arg1 = TREE_OPERAND (arg, 1);
3207 tree arg2 = TREE_OPERAND (arg, 2);
3208 /* A COND_EXPR may have a throw as one operand, which
3209 then has void type. Just leave void operands
3210 as they are. */
3211 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3212 VOID_TYPE_P (TREE_TYPE (arg1))
3213 ? arg1 : invert_truthvalue (arg1),
3214 VOID_TYPE_P (TREE_TYPE (arg2))
3215 ? arg2 : invert_truthvalue (arg2));
3218 case COMPOUND_EXPR:
3219 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3220 invert_truthvalue (TREE_OPERAND (arg, 1)));
3222 case NON_LVALUE_EXPR:
3223 return invert_truthvalue (TREE_OPERAND (arg, 0));
3225 case NOP_EXPR:
3226 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3227 return build1 (TRUTH_NOT_EXPR, type, arg);
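/* ... fall through ... */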
3229 case CONVERT_EXPR:
3230 case FLOAT_EXPR:
3231 return build1 (TREE_CODE (arg), type,
3232 invert_truthvalue (TREE_OPERAND (arg, 0)));
3234 case BIT_AND_EXPR:
3235 if (!integer_onep (TREE_OPERAND (arg, 1)))
3236 break;
3237 return build2 (EQ_EXPR, type, arg,
3238 build_int_cst (type, 0));
3240 case SAVE_EXPR:
3241 return build1 (TRUTH_NOT_EXPR, type, arg);
3243 case CLEANUP_POINT_EXPR:
3244 return build1 (CLEANUP_POINT_EXPR, type,
3245 invert_truthvalue (TREE_OPERAND (arg, 0)));
3247 default:
3248 break;
3251 return NULL_TREE;
3254 /* Return a simplified tree node for the truth-negation of ARG. This
3255 never alters ARG itself. We assume that ARG is an operation that
3256 returns a truth value (0 or 1).
3258 FIXME: one would think we would fold the result, but it causes
3259 problems with the dominator optimizer. */
3261 tree
3262 invert_truthvalue (tree arg)
3264 tree tem;
3266 if (TREE_CODE (arg) == ERROR_MARK)
3267 return arg;
3269 tem = fold_truth_not_expr (arg);
3270 if (!tem)
3271 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3273 return tem;
3276 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3277 operands are another bit-wise operation with a common input. If so,
3278 distribute the bit operations to save an operation and possibly two if
3279 constants are involved. For example, convert
3280 (A | B) & (A | C) into A | (B & C)
3281 Further simplification will occur if B and C are constants.
3283 If this optimization cannot be done, 0 will be returned. */
3285 static tree
3286 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3288 tree common;
3289 tree left, right;
3291 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3292 || TREE_CODE (arg0) == code
3293 || (TREE_CODE (arg0) != BIT_AND_EXPR
3294 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3295 return 0;
3297 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3299 common = TREE_OPERAND (arg0, 0);
3300 left = TREE_OPERAND (arg0, 1);
3301 right = TREE_OPERAND (arg1, 1);
3303 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3305 common = TREE_OPERAND (arg0, 0);
3306 left = TREE_OPERAND (arg0, 1);
3307 right = TREE_OPERAND (arg1, 0);
3309 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3311 common = TREE_OPERAND (arg0, 1);
3312 left = TREE_OPERAND (arg0, 0);
3313 right = TREE_OPERAND (arg1, 1);
3315 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3317 common = TREE_OPERAND (arg0, 1);
3318 left = TREE_OPERAND (arg0, 0);
3319 right = TREE_OPERAND (arg1, 0);
3321 else
3322 return 0;
3324 return fold_build2 (TREE_CODE (arg0), type, common,
3325 fold_build2 (code, type, left, right));
3328 /* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
3329 simplify a binary operation with code CODE. This optimization is unsafe. */
3330 static tree
3331 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3333 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3334 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3336 /* (A / C) +- (B / C) -> (A +- B) / C. */
3337 if (mul0 == mul1
3338 && operand_equal_p (TREE_OPERAND (arg0, 1),
3339 TREE_OPERAND (arg1, 1), 0))
3340 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3341 fold_build2 (code, type,
3342 TREE_OPERAND (arg0, 0),
3343 TREE_OPERAND (arg1, 0)),
3344 TREE_OPERAND (arg0, 1));
3346 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3347 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3348 TREE_OPERAND (arg1, 0), 0)
3349 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3350 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3352 REAL_VALUE_TYPE r0, r1;
3353 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3354 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3355 if (!mul0)
3356 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3357 if (!mul1)
3358 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3359 real_arithmetic (&r0, code, &r0, &r1);
3360 return fold_build2 (MULT_EXPR, type,
3361 TREE_OPERAND (arg0, 0),
3362 build_real (type, r0));
3365 return NULL_TREE;
3368 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3369 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3371 static tree
3372 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3373 int unsignedp)
3375 tree result;
3377 if (bitpos == 0)
3379 tree size = TYPE_SIZE (TREE_TYPE (inner));
3380 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3381 || POINTER_TYPE_P (TREE_TYPE (inner)))
3382 && host_integerp (size, 0)
3383 && tree_low_cst (size, 0) == bitsize)
3384 return fold_convert (type, inner);
3387 result = build3 (BIT_FIELD_REF, type, inner,
3388 size_int (bitsize), bitsize_int (bitpos));
3390 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3392 return result;
3395 /* Optimize a bit-field compare.
3397 There are two cases: First is a compare against a constant and the
3398 second is a comparison of two items where the fields are at the same
3399 bit position relative to the start of a chunk (byte, halfword, word)
3400 large enough to contain it. In these cases we can avoid the shift
3401 implicit in bitfield extractions.
3403 For constants, we emit a compare of the shifted constant with the
3404 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3405 compared. For two fields at the same position, we do the ANDs with the
3406 similar mask and compare the result of the ANDs.
3408 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3409 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3410 are the left and right operands of the comparison, respectively.
3412 If the optimization described above can be done, we return the resulting
3413 tree. Otherwise we return zero. */
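/* For example, for struct { unsigned f : 3; } s, the test s.f == 5 can
   become (w & m) == cst, where w stands for the memory word holding the
   field, m for the field's mask shifted into place, and cst for the
   constant 5 shifted to the field's position, avoiding the extraction
   shifts. */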
3415 static tree
3416 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3417 tree lhs, tree rhs)
3419 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3420 tree type = TREE_TYPE (lhs);
3421 tree signed_type, unsigned_type;
3422 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3423 enum machine_mode lmode, rmode, nmode;
3424 int lunsignedp, runsignedp;
3425 int lvolatilep = 0, rvolatilep = 0;
3426 tree linner, rinner = NULL_TREE;
3427 tree mask;
3428 tree offset;
3430 /* Get all the information about the extractions being done. If the bit size
3431 is the same as the size of the underlying object, we aren't doing an
3432 extraction at all and so can do nothing. We also don't want to
3433 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3434 then will no longer be able to replace it. */
3435 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3436 &lunsignedp, &lvolatilep, false);
3437 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3438 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3439 return 0;
3441 if (!const_p)
3443 /* If this is not a constant, we can only do something if bit positions,
3444 sizes, and signedness are the same. */
3445 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3446 &runsignedp, &rvolatilep, false);
3448 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3449 || lunsignedp != runsignedp || offset != 0
3450 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3451 return 0;
3454 /* See if we can find a mode to refer to this field. We should be able to,
3455 but fail if we can't. */
3456 nmode = get_best_mode (lbitsize, lbitpos,
3457 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3458 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3459 TYPE_ALIGN (TREE_TYPE (rinner))),
3460 word_mode, lvolatilep || rvolatilep);
3461 if (nmode == VOIDmode)
3462 return 0;
3464 /* Set signed and unsigned types of the precision of this mode for the
3465 shifts below. */
3466 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3467 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3469 /* Compute the bit position and size for the new reference and our offset
3470 within it. If the new reference is the same size as the original, we
3471 won't optimize anything, so return zero. */
3472 nbitsize = GET_MODE_BITSIZE (nmode);
3473 nbitpos = lbitpos & ~ (nbitsize - 1);
3474 lbitpos -= nbitpos;
3475 if (nbitsize == lbitsize)
3476 return 0;
3478 if (BYTES_BIG_ENDIAN)
3479 lbitpos = nbitsize - lbitsize - lbitpos;
3481 /* Make the mask to be used against the extracted field. */
3482 mask = build_int_cst_type (unsigned_type, -1);
3483 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3484 mask = const_binop (RSHIFT_EXPR, mask,
3485 size_int (nbitsize - lbitsize - lbitpos), 0);
3487 if (! const_p)
3488 /* If not comparing with constant, just rework the comparison
3489 and return. */
3490 return fold_build2 (code, compare_type,
3491 fold_build2 (BIT_AND_EXPR, unsigned_type,
3492 make_bit_field_ref (linner,
3493 unsigned_type,
3494 nbitsize, nbitpos,
3495 1),
3496 mask),
3497 fold_build2 (BIT_AND_EXPR, unsigned_type,
3498 make_bit_field_ref (rinner,
3499 unsigned_type,
3500 nbitsize, nbitpos,
3501 1),
3502 mask));
3504 /* Otherwise, we are handling the constant case. See if the constant is too
3505 big for the field. Warn and return a tree for 0 (false) if so. We do
3506 this not only for its own sake, but to avoid having to test for this
3507 error case below. If we didn't, we might generate wrong code.
3509 For unsigned fields, the constant shifted right by the field length should
3510 be all zero. For signed fields, the high-order bits should agree with
3511 the sign bit. */
3513 if (lunsignedp)
3515 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3516 fold_convert (unsigned_type, rhs),
3517 size_int (lbitsize), 0)))
3519 warning (0, "comparison is always %d due to width of bit-field",
3520 code == NE_EXPR);
3521 return constant_boolean_node (code == NE_EXPR, compare_type);
3524 else
3526 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3527 size_int (lbitsize - 1), 0);
3528 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3530 warning (0, "comparison is always %d due to width of bit-field",
3531 code == NE_EXPR);
3532 return constant_boolean_node (code == NE_EXPR, compare_type);
3536 /* Single-bit compares should always be against zero. */
3537 if (lbitsize == 1 && ! integer_zerop (rhs))
3539 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3540 rhs = build_int_cst (type, 0);
3543 /* Make a new bitfield reference, shift the constant over the
3544 appropriate number of bits and mask it with the computed mask
3545 (in case this was a signed field). If we changed it, make a new one. */
3546 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3547 if (lvolatilep)
3549 TREE_SIDE_EFFECTS (lhs) = 1;
3550 TREE_THIS_VOLATILE (lhs) = 1;
3553 rhs = const_binop (BIT_AND_EXPR,
3554 const_binop (LSHIFT_EXPR,
3555 fold_convert (unsigned_type, rhs),
3556 size_int (lbitpos), 0),
3557 mask, 0);
3559 return build2 (code, compare_type,
3560 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3561 rhs);
3564 /* Subroutine for fold_truthop: decode a field reference.
3566 If EXP is a comparison reference, we return the innermost reference.
3568 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3569 set to the starting bit number.
3571 If the innermost field can be completely contained in a mode-sized
3572 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3574 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3575 otherwise it is not changed.
3577 *PUNSIGNEDP is set to the signedness of the field.
3579 *PMASK is set to the mask used. This is either contained in a
3580 BIT_AND_EXPR or derived from the width of the field.
3582 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3584 Return 0 if this is not a component reference or is one that we can't
3585 do anything with. */
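/* For example, for EXP = s.f & 0x3 with s.f an unsigned bit-field,
   this returns the underlying object, sets *PBITSIZE and *PBITPOS to
   describe the field, sets *PAND_MASK to 0x3, and sets *PMASK to 0x3
   merged with the all-ones mask of the field's width. */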
3587 static tree
3588 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3589 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3590 int *punsignedp, int *pvolatilep,
3591 tree *pmask, tree *pand_mask)
3593 tree outer_type = 0;
3594 tree and_mask = 0;
3595 tree mask, inner, offset;
3596 tree unsigned_type;
3597 unsigned int precision;
3599 /* All the optimizations using this function assume integer fields.
3600 There are problems with FP fields since the type_for_size call
3601 below can fail for, e.g., XFmode. */
3602 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3603 return 0;
3605 /* We are interested in the bare arrangement of bits, so strip everything
3606 that doesn't affect the machine mode. However, record the type of the
3607 outermost expression if it may matter below. */
3608 if (TREE_CODE (exp) == NOP_EXPR
3609 || TREE_CODE (exp) == CONVERT_EXPR
3610 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3611 outer_type = TREE_TYPE (exp);
3612 STRIP_NOPS (exp);
3614 if (TREE_CODE (exp) == BIT_AND_EXPR)
3616 and_mask = TREE_OPERAND (exp, 1);
3617 exp = TREE_OPERAND (exp, 0);
3618 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3619 if (TREE_CODE (and_mask) != INTEGER_CST)
3620 return 0;
3623 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3624 punsignedp, pvolatilep, false);
3625 if ((inner == exp && and_mask == 0)
3626 || *pbitsize < 0 || offset != 0
3627 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3628 return 0;
3630 /* If the number of bits in the reference is the same as the bitsize of
3631 the outer type, then the outer type gives the signedness. Otherwise
3632 (in case of a small bitfield) the signedness is unchanged. */
3633 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3634 *punsignedp = TYPE_UNSIGNED (outer_type);
3636 /* Compute the mask to access the bitfield. */
3637 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3638 precision = TYPE_PRECISION (unsigned_type);
3640 mask = build_int_cst_type (unsigned_type, -1);
3642 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3643 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3645 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3646 if (and_mask != 0)
3647 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3648 fold_convert (unsigned_type, and_mask), mask);
3650 *pmask = mask;
3651 *pand_mask = and_mask;
3652 return inner;
3655 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3656 bit positions. */
3658 static int
3659 all_ones_mask_p (tree mask, int size)
3661 tree type = TREE_TYPE (mask);
3662 unsigned int precision = TYPE_PRECISION (type);
3663 tree tmask;
3665 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3667 return
3668 tree_int_cst_equal (mask,
3669 const_binop (RSHIFT_EXPR,
3670 const_binop (LSHIFT_EXPR, tmask,
3671 size_int (precision - size),
3672 0),
3673 size_int (precision - size), 0));
3676 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3677 represents the sign bit of EXP's type. If EXP represents a sign
3678 or zero extension, also test VAL against the unextended type.
3679 The return value is the (sub)expression whose sign bit is VAL,
3680 or NULL_TREE otherwise. */
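/* For example, if EXP has a 32-bit integral type, VAL must be the
   constant 0x80000000; if EXP is a NOP_EXPR widening a 16-bit value,
   VAL is also tested against that type's sign bit 0x8000. */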
3682 static tree
3683 sign_bit_p (tree exp, tree val)
3685 unsigned HOST_WIDE_INT mask_lo, lo;
3686 HOST_WIDE_INT mask_hi, hi;
3687 int width;
3688 tree t;
3690 /* Tree EXP must have an integral type. */
3691 t = TREE_TYPE (exp);
3692 if (! INTEGRAL_TYPE_P (t))
3693 return NULL_TREE;
3695 /* Tree VAL must be an integer constant. */
3696 if (TREE_CODE (val) != INTEGER_CST
3697 || TREE_OVERFLOW (val))
3698 return NULL_TREE;
3700 width = TYPE_PRECISION (t);
3701 if (width > HOST_BITS_PER_WIDE_INT)
3703 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3704 lo = 0;
3706 mask_hi = ((unsigned HOST_WIDE_INT) -1
3707 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3708 mask_lo = -1;
3710 else
3712 hi = 0;
3713 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3715 mask_hi = 0;
3716 mask_lo = ((unsigned HOST_WIDE_INT) -1
3717 >> (HOST_BITS_PER_WIDE_INT - width));
3720 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3721 treat VAL as if it were unsigned. */
3722 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3723 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3724 return exp;
3726 /* Handle extension from a narrower type. */
3727 if (TREE_CODE (exp) == NOP_EXPR
3728 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3729 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3731 return NULL_TREE;
3734 /* Subroutine for fold_truthop: determine if an operand is simple enough
3735 to be evaluated unconditionally. */
3737 static int
3738 simple_operand_p (tree exp)
3740 /* Strip any conversions that don't change the machine mode. */
3741 STRIP_NOPS (exp);
3743 return (CONSTANT_CLASS_P (exp)
3744 || TREE_CODE (exp) == SSA_NAME
3745 || (DECL_P (exp)
3746 && ! TREE_ADDRESSABLE (exp)
3747 && ! TREE_THIS_VOLATILE (exp)
3748 && ! DECL_NONLOCAL (exp)
3749 /* Don't regard global variables as simple. They may be
3750 allocated in ways unknown to the compiler (shared memory,
3751 #pragma weak, etc). */
3752 && ! TREE_PUBLIC (exp)
3753 && ! DECL_EXTERNAL (exp)
3754 /* Loading a static variable is unduly expensive, but global
3755 registers aren't expensive. */
3756 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3759 /* The following functions are subroutines to fold_range_test and allow it to
3760 try to change a logical combination of comparisons into a range test.
3762 For example, both
3763 X == 2 || X == 3 || X == 4 || X == 5
3764 and
3765 X >= 2 && X <= 5
3766 are converted to
3767 (unsigned) (X - 2) <= 3
3769 We describe each set of comparisons as being either inside or outside
3770 a range, using a variable named like IN_P, and then describe the
3771 range with a lower and upper bound. If one of the bounds is omitted,
3772 it represents either the highest or lowest value of the type.
3774 In the comments below, we represent a range by two numbers in brackets
3775 preceded by a "+" to designate being inside that range, or a "-" to
3776 designate being outside that range, so the condition can be inverted by
3777 flipping the prefix. An omitted bound is represented by a "-". For
3778 example, "- [-, 10]" means being outside the range starting at the lowest
3779 possible value and ending at 10, in other words, being greater than 10.
3780 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3781 always false.
3783 We set up things so that the missing bounds are handled in a consistent
3784 manner so neither a missing bound nor "true" and "false" need to be
3785 handled using a special case. */
3787 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3788 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3789 and UPPER1_P are nonzero if the respective argument is an upper bound
3790 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3791 must be specified for a comparison. ARG1 will be converted to ARG0's
3792 type if both are specified. */
3794 static tree
3795 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3796 tree arg1, int upper1_p)
3798 tree tem;
3799 int result;
3800 int sgn0, sgn1;
3802 /* If neither arg represents infinity, do the normal operation.
3803 Else, if not a comparison, return infinity. Else handle the special
3804 comparison rules. Note that most of the cases below won't occur, but
3805 are handled for consistency. */
3807 if (arg0 != 0 && arg1 != 0)
3809 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3810 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3811 STRIP_NOPS (tem);
3812 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3815 if (TREE_CODE_CLASS (code) != tcc_comparison)
3816 return 0;
3818 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3819 for neither. In real mathematics we cannot assume open ended ranges
3820 are the same. But this is computer arithmetic, where numbers are finite.
3821 We can therefore substitute for any missing bound a value Z lying
3822 beyond every representable number. This permits
3823 us to treat unbounded ranges as equal. */
3824 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3825 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3826 switch (code)
3828 case EQ_EXPR:
3829 result = sgn0 == sgn1;
3830 break;
3831 case NE_EXPR:
3832 result = sgn0 != sgn1;
3833 break;
3834 case LT_EXPR:
3835 result = sgn0 < sgn1;
3836 break;
3837 case LE_EXPR:
3838 result = sgn0 <= sgn1;
3839 break;
3840 case GT_EXPR:
3841 result = sgn0 > sgn1;
3842 break;
3843 case GE_EXPR:
3844 result = sgn0 >= sgn1;
3845 break;
3846 default:
3847 gcc_unreachable ();
3850 return constant_boolean_node (result, type);
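/* Illustrative sketch, not part of fold-const.c: how the SGN encoding
   above compares bounds when one or both are missing.  A missing lower
   bound behaves like -Z and a missing upper bound like +Z, with Z beyond
   every representable number.  The demo_* names are hypothetical.  */
#include <assert.h>

/* Model a bound as (present, value, upper); SGN is 0 when the bound is
   present, else -1 for a missing lower bound and +1 for a missing upper
   bound, mirroring the sgn0/sgn1 computation in range_binop.  */
static int
demo_bound_lt (int present0, int v0, int upper0,
               int present1, int v1, int upper1)
{
  int sgn0 = present0 ? 0 : (upper0 ? 1 : -1);
  int sgn1 = present1 ? 0 : (upper1 ? 1 : -1);
  if (present0 && present1)
    return v0 < v1;
  return sgn0 < sgn1;
}

static void
demo_bound_check (void)
{
  /* A missing lower bound is below any finite value...  */
  assert (demo_bound_lt (0, 0, 0, 1, -12345, 0));
  /* ... and a missing upper bound is above any finite value.  */
  assert (!demo_bound_lt (0, 0, 1, 1, 99999, 0));
}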
3853 /* Given EXP, a logical expression, set the range it is testing into
3854 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3855 actually being tested. *PLOW and *PHIGH will be made of the same type
3856 as the returned expression. If EXP is not a comparison, we will most
3857 likely not be returning a useful value and range. */
3859 static tree
3860 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3862 enum tree_code code;
3863 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3864 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3865 int in_p, n_in_p;
3866 tree low, high, n_low, n_high;
3868 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3869 and see if we can refine the range. Some of the cases below may not
3870 happen, but it doesn't seem worth worrying about this. We "continue"
3871 the outer loop when we've changed something; otherwise we "break"
3872 the switch, which will "break" the while. */
3874 in_p = 0;
3875 low = high = build_int_cst (TREE_TYPE (exp), 0);
3877 while (1)
3879 code = TREE_CODE (exp);
3880 exp_type = TREE_TYPE (exp);
3882 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3884 if (TREE_CODE_LENGTH (code) > 0)
3885 arg0 = TREE_OPERAND (exp, 0);
3886 if (TREE_CODE_CLASS (code) == tcc_comparison
3887 || TREE_CODE_CLASS (code) == tcc_unary
3888 || TREE_CODE_CLASS (code) == tcc_binary)
3889 arg0_type = TREE_TYPE (arg0);
3890 if (TREE_CODE_CLASS (code) == tcc_binary
3891 || TREE_CODE_CLASS (code) == tcc_comparison
3892 || (TREE_CODE_CLASS (code) == tcc_expression
3893 && TREE_CODE_LENGTH (code) > 1))
3894 arg1 = TREE_OPERAND (exp, 1);
3897 switch (code)
3899 case TRUTH_NOT_EXPR:
3900 in_p = ! in_p, exp = arg0;
3901 continue;
3903 case EQ_EXPR: case NE_EXPR:
3904 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3905 /* We can only do something if the range is testing for zero
3906 and if the second operand is an integer constant. Note that
3907 saying something is "in" the range we make is done by
3908 complementing IN_P, since IN_P starts out describing the initial
3909 case of being not equal to zero; "out" is leaving it alone. */
3910 if (low == 0 || high == 0
3911 || ! integer_zerop (low) || ! integer_zerop (high)
3912 || TREE_CODE (arg1) != INTEGER_CST)
3913 break;
3915 switch (code)
3917 case NE_EXPR: /* - [c, c] */
3918 low = high = arg1;
3919 break;
3920 case EQ_EXPR: /* + [c, c] */
3921 in_p = ! in_p, low = high = arg1;
3922 break;
3923 case GT_EXPR: /* - [-, c] */
3924 low = 0, high = arg1;
3925 break;
3926 case GE_EXPR: /* + [c, -] */
3927 in_p = ! in_p, low = arg1, high = 0;
3928 break;
3929 case LT_EXPR: /* - [c, -] */
3930 low = arg1, high = 0;
3931 break;
3932 case LE_EXPR: /* + [-, c] */
3933 in_p = ! in_p, low = 0, high = arg1;
3934 break;
3935 default:
3936 gcc_unreachable ();
3939 /* If this is an unsigned comparison, we also know that EXP is
3940 greater than or equal to zero. We base the range tests we make
3941 on that fact, so we record it here so we can parse existing
3942 range tests. We test arg0_type since often the return type
3943 of, e.g. EQ_EXPR, is boolean. */
3944 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3946 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3947 in_p, low, high, 1,
3948 build_int_cst (arg0_type, 0),
3949 NULL_TREE))
3950 break;
3952 in_p = n_in_p, low = n_low, high = n_high;
3954 /* If the high bound is missing, but we have a nonzero low
3955 bound, reverse the range so it goes from zero to the low bound
3956 minus 1. */
3957 if (high == 0 && low && ! integer_zerop (low))
3959 in_p = ! in_p;
3960 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3961 integer_one_node, 0);
3962 low = build_int_cst (arg0_type, 0);
3966 exp = arg0;
3967 continue;
3969 case NEGATE_EXPR:
3970 /* (-x) IN [a,b] -> x in [-b, -a] */
3971 n_low = range_binop (MINUS_EXPR, exp_type,
3972 build_int_cst (exp_type, 0),
3973 0, high, 1);
3974 n_high = range_binop (MINUS_EXPR, exp_type,
3975 build_int_cst (exp_type, 0),
3976 0, low, 0);
3977 low = n_low, high = n_high;
3978 exp = arg0;
3979 continue;
3981 case BIT_NOT_EXPR:
3982 /* ~ X -> -X - 1 */
3983 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3984 build_int_cst (exp_type, 1));
3985 continue;
3987 case PLUS_EXPR: case MINUS_EXPR:
3988 if (TREE_CODE (arg1) != INTEGER_CST)
3989 break;
3991 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3992 move a constant to the other side. */
3993 if (!TYPE_UNSIGNED (arg0_type)
3994 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3995 break;
3997 /* If EXP is signed, any overflow in the computation is undefined,
3998 so we don't worry about it so long as our computations on
3999 the bounds don't overflow. For unsigned, overflow is defined
4000 and this is exactly the right thing. */
4001 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4002 arg0_type, low, 0, arg1, 0);
4003 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4004 arg0_type, high, 1, arg1, 0);
4005 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4006 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4007 break;
4009 /* Check for an unsigned range which has wrapped around the maximum
4010 value thus making n_high < n_low, and normalize it. */
4011 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4013 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4014 integer_one_node, 0);
4015 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4016 integer_one_node, 0);
4018 /* If the range is of the form +/- [ x+1, x ], we won't
4019 be able to normalize it. But then, it represents the
4020 whole range or the empty set, so make it
4021 +/- [ -, - ]. */
4022 if (tree_int_cst_equal (n_low, low)
4023 && tree_int_cst_equal (n_high, high))
4024 low = high = 0;
4025 else
4026 in_p = ! in_p;
4028 else
4029 low = n_low, high = n_high;
4031 exp = arg0;
4032 continue;
4034 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4035 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4036 break;
4038 if (! INTEGRAL_TYPE_P (arg0_type)
4039 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4040 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4041 break;
4043 n_low = low, n_high = high;
4045 if (n_low != 0)
4046 n_low = fold_convert (arg0_type, n_low);
4048 if (n_high != 0)
4049 n_high = fold_convert (arg0_type, n_high);
4052 /* If we're converting arg0 from an unsigned type to exp's
4053 signed type, we will be doing the comparison as unsigned.
4054 The tests above have already verified that LOW and HIGH
4055 are both positive.
4057 So we have to ensure that we will handle large unsigned
4058 values the same way that the current signed bounds treat
4059 negative values. */
4061 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4063 tree high_positive;
4064 tree equiv_type = lang_hooks.types.type_for_mode
4065 (TYPE_MODE (arg0_type), 1);
4067 /* A range without an upper bound is, naturally, unbounded.
4068 Since convert would have cropped a very large value, use
4069 the max value for the destination type. */
4070 high_positive
4071 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4072 : TYPE_MAX_VALUE (arg0_type);
4074 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4075 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4076 fold_convert (arg0_type,
4077 high_positive),
4078 build_int_cst (arg0_type, 1));
4080 /* If the low bound is specified, "and" the range with the
4081 range for which the original unsigned value will be
4082 positive. */
4083 if (low != 0)
4085 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4086 1, n_low, n_high, 1,
4087 fold_convert (arg0_type,
4088 integer_zero_node),
4089 high_positive))
4090 break;
4092 in_p = (n_in_p == in_p);
4094 else
4096 /* Otherwise, "or" the range with the range of the input
4097 that will be interpreted as negative. */
4098 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4099 0, n_low, n_high, 1,
4100 fold_convert (arg0_type,
4101 integer_zero_node),
4102 high_positive))
4103 break;
4105 in_p = (in_p != n_in_p);
4109 exp = arg0;
4110 low = n_low, high = n_high;
4111 continue;
4113 default:
4114 break;
4117 break;
4120 /* If EXP is a constant, we can evaluate whether this is true or false. */
4121 if (TREE_CODE (exp) == INTEGER_CST)
4123 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4124 exp, 0, low, 0))
4125 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4126 exp, 1, high, 1)));
4127 low = high = 0;
4128 exp = 0;
4131 *pin_p = in_p, *plow = low, *phigh = high;
4132 return exp;
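/* Illustrative sketch, not part of fold-const.c: the unsigned wrap-around
   normalization in the PLUS_EXPR case above.  For 8-bit unsigned x,
   "x + 10 <= 19" maps the range [0, 19] of x+10 back to x in [246, 9],
   which wraps; the code normalizes that to NOT in [10, 245].  The demo_*
   names are hypothetical.  */
#include <assert.h>
#include <stdint.h>

static void
demo_wraparound_check (void)
{
  unsigned v;
  for (v = 0; v < 256; v++)
    {
      uint8_t x = (uint8_t) v;
      int before = (uint8_t) (x + 10) <= 19;  /* + [0, 19] of x + 10 */
      int after = !(x >= 10 && x <= 245);     /* - [10, 245] of x    */
      assert (before == after);
    }
}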
4135 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4136 type, TYPE, return an expression to test if EXP is in (or out of, depending
4137 on IN_P) the range. Return 0 if the test couldn't be created. */
4139 static tree
4140 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4142 tree etype = TREE_TYPE (exp);
4143 tree value;
4145 #ifdef HAVE_canonicalize_funcptr_for_compare
4146 /* Disable this optimization for function pointer expressions
4147 on targets that require function pointer canonicalization. */
4148 if (HAVE_canonicalize_funcptr_for_compare
4149 && TREE_CODE (etype) == POINTER_TYPE
4150 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4151 return NULL_TREE;
4152 #endif
4154 if (! in_p)
4156 value = build_range_check (type, exp, 1, low, high);
4157 if (value != 0)
4158 return invert_truthvalue (value);
4160 return 0;
4163 if (low == 0 && high == 0)
4164 return build_int_cst (type, 1);
4166 if (low == 0)
4167 return fold_build2 (LE_EXPR, type, exp,
4168 fold_convert (etype, high));
4170 if (high == 0)
4171 return fold_build2 (GE_EXPR, type, exp,
4172 fold_convert (etype, low));
4174 if (operand_equal_p (low, high, 0))
4175 return fold_build2 (EQ_EXPR, type, exp,
4176 fold_convert (etype, low));
4178 if (integer_zerop (low))
4180 if (! TYPE_UNSIGNED (etype))
4182 etype = lang_hooks.types.unsigned_type (etype);
4183 high = fold_convert (etype, high);
4184 exp = fold_convert (etype, exp);
4186 return build_range_check (type, exp, 1, 0, high);
4189 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4190 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4192 unsigned HOST_WIDE_INT lo;
4193 HOST_WIDE_INT hi;
4194 int prec;
4196 prec = TYPE_PRECISION (etype);
4197 if (prec <= HOST_BITS_PER_WIDE_INT)
4199 hi = 0;
4200 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4202 else
4204 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4205 lo = (unsigned HOST_WIDE_INT) -1;
4208 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4210 if (TYPE_UNSIGNED (etype))
4212 etype = lang_hooks.types.signed_type (etype);
4213 exp = fold_convert (etype, exp);
4215 return fold_build2 (GT_EXPR, type, exp,
4216 build_int_cst (etype, 0));
4220 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4221 This requires wrap-around arithmetic for the type of the expression. */
4222 switch (TREE_CODE (etype))
4224 case INTEGER_TYPE:
4225 /* There is no requirement that LOW be within the range of ETYPE
4226 if the latter is a subtype. It must, however, be within the base
4227 type of ETYPE. So be sure we do the subtraction in that type. */
4228 if (TREE_TYPE (etype))
4229 etype = TREE_TYPE (etype);
4230 break;
4232 case ENUMERAL_TYPE:
4233 case BOOLEAN_TYPE:
4234 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4235 TYPE_UNSIGNED (etype));
4236 break;
4238 default:
4239 break;
4242 /* If we don't have wrap-around arithmetic up front, try to force it. */
4243 if (TREE_CODE (etype) == INTEGER_TYPE
4244 && !TYPE_OVERFLOW_WRAPS (etype))
4246 tree utype, minv, maxv;
4248 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4249 for the type in question, as we rely on this here. */
4250 utype = lang_hooks.types.unsigned_type (etype);
4251 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4252 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4253 integer_one_node, 1);
4254 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4256 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4257 minv, 1, maxv, 1)))
4258 etype = utype;
4259 else
4260 return 0;
4263 high = fold_convert (etype, high);
4264 low = fold_convert (etype, low);
4265 exp = fold_convert (etype, exp);
4267 value = const_binop (MINUS_EXPR, high, low, 0);
4269 if (value != 0 && !TREE_OVERFLOW (value))
4270 return build_range_check (type,
4271 fold_build2 (MINUS_EXPR, etype, exp, low),
4272 1, build_int_cst (etype, 0), value);
4274 return 0;
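/* Illustrative sketch, not part of fold-const.c: the low == 1,
   high == signed-max special case handled above.  For an 8-bit value,
   1 <= c && c <= 127 is the same test as (signed char) c > 0.  This
   assumes the usual two's-complement behavior of converting an
   out-of-range value to signed char, as GCC targets provide; the demo_*
   names are hypothetical.  */
#include <assert.h>

static void
demo_signed_char_check (void)
{
  unsigned c;
  for (c = 0; c < 256; c++)
    assert (((c >= 1 && c <= 127) ? 1 : 0)
            == (((signed char) c > 0) ? 1 : 0));
}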
4277 /* Return the predecessor of VAL in its type, handling the infinite case. */
4279 static tree
4280 range_predecessor (tree val)
4282 tree type = TREE_TYPE (val);
4284 if (INTEGRAL_TYPE_P (type)
4285 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4286 return 0;
4287 else
4288 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4291 /* Return the successor of VAL in its type, handling the infinite case. */
4293 static tree
4294 range_successor (tree val)
4296 tree type = TREE_TYPE (val);
4298 if (INTEGRAL_TYPE_P (type)
4299 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4300 return 0;
4301 else
4302 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4305 /* Given two ranges, see if we can merge them into one. Return 1 if we
4306 can, 0 if we can't. Set the output range into the specified parameters. */
4308 static int
4309 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4310 tree high0, int in1_p, tree low1, tree high1)
4312 int no_overlap;
4313 int subset;
4314 int temp;
4315 tree tem;
4316 int in_p;
4317 tree low, high;
4318 int lowequal = ((low0 == 0 && low1 == 0)
4319 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4320 low0, 0, low1, 0)));
4321 int highequal = ((high0 == 0 && high1 == 0)
4322 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4323 high0, 1, high1, 1)));
4325 /* Make range 0 be the range that starts first, or the one that ends
4326 last if both start at the same value; swap them if necessary. */
4327 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4328 low0, 0, low1, 0))
4329 || (lowequal
4330 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4331 high1, 1, high0, 1))))
4333 temp = in0_p, in0_p = in1_p, in1_p = temp;
4334 tem = low0, low0 = low1, low1 = tem;
4335 tem = high0, high0 = high1, high1 = tem;
4338 /* Now flag two cases, whether the ranges are disjoint or whether the
4339 second range is totally subsumed in the first. Note that the tests
4340 below are simplified by the ones above. */
4341 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4342 high0, 1, low1, 0));
4343 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4344 high1, 1, high0, 1));
4346 /* We now have four cases, depending on whether we are including or
4347 excluding the two ranges. */
4348 if (in0_p && in1_p)
4350 /* If they don't overlap, the result is false. If the second range
4351 is a subset it is the result. Otherwise, the range is from the start
4352 of the second to the end of the first. */
4353 if (no_overlap)
4354 in_p = 0, low = high = 0;
4355 else if (subset)
4356 in_p = 1, low = low1, high = high1;
4357 else
4358 in_p = 1, low = low1, high = high0;
4361 else if (in0_p && ! in1_p)
4363 /* If they don't overlap, the result is the first range. If they are
4364 equal, the result is false. If the second range is a subset of the
4365 first, and the ranges begin at the same place, we go from just after
4366 the end of the second range to the end of the first. If the second
4367 range is not a subset of the first, or if it is a subset and both
4368 ranges end at the same place, the range starts at the start of the
4369 first range and ends just before the second range.
4370 Otherwise, we can't describe this as a single range. */
4371 if (no_overlap)
4372 in_p = 1, low = low0, high = high0;
4373 else if (lowequal && highequal)
4374 in_p = 0, low = high = 0;
4375 else if (subset && lowequal)
4377 low = range_successor (high1);
4378 high = high0;
4379 in_p = (low != 0);
4381 else if (! subset || highequal)
4383 low = low0;
4384 high = range_predecessor (low1);
4385 in_p = (high != 0);
4387 else
4388 return 0;
4391 else if (! in0_p && in1_p)
4393 /* If they don't overlap, the result is the second range. If the second
4394 is a subset of the first, the result is false. Otherwise,
4395 the range starts just after the first range and ends at the
4396 end of the second. */
4397 if (no_overlap)
4398 in_p = 1, low = low1, high = high1;
4399 else if (subset || highequal)
4400 in_p = 0, low = high = 0;
4401 else
4403 low = range_successor (high0);
4404 high = high1;
4405 in_p = (low != 0);
4409 else
4411 /* The case where we are excluding both ranges. Here the complex case
4412 is if they don't overlap. In that case, the only time we have a
4413 range is if they are adjacent. If the second is a subset of the
4414 first, the result is the first. Otherwise, the range to exclude
4415 starts at the beginning of the first range and ends at the end of the
4416 second. */
4417 if (no_overlap)
4419 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4420 range_successor (high0),
4421 1, low1, 0)))
4422 in_p = 0, low = low0, high = high1;
4423 else
4425 /* Canonicalize - [min, x] into - [-, x]. */
4426 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4427 switch (TREE_CODE (TREE_TYPE (low0)))
4429 case ENUMERAL_TYPE:
4430 if (TYPE_PRECISION (TREE_TYPE (low0))
4431 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4432 break;
4433 /* FALLTHROUGH */
4434 case INTEGER_TYPE:
4435 if (tree_int_cst_equal (low0,
4436 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4437 low0 = 0;
4438 break;
4439 case POINTER_TYPE:
4440 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4441 && integer_zerop (low0))
4442 low0 = 0;
4443 break;
4444 default:
4445 break;
4448 /* Canonicalize - [x, max] into - [x, -]. */
4449 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4450 switch (TREE_CODE (TREE_TYPE (high1)))
4452 case ENUMERAL_TYPE:
4453 if (TYPE_PRECISION (TREE_TYPE (high1))
4454 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4455 break;
4456 /* FALLTHROUGH */
4457 case INTEGER_TYPE:
4458 if (tree_int_cst_equal (high1,
4459 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4460 high1 = 0;
4461 break;
4462 case POINTER_TYPE:
4463 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4464 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4465 high1, 1,
4466 integer_one_node, 1)))
4467 high1 = 0;
4468 break;
4469 default:
4470 break;
4473 /* The ranges might be also adjacent between the maximum and
4474 minimum values of the given type. For
4475 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4476 return + [x + 1, y - 1]. */
4477 if (low0 == 0 && high1 == 0)
4479 low = range_successor (high0);
4480 high = range_predecessor (low1);
4481 if (low == 0 || high == 0)
4482 return 0;
4484 in_p = 1;
4486 else
4487 return 0;
4490 else if (subset)
4491 in_p = 0, low = low0, high = high0;
4492 else
4493 in_p = 0, low = low0, high = high1;
4496 *pin_p = in_p, *plow = low, *phigh = high;
4497 return 1;
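/* Illustrative sketch, not part of fold-const.c: the "in0_p && in1_p"
   case above on concrete numbers.  Intersecting + [2, 8] with + [5, 12]
   gives + [5, 8]: from the start of the second range to the end of the
   first.  The demo_* names are hypothetical.  */
#include <assert.h>

static void
demo_merge_check (void)
{
  int x;
  for (x = -20; x <= 20; x++)
    {
      int both = (x >= 2 && x <= 8) && (x >= 5 && x <= 12);
      int merged = x >= 5 && x <= 8;
      assert (both == merged);
    }
}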
4501 /* Subroutine of fold, looking inside expressions of the form
4502 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4503 of the COND_EXPR. This function is also used to optimize
4504 A op B ? C : A by reversing the comparison first.
4506 Return a folded expression whose code is not a COND_EXPR
4507 anymore, or NULL_TREE if no folding opportunity is found. */
4509 static tree
4510 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4512 enum tree_code comp_code = TREE_CODE (arg0);
4513 tree arg00 = TREE_OPERAND (arg0, 0);
4514 tree arg01 = TREE_OPERAND (arg0, 1);
4515 tree arg1_type = TREE_TYPE (arg1);
4516 tree tem;
4518 STRIP_NOPS (arg1);
4519 STRIP_NOPS (arg2);
4521 /* If we have A op 0 ? A : -A, consider applying the following
4522 transformations:
4524 A == 0? A : -A same as -A
4525 A != 0? A : -A same as A
4526 A >= 0? A : -A same as abs (A)
4527 A > 0? A : -A same as abs (A)
4528 A <= 0? A : -A same as -abs (A)
4529 A < 0? A : -A same as -abs (A)
4531 None of these transformations work for modes with signed
4532 zeros. If A is +/-0, the first two transformations will
4533 change the sign of the result (from +0 to -0, or vice
4534 versa). The last four will fix the sign of the result,
4535 even though the original expressions could be positive or
4536 negative, depending on the sign of A.
4538 Note that all these transformations are correct if A is
4539 NaN, since the two alternatives (A and -A) are also NaNs. */
4540 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4541 ? real_zerop (arg01)
4542 : integer_zerop (arg01))
4543 && ((TREE_CODE (arg2) == NEGATE_EXPR
4544 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4545 /* In the case that A is of the form X-Y, '-A' (arg2) may
4546 have already been folded to Y-X, check for that. */
4547 || (TREE_CODE (arg1) == MINUS_EXPR
4548 && TREE_CODE (arg2) == MINUS_EXPR
4549 && operand_equal_p (TREE_OPERAND (arg1, 0),
4550 TREE_OPERAND (arg2, 1), 0)
4551 && operand_equal_p (TREE_OPERAND (arg1, 1),
4552 TREE_OPERAND (arg2, 0), 0))))
4553 switch (comp_code)
4555 case EQ_EXPR:
4556 case UNEQ_EXPR:
4557 tem = fold_convert (arg1_type, arg1);
4558 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4559 case NE_EXPR:
4560 case LTGT_EXPR:
4561 return pedantic_non_lvalue (fold_convert (type, arg1));
4562 case UNGE_EXPR:
4563 case UNGT_EXPR:
4564 if (flag_trapping_math)
4565 break;
4566 /* Fall through. */
4567 case GE_EXPR:
4568 case GT_EXPR:
4569 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4570 arg1 = fold_convert (lang_hooks.types.signed_type
4571 (TREE_TYPE (arg1)), arg1);
4572 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4573 return pedantic_non_lvalue (fold_convert (type, tem));
4574 case UNLE_EXPR:
4575 case UNLT_EXPR:
4576 if (flag_trapping_math)
4577 break;
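/* Fall through. */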
4578 case LE_EXPR:
4579 case LT_EXPR:
4580 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4581 arg1 = fold_convert (lang_hooks.types.signed_type
4582 (TREE_TYPE (arg1)), arg1);
4583 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4584 return negate_expr (fold_convert (type, tem));
4585 default:
4586 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4587 break;
4590 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4591 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4592 both transformations are correct when A is NaN: A != 0
4593 is then true, and A == 0 is false. */
4595 if (integer_zerop (arg01) && integer_zerop (arg2))
4597 if (comp_code == NE_EXPR)
4598 return pedantic_non_lvalue (fold_convert (type, arg1));
4599 else if (comp_code == EQ_EXPR)
4600 return build_int_cst (type, 0);
4603 /* Try some transformations of A op B ? A : B.
4605 A == B? A : B same as B
4606 A != B? A : B same as A
4607 A >= B? A : B same as max (A, B)
4608 A > B? A : B same as max (B, A)
4609 A <= B? A : B same as min (A, B)
4610 A < B? A : B same as min (B, A)
4612 As above, these transformations don't work in the presence
4613 of signed zeros. For example, if A and B are zeros of
4614 opposite sign, the first two transformations will change
4615 the sign of the result. In the last four, the original
4616 expressions give different results for (A=+0, B=-0) and
4617 (A=-0, B=+0), but the transformed expressions do not.
4619 The first two transformations are correct if either A or B
4620 is a NaN. In the first transformation, the condition will
4621 be false, and B will indeed be chosen. In the case of the
4622 second transformation, the condition A != B will be true,
4623 and A will be chosen.
4625 The conversions to max() and min() are not correct if B is
4626 a number and A is not. The conditions in the original
4627 expressions will be false, so all four give B. The min()
4628 and max() versions would give a NaN instead. */
4629 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4630 /* Avoid these transformations if the COND_EXPR may be used
4631 as an lvalue in the C++ front-end. PR c++/19199. */
4632 && (in_gimple_form
4633 || (strcmp (lang_hooks.name, "GNU C++") != 0
4634 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4635 || ! maybe_lvalue_p (arg1)
4636 || ! maybe_lvalue_p (arg2)))
4638 tree comp_op0 = arg00;
4639 tree comp_op1 = arg01;
4640 tree comp_type = TREE_TYPE (comp_op0);
4642 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4643 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4645 comp_type = type;
4646 comp_op0 = arg1;
4647 comp_op1 = arg2;
4650 switch (comp_code)
4652 case EQ_EXPR:
4653 return pedantic_non_lvalue (fold_convert (type, arg2));
4654 case NE_EXPR:
4655 return pedantic_non_lvalue (fold_convert (type, arg1));
4656 case LE_EXPR:
4657 case LT_EXPR:
4658 case UNLE_EXPR:
4659 case UNLT_EXPR:
4660 /* In C++ a ?: expression can be an lvalue, so put the
4661 operand which will be used if they are equal first
4662 so that we can convert this back to the
4663 corresponding COND_EXPR. */
4664 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4666 comp_op0 = fold_convert (comp_type, comp_op0);
4667 comp_op1 = fold_convert (comp_type, comp_op1);
4668 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4669 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4670 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4671 return pedantic_non_lvalue (fold_convert (type, tem));
4673 break;
4674 case GE_EXPR:
4675 case GT_EXPR:
4676 case UNGE_EXPR:
4677 case UNGT_EXPR:
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4680 comp_op0 = fold_convert (comp_type, comp_op0);
4681 comp_op1 = fold_convert (comp_type, comp_op1);
4682 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4683 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4684 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4685 return pedantic_non_lvalue (fold_convert (type, tem));
4687 break;
4688 case UNEQ_EXPR:
4689 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4690 return pedantic_non_lvalue (fold_convert (type, arg2));
4691 break;
4692 case LTGT_EXPR:
4693 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4694 return pedantic_non_lvalue (fold_convert (type, arg1));
4695 break;
4696 default:
4697 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4698 break;
4702 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4703 we might still be able to simplify this. For example,
4704 if C1 is one less or one more than C2, this might have started
4705 out as a MIN or MAX and been transformed by this function.
4706 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4708 if (INTEGRAL_TYPE_P (type)
4709 && TREE_CODE (arg01) == INTEGER_CST
4710 && TREE_CODE (arg2) == INTEGER_CST)
4711 switch (comp_code)
4713 case EQ_EXPR:
4714 /* We can replace A with C1 in this case. */
4715 arg1 = fold_convert (type, arg01);
4716 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4718 case LT_EXPR:
4719 /* If C1 is C2 + 1, this is min(A, C2). */
4720 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4721 OEP_ONLY_CONST)
4722 && operand_equal_p (arg01,
4723 const_binop (PLUS_EXPR, arg2,
4724 build_int_cst (type, 1), 0),
4725 OEP_ONLY_CONST))
4726 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4727 type, arg1, arg2));
4728 break;
4730 case LE_EXPR:
4731 /* If C1 is C2 - 1, this is min(A, C2). */
4732 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4733 OEP_ONLY_CONST)
4734 && operand_equal_p (arg01,
4735 const_binop (MINUS_EXPR, arg2,
4736 build_int_cst (type, 1), 0),
4737 OEP_ONLY_CONST))
4738 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4739 type, arg1, arg2));
4740 break;
4742 case GT_EXPR:
4743 /* If C1 is C2 - 1, this is max(A, C2). */
4744 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4745 OEP_ONLY_CONST)
4746 && operand_equal_p (arg01,
4747 const_binop (MINUS_EXPR, arg2,
4748 build_int_cst (type, 1), 0),
4749 OEP_ONLY_CONST))
4750 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4751 type, arg1, arg2));
4752 break;
4754 case GE_EXPR:
4755 /* If C1 is C2 + 1, this is max(A, C2). */
4756 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4757 OEP_ONLY_CONST)
4758 && operand_equal_p (arg01,
4759 const_binop (PLUS_EXPR, arg2,
4760 build_int_cst (type, 1), 0),
4761 OEP_ONLY_CONST))
4762 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4763 type, arg1, arg2));
4764 break;
4765 case NE_EXPR:
4766 break;
4767 default:
4768 gcc_unreachable ();
4771 return NULL_TREE;
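/* Illustrative sketch, not part of fold-const.c: why the A op 0 ? A : -A
   transformations above are disabled when signed zeros matter.  With
   a = -0.0, folding "a == 0 ? a : -a" to "-a" would turn the result from
   -0.0 into +0.0.  The demo name is hypothetical; compile without
   -ffast-math so signed zeros are honored.  */
#include <assert.h>
#include <math.h>

static void
demo_signed_zero_check (void)
{
  double a = -0.0;
  double cond = (a == 0 ? a : -a);  /* condition is true: picks -0.0 */
  double folded = -a;               /* +0.0: the sign has changed    */
  assert (signbit (cond) != signbit (folded));
}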
4776 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4777 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4778 #endif
4780 /* EXP is some logical combination of boolean tests. See if we can
4781 merge it into some range test. Return the new tree if so. */
4783 static tree
4784 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4786 int or_op = (code == TRUTH_ORIF_EXPR
4787 || code == TRUTH_OR_EXPR);
4788 int in0_p, in1_p, in_p;
4789 tree low0, low1, low, high0, high1, high;
4790 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4791 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4792 tree tem;
4794 /* If this is an OR operation, invert both sides; we will invert
4795 again at the end. */
4796 if (or_op)
4797 in0_p = ! in0_p, in1_p = ! in1_p;
4799 /* If both expressions are the same, if we can merge the ranges, and we
4800 can build the range test, return it or it inverted. If one of the
4801 ranges is always true or always false, consider it to be the same
4802 expression as the other. */
4803 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4804 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4805 in1_p, low1, high1)
4806 && 0 != (tem = (build_range_check (type,
4807 lhs != 0 ? lhs
4808 : rhs != 0 ? rhs : integer_zero_node,
4809 in_p, low, high))))
4810 return or_op ? invert_truthvalue (tem) : tem;
4812 /* On machines where the branch cost is expensive, if this is a
4813 short-circuited branch and the underlying object on both sides
4814 is the same, make a non-short-circuit operation. */
4815 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4816 && lhs != 0 && rhs != 0
4817 && (code == TRUTH_ANDIF_EXPR
4818 || code == TRUTH_ORIF_EXPR)
4819 && operand_equal_p (lhs, rhs, 0))
4821 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4822 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4823 which cases we can't do this. */
4824 if (simple_operand_p (lhs))
4825 return build2 (code == TRUTH_ANDIF_EXPR
4826 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4827 type, op0, op1);
4829 else if (lang_hooks.decls.global_bindings_p () == 0
4830 && ! CONTAINS_PLACEHOLDER_P (lhs))
4832 tree common = save_expr (lhs);
4834 if (0 != (lhs = build_range_check (type, common,
4835 or_op ? ! in0_p : in0_p,
4836 low0, high0))
4837 && (0 != (rhs = build_range_check (type, common,
4838 or_op ? ! in1_p : in1_p,
4839 low1, high1))))
4840 return build2 (code == TRUTH_ANDIF_EXPR
4841 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4842 type, lhs, rhs);
4846 return 0;
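/* Illustrative sketch, not part of fold-const.c: the TRUTH_ANDIF ->
   TRUTH_AND rewrite above.  When both operands are simple (no side
   effects, no traps), "a && b" can be evaluated as the branchless
   "a & b" of the two boolean results; they agree on all inputs.  The
   demo names are hypothetical.  */
#include <assert.h>

static void
demo_non_short_circuit_check (void)
{
  int x;
  for (x = -50; x <= 50; x++)
    {
      /* Two comparisons of the same simple operand...  */
      int andif = (x >= 0) && (x <= 9);
      /* ... evaluated unconditionally and combined bitwise.  */
      int bitand = (x >= 0) & (x <= 9);
      assert (andif == bitand);
    }
}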
4849 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4850 bit value. Arrange things so the extra bits will be set to zero if and
4851 only if C is sign-extended to its full width. If MASK is nonzero,
4852 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4854 static tree
4855 unextend (tree c, int p, int unsignedp, tree mask)
4857 tree type = TREE_TYPE (c);
4858 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4859 tree temp;
4861 if (p == modesize || unsignedp)
4862 return c;
4864 /* We work by getting just the sign bit into the low-order bit, then
4865 into the high-order bit, then sign-extend. We then XOR that value
4866 with C. */
4867 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4868 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4870 /* We must use a signed type in order to get an arithmetic right shift.
4871 However, we must also avoid introducing accidental overflows, so that
4872 a subsequent call to integer_zerop will work. Hence we must
4873 do the type conversion here. At this point, the constant is either
4874 zero or one, and the conversion to a signed type can never overflow.
4875 We could get an overflow if this conversion is done anywhere else. */
4876 if (TYPE_UNSIGNED (type))
4877 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4879 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4880 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4881 if (mask != 0)
4882 temp = const_binop (BIT_AND_EXPR, temp,
4883 fold_convert (TREE_TYPE (c), mask), 0);
4884 /* If necessary, convert the type back to match the type of C. */
4885 if (TYPE_UNSIGNED (type))
4886 temp = fold_convert (type, temp);
4888 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
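/* Illustrative sketch, not part of fold-const.c: the shift/XOR dance in
   unextend for an 8-bit field inside a 32-bit mode.  The extra bits of
   the result are zero exactly when C was the sign-extension of its low
   8 bits.  This assumes arithmetic right shift of negative values and
   wrapping unsigned-to-signed conversion, as on GCC targets; the demo_*
   names are hypothetical.  */
#include <assert.h>
#include <stdint.h>

static uint32_t
demo_unextend (uint32_t c, int p)
{
  uint32_t temp = (c >> (p - 1)) & 1;   /* sign bit into low order    */
  int32_t t = (int32_t) (temp << 31);   /* then into the high order   */
  t >>= 32 - p - 1;                     /* sign-extend down to bit P  */
  return c ^ (uint32_t) t;              /* XOR flips the extra bits   */
}

static void
demo_unextend_check (void)
{
  /* Sign-extended constant: the extra bits cancel to zero.  */
  assert ((demo_unextend (0xffffff85u, 8) >> 8) == 0);
  /* Zero-extended constant with the sign bit set: they don't.  */
  assert ((demo_unextend (0x00000085u, 8) >> 8) != 0);
}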
4891 /* Find ways of folding logical expressions of LHS and RHS:
4892 Try to merge two comparisons to the same innermost item.
4893 Look for range tests like "ch >= '0' && ch <= '9'".
4894 Look for combinations of simple terms on machines with expensive branches
4895 and evaluate the RHS unconditionally.
4897 For example, if we have p->a == 2 && p->b == 4 and we can make an
4898 object large enough to span both A and B, we can do this with a comparison
4899 against the object ANDed with the a mask.
4901 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4902 operations to do this with one comparison.
4904 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4905 function and the one above.
4907 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4908 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4910 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
4911 two operands.
4913 We return the simplified tree or 0 if no optimization is possible. */
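/* Illustrative sketch, not part of fold-const.c: the kind of merge
   described above.  Two adjacent byte comparisons can become one 16-bit
   comparison; the merged constant depends on byte order, so this sketch
   assumes a little-endian target and a padding-free two-byte struct, and
   uses memcpy to stay well defined.  The demo_* names are hypothetical.  */
#include <assert.h>
#include <stdint.h>
#include <string.h>

struct demo_pair { unsigned char a, b; };

static int
demo_merged_compare (const struct demo_pair *p)
{
  uint16_t word;
  memcpy (&word, p, sizeof word);   /* load A and B in one access      */
  /* Little-endian: A is the low byte, B the high byte.  */
  return word == ((4 << 8) | 2);    /* same as p->a == 2 && p->b == 4  */
}

static void
demo_merge_fields_check (void)
{
  struct demo_pair p = { 2, 4 }, q = { 2, 5 };
  assert (demo_merged_compare (&p));
  assert (!demo_merged_compare (&q));
}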
4915 static tree
4916 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4918 /* If this is the "or" of two comparisons, we can do something if
4919 the comparisons are NE_EXPR. If this is the "and", we can do something
4920 if the comparisons are EQ_EXPR. I.e.,
4921 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4923 WANTED_CODE is the comparison code we want to see. For single bit fields, we can
4924 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4925 comparison for one-bit fields. */
4927 enum tree_code wanted_code;
4928 enum tree_code lcode, rcode;
4929 tree ll_arg, lr_arg, rl_arg, rr_arg;
4930 tree ll_inner, lr_inner, rl_inner, rr_inner;
4931 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4932 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4933 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4934 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4935 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4936 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4937 enum machine_mode lnmode, rnmode;
4938 tree ll_mask, lr_mask, rl_mask, rr_mask;
4939 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4940 tree l_const, r_const;
4941 tree lntype, rntype, result;
4942 int first_bit, end_bit;
4943 int volatilep;
4944 tree orig_lhs = lhs, orig_rhs = rhs;
4945 enum tree_code orig_code = code;
4947 /* Start by getting the comparison codes. Fail if anything is volatile.
4948 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4949 it were surrounded with a NE_EXPR. */
4951 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4952 return 0;
4954 lcode = TREE_CODE (lhs);
4955 rcode = TREE_CODE (rhs);
4957 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4959 lhs = build2 (NE_EXPR, truth_type, lhs,
4960 build_int_cst (TREE_TYPE (lhs), 0));
4961 lcode = NE_EXPR;
4964 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4966 rhs = build2 (NE_EXPR, truth_type, rhs,
4967 build_int_cst (TREE_TYPE (rhs), 0));
4968 rcode = NE_EXPR;
4971 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4972 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4973 return 0;
4975 ll_arg = TREE_OPERAND (lhs, 0);
4976 lr_arg = TREE_OPERAND (lhs, 1);
4977 rl_arg = TREE_OPERAND (rhs, 0);
4978 rr_arg = TREE_OPERAND (rhs, 1);
4980 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4981 if (simple_operand_p (ll_arg)
4982 && simple_operand_p (lr_arg))
4984 tree result;
4985 if (operand_equal_p (ll_arg, rl_arg, 0)
4986 && operand_equal_p (lr_arg, rr_arg, 0))
4988 result = combine_comparisons (code, lcode, rcode,
4989 truth_type, ll_arg, lr_arg);
4990 if (result)
4991 return result;
4993 else if (operand_equal_p (ll_arg, rr_arg, 0)
4994 && operand_equal_p (lr_arg, rl_arg, 0))
4996 result = combine_comparisons (code, lcode,
4997 swap_tree_comparison (rcode),
4998 truth_type, ll_arg, lr_arg);
4999 if (result)
5000 return result;
5004 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5005 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5007 /* If the RHS can be evaluated unconditionally and its operands are
5008 simple, it wins to evaluate the RHS unconditionally on machines
5009 with expensive branches. In this case, this isn't a comparison
5010 that can be merged. Avoid doing this if the RHS is a floating-point
5011 comparison since those can trap. */
5013 if (BRANCH_COST >= 2
5014 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5015 && simple_operand_p (rl_arg)
5016 && simple_operand_p (rr_arg))
5018 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5019 if (code == TRUTH_OR_EXPR
5020 && lcode == NE_EXPR && integer_zerop (lr_arg)
5021 && rcode == NE_EXPR && integer_zerop (rr_arg)
5022 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5023 return build2 (NE_EXPR, truth_type,
5024 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5025 ll_arg, rl_arg),
5026 build_int_cst (TREE_TYPE (ll_arg), 0));
5028 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5029 if (code == TRUTH_AND_EXPR
5030 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5031 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5032 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5033 return build2 (EQ_EXPR, truth_type,
5034 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5035 ll_arg, rl_arg),
5036 build_int_cst (TREE_TYPE (ll_arg), 0));
5038 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5040 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5041 return build2 (code, truth_type, lhs, rhs);
5042 return NULL_TREE;
5046 /* See if the comparisons can be merged. Then get all the parameters for
5047 each side. */
5049 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5050 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5051 return 0;
5053 volatilep = 0;
5054 ll_inner = decode_field_reference (ll_arg,
5055 &ll_bitsize, &ll_bitpos, &ll_mode,
5056 &ll_unsignedp, &volatilep, &ll_mask,
5057 &ll_and_mask);
5058 lr_inner = decode_field_reference (lr_arg,
5059 &lr_bitsize, &lr_bitpos, &lr_mode,
5060 &lr_unsignedp, &volatilep, &lr_mask,
5061 &lr_and_mask);
5062 rl_inner = decode_field_reference (rl_arg,
5063 &rl_bitsize, &rl_bitpos, &rl_mode,
5064 &rl_unsignedp, &volatilep, &rl_mask,
5065 &rl_and_mask);
5066 rr_inner = decode_field_reference (rr_arg,
5067 &rr_bitsize, &rr_bitpos, &rr_mode,
5068 &rr_unsignedp, &volatilep, &rr_mask,
5069 &rr_and_mask);
5071 /* The inner operation on the lhs of each
5072 comparison must be the same if we are to be able to do anything.
5073 Then see if we have constants. If not, the same must be true for
5074 the rhs's. */
5075 if (volatilep || ll_inner == 0 || rl_inner == 0
5076 || ! operand_equal_p (ll_inner, rl_inner, 0))
5077 return 0;
5079 if (TREE_CODE (lr_arg) == INTEGER_CST
5080 && TREE_CODE (rr_arg) == INTEGER_CST)
5081 l_const = lr_arg, r_const = rr_arg;
5082 else if (lr_inner == 0 || rr_inner == 0
5083 || ! operand_equal_p (lr_inner, rr_inner, 0))
5084 return 0;
5085 else
5086 l_const = r_const = 0;
5088 /* If either comparison code is not correct for our logical operation,
5089 fail. However, we can convert a one-bit comparison against zero into
5090 the opposite comparison against that bit being set in the field. */
5092 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5093 if (lcode != wanted_code)
5095 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5097 /* Make the left operand unsigned, since we are only interested
5098 in the value of one bit. Otherwise we are doing the wrong
5099 thing below. */
5100 ll_unsignedp = 1;
5101 l_const = ll_mask;
5103 else
5104 return 0;
5107 /* This is analogous to the code for l_const above. */
5108 if (rcode != wanted_code)
5110 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5112 rl_unsignedp = 1;
5113 r_const = rl_mask;
5115 else
5116 return 0;
5119 /* See if we can find a mode that contains both fields being compared on
5120 the left. If we can't, fail. Otherwise, update all constants and masks
5121 to be relative to a field of that size. */
5122 first_bit = MIN (ll_bitpos, rl_bitpos);
5123 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5124 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5125 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5126 volatilep);
5127 if (lnmode == VOIDmode)
5128 return 0;
5130 lnbitsize = GET_MODE_BITSIZE (lnmode);
5131 lnbitpos = first_bit & ~ (lnbitsize - 1);
5132 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5133 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5135 if (BYTES_BIG_ENDIAN)
5137 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5138 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5141 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5142 size_int (xll_bitpos), 0);
5143 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5144 size_int (xrl_bitpos), 0);
5146 if (l_const)
5148 l_const = fold_convert (lntype, l_const);
5149 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5150 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5151 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5152 fold_build1 (BIT_NOT_EXPR,
5153 lntype, ll_mask),
5154 0)))
5156 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5158 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5161 if (r_const)
5163 r_const = fold_convert (lntype, r_const);
5164 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5165 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5166 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5167 fold_build1 (BIT_NOT_EXPR,
5168 lntype, rl_mask),
5169 0)))
5171 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5173 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5177 /* If the right sides are not constant, do the same for it. Also,
5178 disallow this optimization if a size or signedness mismatch occurs
5179 between the left and right sides. */
5180 if (l_const == 0)
5182 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5183 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5184 /* Make sure the two fields on the right
5185 correspond to the left without being swapped. */
5186 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5187 return 0;
5189 first_bit = MIN (lr_bitpos, rr_bitpos);
5190 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5191 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5192 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5193 volatilep);
5194 if (rnmode == VOIDmode)
5195 return 0;
5197 rnbitsize = GET_MODE_BITSIZE (rnmode);
5198 rnbitpos = first_bit & ~ (rnbitsize - 1);
5199 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5200 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5202 if (BYTES_BIG_ENDIAN)
5204 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5205 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5208 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5209 size_int (xlr_bitpos), 0);
5210 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5211 size_int (xrr_bitpos), 0);
5213 /* Make a mask that corresponds to both fields being compared.
5214 Do this for both items being compared. If the operands are the
5215 same size and the bits being compared are in the same position
5216 then we can do this by masking both and comparing the masked
5217 results. */
5218 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5219 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5220 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5222 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5223 ll_unsignedp || rl_unsignedp);
5224 if (! all_ones_mask_p (ll_mask, lnbitsize))
5225 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5227 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5228 lr_unsignedp || rr_unsignedp);
5229 if (! all_ones_mask_p (lr_mask, rnbitsize))
5230 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5232 return build2 (wanted_code, truth_type, lhs, rhs);
5235 /* There is still another way we can do something: If both pairs of
5236 fields being compared are adjacent, we may be able to make a wider
5237 field containing them both.
5239 Note that we still must mask the lhs/rhs expressions. Furthermore,
5240 the mask must be shifted to account for the shift done by
5241 make_bit_field_ref. */
5242 if ((ll_bitsize + ll_bitpos == rl_bitpos
5243 && lr_bitsize + lr_bitpos == rr_bitpos)
5244 || (ll_bitpos == rl_bitpos + rl_bitsize
5245 && lr_bitpos == rr_bitpos + rr_bitsize))
5247 tree type;
5249 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5250 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5251 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5252 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5254 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5255 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5256 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5257 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5259 /* Convert to the smaller type before masking out unwanted bits. */
5260 type = lntype;
5261 if (lntype != rntype)
5263 if (lnbitsize > rnbitsize)
5265 lhs = fold_convert (rntype, lhs);
5266 ll_mask = fold_convert (rntype, ll_mask);
5267 type = rntype;
5269 else if (lnbitsize < rnbitsize)
5271 rhs = fold_convert (lntype, rhs);
5272 lr_mask = fold_convert (lntype, lr_mask);
5273 type = lntype;
5277 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5278 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5280 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5281 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5283 return build2 (wanted_code, truth_type, lhs, rhs);
5286 return 0;
5289 /* Handle the case of comparisons with constants. If there is something in
5290 common between the masks, those bits of the constants must be the same.
5291 If not, the condition is always false. Test for this to avoid generating
5292 incorrect code below. */
5293 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5294 if (! integer_zerop (result)
5295 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5296 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5298 if (wanted_code == NE_EXPR)
5300 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5301 return constant_boolean_node (true, truth_type);
5303 else
5305 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5306 return constant_boolean_node (false, truth_type);
5310 /* Construct the expression we will return. First get the component
5311 reference we will make. Unless the mask is all ones the width of
5312 that field, perform the mask operation. Then compare with the
5313 merged constant. */
5314 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5315 ll_unsignedp || rl_unsignedp);
5317 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5318 if (! all_ones_mask_p (ll_mask, lnbitsize))
5319 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5321 return build2 (wanted_code, truth_type, result,
5322 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5325 /* Optimize a comparison (CODE, with result type TYPE) of a MIN_EXPR
5326 or MAX_EXPR, OP0, against the integer constant OP1. */
5328 static tree
5329 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5331 tree arg0 = op0;
5332 enum tree_code op_code;
5333 tree comp_const = op1;
5334 tree minmax_const;
5335 int consts_equal, consts_lt;
5336 tree inner;
5338 STRIP_SIGN_NOPS (arg0);
5340 op_code = TREE_CODE (arg0);
5341 minmax_const = TREE_OPERAND (arg0, 1);
5342 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5343 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5344 inner = TREE_OPERAND (arg0, 0);
5346 /* If something does not permit us to optimize, return the original tree. */
5347 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5348 || TREE_CODE (comp_const) != INTEGER_CST
5349 || TREE_OVERFLOW (comp_const)
5350 || TREE_CODE (minmax_const) != INTEGER_CST
5351 || TREE_OVERFLOW (minmax_const))
5352 return NULL_TREE;
5354 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5355 and GT_EXPR, doing the rest with recursive calls using logical
5356 simplifications. */
5357 switch (code)
5359 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5361 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5362 type, op0, op1);
5363 if (tem)
5364 return invert_truthvalue (tem);
5365 return NULL_TREE;
5368 case GE_EXPR:
5369 return
5370 fold_build2 (TRUTH_ORIF_EXPR, type,
5371 optimize_minmax_comparison
5372 (EQ_EXPR, type, arg0, comp_const),
5373 optimize_minmax_comparison
5374 (GT_EXPR, type, arg0, comp_const));
5376 case EQ_EXPR:
5377 if (op_code == MAX_EXPR && consts_equal)
5378 /* MAX (X, 0) == 0 -> X <= 0 */
5379 return fold_build2 (LE_EXPR, type, inner, comp_const);
5381 else if (op_code == MAX_EXPR && consts_lt)
5382 /* MAX (X, 0) == 5 -> X == 5 */
5383 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5385 else if (op_code == MAX_EXPR)
5386 /* MAX (X, 0) == -1 -> false */
5387 return omit_one_operand (type, integer_zero_node, inner);
5389 else if (consts_equal)
5390 /* MIN (X, 0) == 0 -> X >= 0 */
5391 return fold_build2 (GE_EXPR, type, inner, comp_const);
5393 else if (consts_lt)
5394 /* MIN (X, 0) == 5 -> false */
5395 return omit_one_operand (type, integer_zero_node, inner);
5397 else
5398 /* MIN (X, 0) == -1 -> X == -1 */
5399 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5401 case GT_EXPR:
5402 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5403 /* MAX (X, 0) > 0 -> X > 0
5404 MAX (X, 0) > 5 -> X > 5 */
5405 return fold_build2 (GT_EXPR, type, inner, comp_const);
5407 else if (op_code == MAX_EXPR)
5408 /* MAX (X, 0) > -1 -> true */
5409 return omit_one_operand (type, integer_one_node, inner);
5411 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5412 /* MIN (X, 0) > 0 -> false
5413 MIN (X, 0) > 5 -> false */
5414 return omit_one_operand (type, integer_zero_node, inner);
5416 else
5417 /* MIN (X, 0) > -1 -> X > -1 */
5418 return fold_build2 (GT_EXPR, type, inner, comp_const);
5420 default:
5421 return NULL_TREE;
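/* Illustrative sketch, not part of fold-const.c: spot-checking the
   MAX/MIN rewrites tabulated above over a small range.  The DEMO_*
   names are hypothetical.  */
#include <assert.h>

#define DEMO_MAX(a, b) ((a) > (b) ? (a) : (b))
#define DEMO_MIN(a, b) ((a) < (b) ? (a) : (b))

static void
demo_minmax_check (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      assert ((DEMO_MAX (x, 0) == 0) == (x <= 0));  /* MAX (X, 0) == 0 */
      assert ((DEMO_MAX (x, 0) > 0) == (x > 0));    /* MAX (X, 0) > 0  */
      assert ((DEMO_MIN (x, 0) == 0) == (x >= 0));  /* MIN (X, 0) == 0 */
      assert ((DEMO_MIN (x, 0) > -1) == (x > -1));  /* MIN (X, 0) > -1 */
    }
}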
5425 /* T is an integer expression that is being multiplied or divided by, or
5426 taken modulo, a constant C (CODE says which operation and what kind
5427 of divide or modulus). See if we can eliminate that operation by folding it with
5428 other operations already in T. WIDE_TYPE, if non-null, is a type that
5429 should be used for the computation if wider than our type.
5431 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5432 (X * 2) + (Y * 4). We must, however, be assured that either the original
5433 expression would not overflow or that overflow is undefined for the type
5434 in the language in question.
5436 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5437 the machine has a multiply-accumulate insn or that this is part of an
5438 addressing calculation.
5440 If we return a non-null expression, it is an equivalent form of the
5441 original computation, but need not be in the original type. */
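/* Illustrative sketch, not part of fold-const.c: the folding described
   above on concrete values.  When the computation cannot overflow,
   (x * 8 + y * 16) / 4 equals x * 2 + y * 4, since 8x + 16y is always
   an exact multiple of 4.  The demo name is hypothetical.  */
#include <assert.h>

static void
demo_extract_muldiv_check (void)
{
  int x, y;
  for (x = -100; x <= 100; x++)
    for (y = -100; y <= 100; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
}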
5443 static tree
5444 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5446 /* To avoid exponential search depth, refuse to allow recursion past
5447 three levels. Beyond that (1) it's highly unlikely that we'll find
5448 something interesting and (2) we've probably processed it before
5449 when we built the inner expression. */
5451 static int depth;
5452 tree ret;
5454 if (depth > 3)
5455 return NULL;
5457 depth++;
5458 ret = extract_muldiv_1 (t, c, code, wide_type);
5459 depth--;
5461 return ret;
5464 static tree
5465 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5467 tree type = TREE_TYPE (t);
5468 enum tree_code tcode = TREE_CODE (t);
5469 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5470 > GET_MODE_SIZE (TYPE_MODE (type)))
5471 ? wide_type : type);
5472 tree t1, t2;
5473 int same_p = tcode == code;
5474 tree op0 = NULL_TREE, op1 = NULL_TREE;
5476 /* Don't deal with constants of zero here; they confuse the code below. */
5477 if (integer_zerop (c))
5478 return NULL_TREE;
5480 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5481 op0 = TREE_OPERAND (t, 0);
5483 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5484 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5486 /* Note that we need not handle conditional operations here since fold
5487 already handles those cases. So just do arithmetic here. */
5488 switch (tcode)
5490 case INTEGER_CST:
5491 /* For a constant, we can always simplify if we are a multiply
5492 or (for divide and modulus) if it is a multiple of our constant. */
5493 if (code == MULT_EXPR
5494 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5495 return const_binop (code, fold_convert (ctype, t),
5496 fold_convert (ctype, c), 0);
5497 break;
5499 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5500 /* If op0 is an expression ... */
5501 if ((COMPARISON_CLASS_P (op0)
5502 || UNARY_CLASS_P (op0)
5503 || BINARY_CLASS_P (op0)
5504 || EXPRESSION_CLASS_P (op0))
5505 /* ... and is unsigned, and its type is smaller than ctype,
5506 then we cannot pass through as widening. */
5507 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5508 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5509 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5510 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5511 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5512 /* ... or this is a truncation (t is narrower than op0),
5513 then we cannot pass through this narrowing. */
5514 || (GET_MODE_SIZE (TYPE_MODE (type))
5515 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5516 /* ... or signedness changes for division or modulus,
5517 then we cannot pass through this conversion. */
5518 || (code != MULT_EXPR
5519 && (TYPE_UNSIGNED (ctype)
5520 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5521 break;
5523 /* Pass the constant down and see if we can make a simplification. If
5524 we can, replace this expression with the inner simplification for
5525 possible later conversion to our or some other type. */
5526 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5527 && TREE_CODE (t2) == INTEGER_CST
5528 && !TREE_OVERFLOW (t2)
5529 && (0 != (t1 = extract_muldiv (op0, t2, code,
5530 code == MULT_EXPR
5531 ? ctype : NULL_TREE))))
5532 return t1;
5533 break;
5535 case ABS_EXPR:
5536 /* If widening the type changes it from signed to unsigned, then we
5537 must avoid building ABS_EXPR itself as unsigned. */
5538 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5540 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5541 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5543 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5544 return fold_convert (ctype, t1);
5546 break;
5548 /* FALLTHROUGH */
5549 case NEGATE_EXPR:
5550 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5551 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5552 break;
5554 case MIN_EXPR: case MAX_EXPR:
5555 /* If widening the type changes the signedness, then we can't perform
5556 this optimization as that changes the result. */
5557 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5558 break;
5560 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5561 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5562 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5564 if (tree_int_cst_sgn (c) < 0)
5565 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5567 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5568 fold_convert (ctype, t2));
5570 break;
5572 case LSHIFT_EXPR: case RSHIFT_EXPR:
5573 /* If the second operand is constant, this is a multiplication
5574 or floor division by a power of two, so we can treat it that
5575 way unless the multiplier or divisor overflows. Signed
5576 left-shift overflow is implementation-defined rather than
5577 undefined in C90, so do not convert signed left shift into
5578 multiplication. */
5579 if (TREE_CODE (op1) == INTEGER_CST
5580 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5581 /* const_binop may not detect overflow correctly,
5582 so check for it explicitly here. */
5583 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5584 && TREE_INT_CST_HIGH (op1) == 0
5585 && 0 != (t1 = fold_convert (ctype,
5586 const_binop (LSHIFT_EXPR,
5587 size_one_node,
5588 op1, 0)))
5589 && !TREE_OVERFLOW (t1))
5590 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5591 ? MULT_EXPR : FLOOR_DIV_EXPR,
5592 ctype, fold_convert (ctype, op0), t1),
5593 c, code, wide_type);
5594 break;
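/* Illustrative: a right shift X >> 2 is re-expressed above as the
   floor division X / 4 before recursing, and an unsigned X << 3 as
   the multiplication X * 8; signed left shifts are deliberately left
   alone, per the C90 note above.  */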
5596 case PLUS_EXPR: case MINUS_EXPR:
5597 /* See if we can eliminate the operation on both sides. If we can, we
5598 can return a new PLUS or MINUS. If we can't, the only remaining
5599 cases where we can do anything are if the second operand is a
5600 constant. */
5601 t1 = extract_muldiv (op0, c, code, wide_type);
5602 t2 = extract_muldiv (op1, c, code, wide_type);
5603 if (t1 != 0 && t2 != 0
5604 && (code == MULT_EXPR
5605 /* If not multiplication, we can only do this if both operands
5606 are divisible by c. */
5607 || (multiple_of_p (ctype, op0, c)
5608 && multiple_of_p (ctype, op1, c))))
5609 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5610 fold_convert (ctype, t2));
5612 /* If this was a subtraction, negate OP1 and set it to be an addition.
5613 This simplifies the logic below. */
5614 if (tcode == MINUS_EXPR)
5615 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5617 if (TREE_CODE (op1) != INTEGER_CST)
5618 break;
5620 /* If either OP1 or C are negative, this optimization is not safe for
5621 some of the division and remainder types while for others we need
5622 to change the code. */
5623 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5625 if (code == CEIL_DIV_EXPR)
5626 code = FLOOR_DIV_EXPR;
5627 else if (code == FLOOR_DIV_EXPR)
5628 code = CEIL_DIV_EXPR;
5629 else if (code != MULT_EXPR
5630 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5631 break;
5634 /* If it's a multiply or a division/modulus operation of a multiple
5635 of our constant, do the operation and verify it doesn't overflow. */
5636 if (code == MULT_EXPR
5637 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5639 op1 = const_binop (code, fold_convert (ctype, op1),
5640 fold_convert (ctype, c), 0);
5641 /* We allow the constant to overflow with wrapping semantics. */
5642 if (op1 == 0
5643 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5644 break;
5646 else
5647 break;
5649 /* If we have an unsigned type that is not a sizetype, we cannot widen
5650 the operation since it will change the result if the original
5651 computation overflowed. */
5652 if (TYPE_UNSIGNED (ctype)
5653 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5654 && ctype != type)
5655 break;
5657 /* If we were able to eliminate our operation from the first side,
5658 apply our operation to the second side and reform the PLUS. */
5659 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5660 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5662 /* The last case is if we are a multiply. In that case, we can
5663 apply the distributive law to commute the multiply and addition
5664 if the multiplication of the constants doesn't overflow. */
5665 if (code == MULT_EXPR)
5666 return fold_build2 (tcode, ctype,
5667 fold_build2 (code, ctype,
5668 fold_convert (ctype, op0),
5669 fold_convert (ctype, c)),
5670 op1);
5672 break;
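/* Two illustrative instances of the PLUS_EXPR handling above:
   (X * 4 + 8) / 4 folds to X + 2, since the division can be
   eliminated on both sides; and (A + 4) * 3 becomes A * 3 + 12 via
   the distributive-law branch, because 4 * 3 does not overflow.  */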
5674 case MULT_EXPR:
5675 /* We have a special case here if we are doing something like
5676 (C * 8) % 4 since we know that's zero. */
5677 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5678 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5679 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5680 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5681 return omit_one_operand (type, integer_zero_node, op0);
5683 /* ... fall through ... */
5685 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5686 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5687 /* If we can extract our operation from the LHS, do so and return a
5688 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5689 do something only if the second operand is a constant. */
5690 if (same_p
5691 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5692 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5693 fold_convert (ctype, op1));
5694 else if (tcode == MULT_EXPR && code == MULT_EXPR
5695 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5696 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5697 fold_convert (ctype, t1));
5698 else if (TREE_CODE (op1) != INTEGER_CST)
5699 return 0;
5701 /* If these are the same operation types, we can associate them
5702 assuming no overflow. */
5703 if (tcode == code
5704 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5705 fold_convert (ctype, c), 0))
5706 && !TREE_OVERFLOW (t1))
5707 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5709 /* If these operations "cancel" each other, we have the main
5710 optimizations of this pass, which occur when either constant is a
5711 multiple of the other, in which case we replace this with either an
5712 operation of CODE or TCODE.
5714 If we have an unsigned type that is not a sizetype, we cannot do
5715 this since it will change the result if the original computation
5716 overflowed. */
5717 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5718 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5719 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5720 || (tcode == MULT_EXPR
5721 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5722 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5724 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5725 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5726 fold_convert (ctype,
5727 const_binop (TRUNC_DIV_EXPR,
5728 op1, c, 0)));
5729 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5730 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5731 fold_convert (ctype,
5732 const_binop (TRUNC_DIV_EXPR,
5733 c, op1, 0)));
5735 break;
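/* Illustrative cancellations from the code above, valid when
   overflow in CTYPE is undefined or CTYPE is a sizetype:
   (X * 12) / 4 folds to X * 3 because OP1 (12) is a multiple of
   C (4), and (X * 4) / 12 folds to X / 3 because C (12) is a
   multiple of OP1 (4).  */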
5737 default:
5738 break;
5741 return 0;
5744 /* Return a node which has the indicated constant VALUE (either 0 or
5745 1), and is of the indicated TYPE. */
5747 tree
5748 constant_boolean_node (int value, tree type)
5750 if (type == integer_type_node)
5751 return value ? integer_one_node : integer_zero_node;
5752 else if (type == boolean_type_node)
5753 return value ? boolean_true_node : boolean_false_node;
5754 else
5755 return build_int_cst (type, value);
5759 /* Return true if expr looks like an ARRAY_REF and set base and
5760 offset to the appropriate trees. If there is no offset,
5761 offset is set to NULL_TREE. Base will be canonicalized to
5762 something you can get the element type from using
5763 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5764 in bytes to the base. */
5766 static bool
5767 extract_array_ref (tree expr, tree *base, tree *offset)
5769 /* One canonical form is a PLUS_EXPR with the first
5770 argument being an ADDR_EXPR with a possible NOP_EXPR
5771 attached. */
5772 if (TREE_CODE (expr) == PLUS_EXPR)
5774 tree op0 = TREE_OPERAND (expr, 0);
5775 tree inner_base, dummy1;
5776 /* Strip NOP_EXPRs here because the C frontends and/or
5777 folders may present us with (int *)&x.a + 4B. */
5778 STRIP_NOPS (op0);
5779 if (extract_array_ref (op0, &inner_base, &dummy1))
5781 *base = inner_base;
5782 if (dummy1 == NULL_TREE)
5783 *offset = TREE_OPERAND (expr, 1);
5784 else
5785 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5786 dummy1, TREE_OPERAND (expr, 1));
5787 return true;
5790 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5791 which we transform into an ADDR_EXPR with appropriate
5792 offset. For other arguments to the ADDR_EXPR we assume
5793 zero offset and as such do not care about the ADDR_EXPR
5794 type and strip possible nops from it. */
5795 else if (TREE_CODE (expr) == ADDR_EXPR)
5797 tree op0 = TREE_OPERAND (expr, 0);
5798 if (TREE_CODE (op0) == ARRAY_REF)
5800 tree idx = TREE_OPERAND (op0, 1);
5801 *base = TREE_OPERAND (op0, 0);
5802 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5803 array_ref_element_size (op0));
5805 else
5807 /* Handle array-to-pointer decay as &a. */
5808 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5809 *base = TREE_OPERAND (expr, 0);
5810 else
5811 *base = expr;
5812 *offset = NULL_TREE;
5814 return true;
5816 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5817 else if (SSA_VAR_P (expr)
5818 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5820 *base = expr;
5821 *offset = NULL_TREE;
5822 return true;
5825 return false;
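/* Illustrative results (a sketch): for &a[i], BASE is a and OFFSET
   is i * sizeof (element); for p + 4 with a pointer variable p, the
   recursion yields BASE = p and OFFSET = 4; a bare pointer variable
   p yields BASE = p and OFFSET = NULL_TREE.  */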
5829 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5830 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5831 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5832 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5833 COND is the first argument to CODE; otherwise (as in the example
5834 given here), it is the second argument. TYPE is the type of the
5835 original expression. Return NULL_TREE if no simplification is
5836 possible. */
5838 static tree
5839 fold_binary_op_with_conditional_arg (enum tree_code code,
5840 tree type, tree op0, tree op1,
5841 tree cond, tree arg, int cond_first_p)
5843 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5844 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5845 tree test, true_value, false_value;
5846 tree lhs = NULL_TREE;
5847 tree rhs = NULL_TREE;
5849 /* This transformation is only worthwhile if we don't have to wrap
5850 arg in a SAVE_EXPR, and the operation can be simplified on at least
5851 one of the branches once it's pushed inside the COND_EXPR. */
5852 if (!TREE_CONSTANT (arg))
5853 return NULL_TREE;
5855 if (TREE_CODE (cond) == COND_EXPR)
5857 test = TREE_OPERAND (cond, 0);
5858 true_value = TREE_OPERAND (cond, 1);
5859 false_value = TREE_OPERAND (cond, 2);
5860 /* If this operand throws an exception, then it does not make
5861 sense to try to perform a logical or arithmetic operation
5862 involving it. */
5863 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5864 lhs = true_value;
5865 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5866 rhs = false_value;
5868 else
5870 tree testtype = TREE_TYPE (cond);
5871 test = cond;
5872 true_value = constant_boolean_node (true, testtype);
5873 false_value = constant_boolean_node (false, testtype);
5876 arg = fold_convert (arg_type, arg);
5877 if (lhs == 0)
5879 true_value = fold_convert (cond_type, true_value);
5880 if (cond_first_p)
5881 lhs = fold_build2 (code, type, true_value, arg);
5882 else
5883 lhs = fold_build2 (code, type, arg, true_value);
5885 if (rhs == 0)
5887 false_value = fold_convert (cond_type, false_value);
5888 if (cond_first_p)
5889 rhs = fold_build2 (code, type, false_value, arg);
5890 else
5891 rhs = fold_build2 (code, type, arg, false_value);
5894 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5895 return fold_convert (type, test);
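/* For example (illustrative): with CODE = PLUS_EXPR, ARG = 10 and
   COND = (x < y), the function builds
   (x < y) ? 10 + 1 : 10 + 0, which immediately folds to
   (x < y) ? 11 : 10.  */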
5899 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5901 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5902 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5903 ADDEND is the same as X.
5905 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5906 and finite. The problematic cases are when X is zero, and its mode
5907 has signed zeros. In the case of rounding towards -infinity,
5908 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5909 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5911 static bool
5912 fold_real_zero_addition_p (tree type, tree addend, int negate)
5914 if (!real_zerop (addend))
5915 return false;
5917 /* Don't allow the fold with -fsignaling-nans. */
5918 if (HONOR_SNANS (TYPE_MODE (type)))
5919 return false;
5921 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5922 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5923 return true;
5925 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5926 if (TREE_CODE (addend) == REAL_CST
5927 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5928 negate = !negate;
5930 /* The mode has signed zeros, and we have to honor their sign.
5931 In this situation, there is only one case we can return true for.
5932 X - 0 is the same as X unless rounding towards -infinity is
5933 supported. */
5934 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
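/* Concretely (illustrative): X + 0.0 may only be folded to X when
   signed zeros are not honored, since -0.0 + 0.0 is +0.0; X - 0.0
   additionally survives signed zeros provided sign-dependent
   rounding is off, because only rounding towards -infinity makes
   0.0 - 0.0 yield -0.0.  */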
5937 /* Subroutine of fold() that checks comparisons of built-in math
5938 functions against real constants.
5940 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5941 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5942 is the type of the result and ARG0 and ARG1 are the operands of the
5943 comparison. ARG1 must be a TREE_REAL_CST.
5945 The function returns the constant folded tree if a simplification
5946 can be made, and NULL_TREE otherwise. */
5948 static tree
5949 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5950 tree type, tree arg0, tree arg1)
5952 REAL_VALUE_TYPE c;
5954 if (BUILTIN_SQRT_P (fcode))
5956 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5957 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5959 c = TREE_REAL_CST (arg1);
5960 if (REAL_VALUE_NEGATIVE (c))
5962 /* sqrt(x) < y is always false, if y is negative. */
5963 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5964 return omit_one_operand (type, integer_zero_node, arg);
5966 /* sqrt(x) > y is always true, if y is negative and we
5967 don't care about NaNs, i.e. negative values of x. */
5968 if (code == NE_EXPR || !HONOR_NANS (mode))
5969 return omit_one_operand (type, integer_one_node, arg);
5971 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5972 return fold_build2 (GE_EXPR, type, arg,
5973 build_real (TREE_TYPE (arg), dconst0));
5975 else if (code == GT_EXPR || code == GE_EXPR)
5977 REAL_VALUE_TYPE c2;
5979 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5980 real_convert (&c2, mode, &c2);
5982 if (REAL_VALUE_ISINF (c2))
5984 /* sqrt(x) > y is x == +Inf, when y is very large. */
5985 if (HONOR_INFINITIES (mode))
5986 return fold_build2 (EQ_EXPR, type, arg,
5987 build_real (TREE_TYPE (arg), c2));
5989 /* sqrt(x) > y is always false, when y is very large
5990 and we don't care about infinities. */
5991 return omit_one_operand (type, integer_zero_node, arg);
5994 /* sqrt(x) > c is the same as x > c*c. */
5995 return fold_build2 (code, type, arg,
5996 build_real (TREE_TYPE (arg), c2));
5998 else if (code == LT_EXPR || code == LE_EXPR)
6000 REAL_VALUE_TYPE c2;
6002 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6003 real_convert (&c2, mode, &c2);
6005 if (REAL_VALUE_ISINF (c2))
6007 /* sqrt(x) < y is always true, when y is a very large
6008 value and we don't care about NaNs or Infinities. */
6009 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6010 return omit_one_operand (type, integer_one_node, arg);
6012 /* sqrt(x) < y is x != +Inf when y is very large and we
6013 don't care about NaNs. */
6014 if (! HONOR_NANS (mode))
6015 return fold_build2 (NE_EXPR, type, arg,
6016 build_real (TREE_TYPE (arg), c2));
6018 /* sqrt(x) < y is x >= 0 when y is very large and we
6019 don't care about Infinities. */
6020 if (! HONOR_INFINITIES (mode))
6021 return fold_build2 (GE_EXPR, type, arg,
6022 build_real (TREE_TYPE (arg), dconst0));
6024 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6025 if (lang_hooks.decls.global_bindings_p () != 0
6026 || CONTAINS_PLACEHOLDER_P (arg))
6027 return NULL_TREE;
6029 arg = save_expr (arg);
6030 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6031 fold_build2 (GE_EXPR, type, arg,
6032 build_real (TREE_TYPE (arg),
6033 dconst0)),
6034 fold_build2 (NE_EXPR, type, arg,
6035 build_real (TREE_TYPE (arg),
6036 c2)));
6039 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6040 if (! HONOR_NANS (mode))
6041 return fold_build2 (code, type, arg,
6042 build_real (TREE_TYPE (arg), c2));
6044 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6045 if (lang_hooks.decls.global_bindings_p () == 0
6046 && ! CONTAINS_PLACEHOLDER_P (arg))
6048 arg = save_expr (arg);
6049 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6050 fold_build2 (GE_EXPR, type, arg,
6051 build_real (TREE_TYPE (arg),
6052 dconst0)),
6053 fold_build2 (code, type, arg,
6054 build_real (TREE_TYPE (arg),
6055 c2)));
6060 return NULL_TREE;
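/* Illustrative foldings from the sqrt cases above, assuming NaNs
   and infinities need not be honored: sqrt(x) > 2.0 becomes
   x > 4.0, sqrt(x) < 3.0 becomes x < 9.0, and sqrt(x) <= -1.0
   becomes constant false.  */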
6063 /* Subroutine of fold() that optimizes comparisons against Infinities,
6064 either +Inf or -Inf.
6066 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6067 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6068 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6070 The function returns the constant folded tree if a simplification
6071 can be made, and NULL_TREE otherwise. */
6073 static tree
6074 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6076 enum machine_mode mode;
6077 REAL_VALUE_TYPE max;
6078 tree temp;
6079 bool neg;
6081 mode = TYPE_MODE (TREE_TYPE (arg0));
6083 /* For negative infinity swap the sense of the comparison. */
6084 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6085 if (neg)
6086 code = swap_tree_comparison (code);
6088 switch (code)
6090 case GT_EXPR:
6091 /* x > +Inf is always false, if we ignore sNaNs. */
6092 if (HONOR_SNANS (mode))
6093 return NULL_TREE;
6094 return omit_one_operand (type, integer_zero_node, arg0);
6096 case LE_EXPR:
6097 /* x <= +Inf is always true, if we don't care about NaNs. */
6098 if (! HONOR_NANS (mode))
6099 return omit_one_operand (type, integer_one_node, arg0);
6101 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6102 if (lang_hooks.decls.global_bindings_p () == 0
6103 && ! CONTAINS_PLACEHOLDER_P (arg0))
6105 arg0 = save_expr (arg0);
6106 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6108 break;
6110 case EQ_EXPR:
6111 case GE_EXPR:
6112 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6113 real_maxval (&max, neg, mode);
6114 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6115 arg0, build_real (TREE_TYPE (arg0), max));
6117 case LT_EXPR:
6118 /* x < +Inf is always equal to x <= DBL_MAX. */
6119 real_maxval (&max, neg, mode);
6120 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6121 arg0, build_real (TREE_TYPE (arg0), max));
6123 case NE_EXPR:
6124 /* x != +Inf is always equal to !(x > DBL_MAX). */
6125 real_maxval (&max, neg, mode);
6126 if (! HONOR_NANS (mode))
6127 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6128 arg0, build_real (TREE_TYPE (arg0), max));
6130 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6131 arg0, build_real (TREE_TYPE (arg0), max));
6132 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6134 default:
6135 break;
6138 return NULL_TREE;
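/* Illustrative: for a double x, x < +Inf folds to x <= DBL_MAX and
   x >= +Inf folds to x > DBL_MAX; comparisons against -Inf are
   handled by first swapping the sense of the comparison.  */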
6141 /* Subroutine of fold() that optimizes comparisons of a division by
6142 a nonzero integer constant against an integer constant, i.e.
6143 X/C1 op C2.
6145 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6146 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6147 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6149 The function returns the constant folded tree if a simplification
6150 can be made, and NULL_TREE otherwise. */
6152 static tree
6153 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6155 tree prod, tmp, hi, lo;
6156 tree arg00 = TREE_OPERAND (arg0, 0);
6157 tree arg01 = TREE_OPERAND (arg0, 1);
6158 unsigned HOST_WIDE_INT lpart;
6159 HOST_WIDE_INT hpart;
6160 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6161 bool neg_overflow;
6162 int overflow;
6164 /* We have to do this the hard way to detect unsigned overflow.
6165 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6166 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6167 TREE_INT_CST_HIGH (arg01),
6168 TREE_INT_CST_LOW (arg1),
6169 TREE_INT_CST_HIGH (arg1),
6170 &lpart, &hpart, unsigned_p);
6171 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6172 -1, overflow);
6173 neg_overflow = false;
6175 if (unsigned_p)
6177 tmp = int_const_binop (MINUS_EXPR, arg01,
6178 build_int_cst (TREE_TYPE (arg01), 1), 0);
6179 lo = prod;
6181 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6182 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6183 TREE_INT_CST_HIGH (prod),
6184 TREE_INT_CST_LOW (tmp),
6185 TREE_INT_CST_HIGH (tmp),
6186 &lpart, &hpart, unsigned_p);
6187 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6188 -1, overflow | TREE_OVERFLOW (prod));
6190 else if (tree_int_cst_sgn (arg01) >= 0)
6192 tmp = int_const_binop (MINUS_EXPR, arg01,
6193 build_int_cst (TREE_TYPE (arg01), 1), 0);
6194 switch (tree_int_cst_sgn (arg1))
6196 case -1:
6197 neg_overflow = true;
6198 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6199 hi = prod;
6200 break;
6202 case 0:
6203 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6204 hi = tmp;
6205 break;
6207 case 1:
6208 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6209 lo = prod;
6210 break;
6212 default:
6213 gcc_unreachable ();
6216 else
6218 /* A negative divisor reverses the relational operators. */
6219 code = swap_tree_comparison (code);
6221 tmp = int_const_binop (PLUS_EXPR, arg01,
6222 build_int_cst (TREE_TYPE (arg01), 1), 0);
6223 switch (tree_int_cst_sgn (arg1))
6225 case -1:
6226 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6227 lo = prod;
6228 break;
6230 case 0:
6231 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6232 lo = tmp;
6233 break;
6235 case 1:
6236 neg_overflow = true;
6237 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6238 hi = prod;
6239 break;
6241 default:
6242 gcc_unreachable ();
6246 switch (code)
6248 case EQ_EXPR:
6249 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6250 return omit_one_operand (type, integer_zero_node, arg00);
6251 if (TREE_OVERFLOW (hi))
6252 return fold_build2 (GE_EXPR, type, arg00, lo);
6253 if (TREE_OVERFLOW (lo))
6254 return fold_build2 (LE_EXPR, type, arg00, hi);
6255 return build_range_check (type, arg00, 1, lo, hi);
6257 case NE_EXPR:
6258 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6259 return omit_one_operand (type, integer_one_node, arg00);
6260 if (TREE_OVERFLOW (hi))
6261 return fold_build2 (LT_EXPR, type, arg00, lo);
6262 if (TREE_OVERFLOW (lo))
6263 return fold_build2 (GT_EXPR, type, arg00, hi);
6264 return build_range_check (type, arg00, 0, lo, hi);
6266 case LT_EXPR:
6267 if (TREE_OVERFLOW (lo))
6269 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6270 return omit_one_operand (type, tmp, arg00);
6272 return fold_build2 (LT_EXPR, type, arg00, lo);
6274 case LE_EXPR:
6275 if (TREE_OVERFLOW (hi))
6277 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6278 return omit_one_operand (type, tmp, arg00);
6280 return fold_build2 (LE_EXPR, type, arg00, hi);
6282 case GT_EXPR:
6283 if (TREE_OVERFLOW (hi))
6285 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6286 return omit_one_operand (type, tmp, arg00);
6288 return fold_build2 (GT_EXPR, type, arg00, hi);
6290 case GE_EXPR:
6291 if (TREE_OVERFLOW (lo))
6293 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6294 return omit_one_operand (type, tmp, arg00);
6296 return fold_build2 (GE_EXPR, type, arg00, lo);
6298 default:
6299 break;
6302 return NULL_TREE;
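/* A worked instance (illustrative): for signed x, x / 4 == 2 holds
   exactly for 8 <= x <= 11, so the EQ_EXPR case above produces a
   range check with LO = 8 and HI = LO + (4 - 1) = 11.  */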
6306 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6307 equality/inequality test, then return a simplified form of the test
6308 using a sign test. Otherwise return NULL. TYPE is the desired
6309 result type. */
6311 static tree
6312 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6313 tree result_type)
6315 /* If this is testing a single bit, we can optimize the test. */
6316 if ((code == NE_EXPR || code == EQ_EXPR)
6317 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6318 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6320 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6321 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6322 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6324 if (arg00 != NULL_TREE
6325 /* This is only a win if casting to a signed type is cheap,
6326 i.e. when arg00's type is not a partial mode. */
6327 && TYPE_PRECISION (TREE_TYPE (arg00))
6328 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6330 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6331 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6332 result_type, fold_convert (stype, arg00),
6333 build_int_cst (stype, 0));
6337 return NULL_TREE;
6340 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6341 equality/inequality test, then return a simplified form of
6342 the test using shifts and logical operations. Otherwise return
6343 NULL. TYPE is the desired result type. */
6345 tree
6346 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6347 tree result_type)
6349 /* If this is testing a single bit, we can optimize the test. */
6350 if ((code == NE_EXPR || code == EQ_EXPR)
6351 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6352 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6354 tree inner = TREE_OPERAND (arg0, 0);
6355 tree type = TREE_TYPE (arg0);
6356 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6357 enum machine_mode operand_mode = TYPE_MODE (type);
6358 int ops_unsigned;
6359 tree signed_type, unsigned_type, intermediate_type;
6360 tree tem, one;
6362 /* First, see if we can fold the single bit test into a sign-bit
6363 test. */
6364 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6365 result_type);
6366 if (tem)
6367 return tem;
6369 /* Otherwise we have (A & C) != 0 where C is a single bit,
6370 convert that into ((A >> C2) & 1), where C2 = log2(C).
6371 Similarly for (A & C) == 0. */
6373 /* If INNER is a right shift of a constant and it plus BITNUM does
6374 not overflow, adjust BITNUM and INNER. */
6375 if (TREE_CODE (inner) == RSHIFT_EXPR
6376 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6377 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6378 && bitnum < TYPE_PRECISION (type)
6379 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6380 bitnum - TYPE_PRECISION (type)))
6382 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6383 inner = TREE_OPERAND (inner, 0);
6386 /* If we are going to be able to omit the AND below, we must do our
6387 operations as unsigned. If we must use the AND, we have a choice.
6388 Normally unsigned is faster, but for some machines signed is. */
6389 #ifdef LOAD_EXTEND_OP
6390 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6391 && !flag_syntax_only) ? 0 : 1;
6392 #else
6393 ops_unsigned = 1;
6394 #endif
6396 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6397 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6398 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6399 inner = fold_convert (intermediate_type, inner);
6401 if (bitnum != 0)
6402 inner = build2 (RSHIFT_EXPR, intermediate_type,
6403 inner, size_int (bitnum));
6405 one = build_int_cst (intermediate_type, 1);
6407 if (code == EQ_EXPR)
6408 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6410 /* Put the AND last so it can combine with more things. */
6411 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6413 /* Make sure to return the proper type. */
6414 inner = fold_convert (result_type, inner);
6416 return inner;
6418 return NULL_TREE;
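/* Illustrative: (x & 8) != 0 becomes ((x >> 3) & 1) and
   (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1), while a sign-bit mask
   such as (x & 0x80000000) != 0 is instead turned into
   (signed) x < 0 by the sign-test variant above.  */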
6421 /* Check whether we are allowed to reorder operands arg0 and arg1,
6422 such that the evaluation of arg1 occurs before arg0. */
6424 static bool
6425 reorder_operands_p (tree arg0, tree arg1)
6427 if (! flag_evaluation_order)
6428 return true;
6429 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6430 return true;
6431 return ! TREE_SIDE_EFFECTS (arg0)
6432 && ! TREE_SIDE_EFFECTS (arg1);
6435 /* Test whether it is preferable to swap two operands, ARG0 and
6436 ARG1, for example because ARG0 is an integer constant and ARG1
6437 isn't. If REORDER is true, only recommend swapping if we can
6438 evaluate the operands in reverse order. */
6440 bool
6441 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6443 STRIP_SIGN_NOPS (arg0);
6444 STRIP_SIGN_NOPS (arg1);
6446 if (TREE_CODE (arg1) == INTEGER_CST)
6447 return 0;
6448 if (TREE_CODE (arg0) == INTEGER_CST)
6449 return 1;
6451 if (TREE_CODE (arg1) == REAL_CST)
6452 return 0;
6453 if (TREE_CODE (arg0) == REAL_CST)
6454 return 1;
6456 if (TREE_CODE (arg1) == COMPLEX_CST)
6457 return 0;
6458 if (TREE_CODE (arg0) == COMPLEX_CST)
6459 return 1;
6461 if (TREE_CONSTANT (arg1))
6462 return 0;
6463 if (TREE_CONSTANT (arg0))
6464 return 1;
6466 if (optimize_size)
6467 return 0;
6469 if (reorder && flag_evaluation_order
6470 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6471 return 0;
6473 if (DECL_P (arg1))
6474 return 0;
6475 if (DECL_P (arg0))
6476 return 1;
6478 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6479 for commutative and comparison operators. Ensuring a canonical
6480 form allows the optimizers to find additional redundancies without
6481 having to explicitly check for both orderings. */
6482 if (TREE_CODE (arg0) == SSA_NAME
6483 && TREE_CODE (arg1) == SSA_NAME
6484 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6485 return 1;
6487 return 0;
6490 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6491 ARG0 is extended to a wider type. */
6493 static tree
6494 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6496 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6497 tree arg1_unw;
6498 tree shorter_type, outer_type;
6499 tree min, max;
6500 bool above, below;
6502 if (arg0_unw == arg0)
6503 return NULL_TREE;
6504 shorter_type = TREE_TYPE (arg0_unw);
6506 #ifdef HAVE_canonicalize_funcptr_for_compare
6507 /* Disable this optimization if we're casting a function pointer
6508 type on targets that require function pointer canonicalization. */
6509 if (HAVE_canonicalize_funcptr_for_compare
6510 && TREE_CODE (shorter_type) == POINTER_TYPE
6511 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6512 return NULL_TREE;
6513 #endif
6515 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6516 return NULL_TREE;
6518 arg1_unw = get_unwidened (arg1, shorter_type);
6520 /* If possible, express the comparison in the shorter mode. */
6521 if ((code == EQ_EXPR || code == NE_EXPR
6522 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6523 && (TREE_TYPE (arg1_unw) == shorter_type
6524 || (TREE_CODE (arg1_unw) == INTEGER_CST
6525 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6526 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6527 && int_fits_type_p (arg1_unw, shorter_type))))
6528 return fold_build2 (code, type, arg0_unw,
6529 fold_convert (shorter_type, arg1_unw));
6531 if (TREE_CODE (arg1_unw) != INTEGER_CST
6532 || TREE_CODE (shorter_type) != INTEGER_TYPE
6533 || !int_fits_type_p (arg1_unw, TREE_TYPE (arg0)))
6534 return NULL_TREE;
6536 /* If we are comparing with an integer that does not fit into the range
6537 of the shorter type, the result is known. */
6538 outer_type = TREE_TYPE (arg1_unw);
6539 min = lower_bound_in_type (outer_type, shorter_type);
6540 max = upper_bound_in_type (outer_type, shorter_type);
6542 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6543 max, arg1_unw));
6544 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6545 arg1_unw, min));
6547 switch (code)
6549 case EQ_EXPR:
6550 if (above || below)
6551 return omit_one_operand (type, integer_zero_node, arg0);
6552 break;
6554 case NE_EXPR:
6555 if (above || below)
6556 return omit_one_operand (type, integer_one_node, arg0);
6557 break;
6559 case LT_EXPR:
6560 case LE_EXPR:
6561 if (above)
6562 return omit_one_operand (type, integer_one_node, arg0);
6563 else if (below)
6564 return omit_one_operand (type, integer_zero_node, arg0);
6566 case GT_EXPR:
6567 case GE_EXPR:
6568 if (above)
6569 return omit_one_operand (type, integer_zero_node, arg0);
6570 else if (below)
6571 return omit_one_operand (type, integer_one_node, arg0);
6573 default:
6574 break;
6577 return NULL_TREE;
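/* Illustrative: for signed char c, (int) c == 1000 folds to
   constant false because 1000 is above the range of the shorter
   type, while (int) c == 12 is simply narrowed to c == 12.  */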
6580 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6581 ARG0 just the signedness is changed. */
6583 static tree
6584 fold_sign_changed_comparison (enum tree_code code, tree type,
6585 tree arg0, tree arg1)
6587 tree arg0_inner;
6588 tree inner_type, outer_type;
6590 if (TREE_CODE (arg0) != NOP_EXPR
6591 && TREE_CODE (arg0) != CONVERT_EXPR)
6592 return NULL_TREE;
6594 outer_type = TREE_TYPE (arg0);
6595 arg0_inner = TREE_OPERAND (arg0, 0);
6596 inner_type = TREE_TYPE (arg0_inner);
6598 #ifdef HAVE_canonicalize_funcptr_for_compare
6599 /* Disable this optimization if we're casting a function pointer
6600 type on targets that require function pointer canonicalization. */
6601 if (HAVE_canonicalize_funcptr_for_compare
6602 && TREE_CODE (inner_type) == POINTER_TYPE
6603 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6604 return NULL_TREE;
6605 #endif
6607 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6608 return NULL_TREE;
6610 if (TREE_CODE (arg1) != INTEGER_CST
6611 && !((TREE_CODE (arg1) == NOP_EXPR
6612 || TREE_CODE (arg1) == CONVERT_EXPR)
6613 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6614 return NULL_TREE;
6616 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6617 && code != NE_EXPR
6618 && code != EQ_EXPR)
6619 return NULL_TREE;
6621 if (TREE_CODE (arg1) == INTEGER_CST)
6622 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6623 TREE_INT_CST_HIGH (arg1), 0,
6624 TREE_OVERFLOW (arg1));
6625 else
6626 arg1 = fold_convert (inner_type, arg1);
6628 return fold_build2 (code, type, arg0_inner, arg1);
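/* Illustrative: for int x, the comparison (unsigned) x == 5U is
   rewritten as x == 5; equality is insensitive to the change of
   sign, and the precision of the two types is identical.  */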
6631 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6632 step of the array. Reconstructs s and delta in the case of s * delta
6633 being an integer constant (and thus already folded).
6634 ADDR is the address. MULT is the multiplicative expression.
6635 If the function succeeds, the new address expression is returned. Otherwise
6636 NULL_TREE is returned. */
6638 static tree
6639 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6641 tree s, delta, step;
6642 tree ref = TREE_OPERAND (addr, 0), pref;
6643 tree ret, pos;
6644 tree itype;
6645 bool mdim = false;
6647 /* Canonicalize op1 into a possibly non-constant delta
6648 and an INTEGER_CST s. */
6649 if (TREE_CODE (op1) == MULT_EXPR)
6651 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6653 STRIP_NOPS (arg0);
6654 STRIP_NOPS (arg1);
6656 if (TREE_CODE (arg0) == INTEGER_CST)
6658 s = arg0;
6659 delta = arg1;
6661 else if (TREE_CODE (arg1) == INTEGER_CST)
6663 s = arg1;
6664 delta = arg0;
6666 else
6667 return NULL_TREE;
6669 else if (TREE_CODE (op1) == INTEGER_CST)
6671 delta = op1;
6672 s = NULL_TREE;
6674 else
6676 /* Treat op1 as delta * 1. */
6677 delta = op1;
6678 s = integer_one_node;
6681 for (;; ref = TREE_OPERAND (ref, 0))
6683 if (TREE_CODE (ref) == ARRAY_REF)
6685 /* Remember if this was a multi-dimensional array. */
6686 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6687 mdim = true;
6689 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6690 if (! itype)
6691 continue;
6693 step = array_ref_element_size (ref);
6694 if (TREE_CODE (step) != INTEGER_CST)
6695 continue;
6697 if (s)
6699 if (! tree_int_cst_equal (step, s))
6700 continue;
6702 else
6704 /* Try if delta is a multiple of step. */
6705 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6706 if (! tmp)
6707 continue;
6708 delta = tmp;
6711 /* Only fold here if we can verify we do not overflow one
6712 dimension of a multi-dimensional array. */
6713 if (mdim)
6715 tree tmp;
6717 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6718 || !INTEGRAL_TYPE_P (itype)
6719 || !TYPE_MAX_VALUE (itype)
6720 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6721 continue;
6723 tmp = fold_binary (code, itype,
6724 fold_convert (itype,
6725 TREE_OPERAND (ref, 1)),
6726 fold_convert (itype, delta));
6727 if (!tmp
6728 || TREE_CODE (tmp) != INTEGER_CST
6729 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6730 continue;
6733 break;
6735 else
6736 mdim = false;
6738 if (!handled_component_p (ref))
6739 return NULL_TREE;
6742 /* We found a suitable array reference. So copy everything up to it,
6743 and replace the index. */
6745 pref = TREE_OPERAND (addr, 0);
6746 ret = copy_node (pref);
6747 pos = ret;
6749 while (pref != ref)
6751 pref = TREE_OPERAND (pref, 0);
6752 TREE_OPERAND (pos, 0) = copy_node (pref);
6753 pos = TREE_OPERAND (pos, 0);
6756 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6757 fold_convert (itype,
6758 TREE_OPERAND (pos, 1)),
6759 fold_convert (itype, delta));
6761 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
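/* Illustrative (a sketch, with int a[] so the step is 4): the
   address &a[i] + j * 4 is rewritten as &a[i + j] because s matches
   the array step, and &a[i] + 8 is rewritten as &a[i + 2] by
   dividing the constant delta 8 by the step.  */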
6765 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6766 means A >= Y && A != MAX, but in this case we know that
6767 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6769 static tree
6770 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6772 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6774 if (TREE_CODE (bound) == LT_EXPR)
6775 a = TREE_OPERAND (bound, 0);
6776 else if (TREE_CODE (bound) == GT_EXPR)
6777 a = TREE_OPERAND (bound, 1);
6778 else
6779 return NULL_TREE;
6781 typea = TREE_TYPE (a);
6782 if (!INTEGRAL_TYPE_P (typea)
6783 && !POINTER_TYPE_P (typea))
6784 return NULL_TREE;
6786 if (TREE_CODE (ineq) == LT_EXPR)
6788 a1 = TREE_OPERAND (ineq, 1);
6789 y = TREE_OPERAND (ineq, 0);
6791 else if (TREE_CODE (ineq) == GT_EXPR)
6793 a1 = TREE_OPERAND (ineq, 0);
6794 y = TREE_OPERAND (ineq, 1);
6796 else
6797 return NULL_TREE;
6799 if (TREE_TYPE (a1) != typea)
6800 return NULL_TREE;
6802 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6803 if (!integer_onep (diff))
6804 return NULL_TREE;
6806 return fold_build2 (GE_EXPR, type, a, y);
6809 /* Fold a sum or difference of at least one multiplication.
6810 Returns the folded tree or NULL if no simplification could be made. */
6812 static tree
6813 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6815 tree arg00, arg01, arg10, arg11;
6816 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6818 /* (A * C) +- (B * C) -> (A+-B) * C.
6819 (A * C) +- A -> A * (C+-1).
6820 We are most concerned about the case where C is a constant,
6821 but other combinations show up during loop reduction. Since
6822 it is not difficult, try all four possibilities. */
6824 if (TREE_CODE (arg0) == MULT_EXPR)
6826 arg00 = TREE_OPERAND (arg0, 0);
6827 arg01 = TREE_OPERAND (arg0, 1);
6829 else
6831 arg00 = arg0;
6832 arg01 = build_one_cst (type);
6834 if (TREE_CODE (arg1) == MULT_EXPR)
6836 arg10 = TREE_OPERAND (arg1, 0);
6837 arg11 = TREE_OPERAND (arg1, 1);
6839 else
6841 arg10 = arg1;
6842 arg11 = build_one_cst (type);
6844 same = NULL_TREE;
6846 if (operand_equal_p (arg01, arg11, 0))
6847 same = arg01, alt0 = arg00, alt1 = arg10;
6848 else if (operand_equal_p (arg00, arg10, 0))
6849 same = arg00, alt0 = arg01, alt1 = arg11;
6850 else if (operand_equal_p (arg00, arg11, 0))
6851 same = arg00, alt0 = arg01, alt1 = arg10;
6852 else if (operand_equal_p (arg01, arg10, 0))
6853 same = arg01, alt0 = arg00, alt1 = arg11;
6855 /* No identical multiplicands; see if we can find a common
6856 power-of-two factor in non-power-of-two multiplies. This
6857 can help in multi-dimensional array access. */
6858 else if (host_integerp (arg01, 0)
6859 && host_integerp (arg11, 0))
6861 HOST_WIDE_INT int01, int11, tmp;
6862 bool swap = false;
6863 tree maybe_same;
6864 int01 = TREE_INT_CST_LOW (arg01);
6865 int11 = TREE_INT_CST_LOW (arg11);
6867 /* Move min of absolute values to int11. */
6868 if ((int01 >= 0 ? int01 : -int01)
6869 < (int11 >= 0 ? int11 : -int11))
6871 tmp = int01, int01 = int11, int11 = tmp;
6872 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6873 maybe_same = arg01;
6874 swap = true;
6876 else
6877 maybe_same = arg11;
6879 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6881 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6882 build_int_cst (TREE_TYPE (arg00),
6883 int01 / int11));
6884 alt1 = arg10;
6885 same = maybe_same;
6886 if (swap)
6887 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6891 if (same)
6892 return fold_build2 (MULT_EXPR, type,
6893 fold_build2 (code, type,
6894 fold_convert (type, alt0),
6895 fold_convert (type, alt1)),
6896 fold_convert (type, same));
6898 return NULL_TREE;
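/* Illustrative: A * 4 + B * 4 becomes (A + B) * 4 through the
   identical-multiplicand checks, and X * 12 + Y * 4 becomes
   (X * 3 + Y) * 4 through the common power-of-two factor path,
   since 4 divides 12.  */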
6901 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6902 specified by EXPR into the buffer PTR of length LEN bytes.
6903 Return the number of bytes placed in the buffer, or zero
6904 upon failure. */
6906 static int
6907 native_encode_int (tree expr, unsigned char *ptr, int len)
6909 tree type = TREE_TYPE (expr);
6910 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6911 int byte, offset, word, words;
6912 unsigned char value;
6914 if (total_bytes > len)
6915 return 0;
6916 words = total_bytes / UNITS_PER_WORD;
6918 for (byte = 0; byte < total_bytes; byte++)
6920 int bitpos = byte * BITS_PER_UNIT;
6921 if (bitpos < HOST_BITS_PER_WIDE_INT)
6922 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6923 else
6924 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6925 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6927 if (total_bytes > UNITS_PER_WORD)
6929 word = byte / UNITS_PER_WORD;
6930 if (WORDS_BIG_ENDIAN)
6931 word = (words - 1) - word;
6932 offset = word * UNITS_PER_WORD;
6933 if (BYTES_BIG_ENDIAN)
6934 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6935 else
6936 offset += byte % UNITS_PER_WORD;
6938 else
6939 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6940 ptr[offset] = value;
6942 return total_bytes;
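/* Byte-order sketch (illustrative): encoding the 32-bit constant
   0x01020304 stores the bytes { 04, 03, 02, 01 } on a little-endian
   target and { 01, 02, 03, 04 } on a big-endian one.  */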
6946 /* Subroutine of native_encode_expr. Encode the REAL_CST
6947 specified by EXPR into the buffer PTR of length LEN bytes.
6948 Return the number of bytes placed in the buffer, or zero
6949 upon failure. */
6951 static int
6952 native_encode_real (tree expr, unsigned char *ptr, int len)
6954 tree type = TREE_TYPE (expr);
6955 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6956 int byte, offset, word, words;
6957 unsigned char value;
6959 /* There are always 32 bits in each long, no matter the size of
6960 the host's long. We handle floating point representations with
6961 up to 192 bits. */
6962 long tmp[6];
6964 if (total_bytes > len)
6965 return 0;
6966 words = total_bytes / UNITS_PER_WORD;
6968 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6970 for (byte = 0; byte < total_bytes; byte++)
6972 int bitpos = byte * BITS_PER_UNIT;
6973 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6975 if (total_bytes > UNITS_PER_WORD)
6977 word = byte / UNITS_PER_WORD;
6978 if (FLOAT_WORDS_BIG_ENDIAN)
6979 word = (words - 1) - word;
6980 offset = word * UNITS_PER_WORD;
6981 if (BYTES_BIG_ENDIAN)
6982 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6983 else
6984 offset += byte % UNITS_PER_WORD;
6986 else
6987 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6988 ptr[offset] = value;
6990 return total_bytes;
6993 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6994 specified by EXPR into the buffer PTR of length LEN bytes.
6995 Return the number of bytes placed in the buffer, or zero
6996 upon failure. */
6998 static int
6999 native_encode_complex (tree expr, unsigned char *ptr, int len)
7001 int rsize, isize;
7002 tree part;
7004 part = TREE_REALPART (expr);
7005 rsize = native_encode_expr (part, ptr, len);
7006 if (rsize == 0)
7007 return 0;
7008 part = TREE_IMAGPART (expr);
7009 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7010 if (isize != rsize)
7011 return 0;
7012 return rsize + isize;
7016 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7017 specified by EXPR into the buffer PTR of length LEN bytes.
7018 Return the number of bytes placed in the buffer, or zero
7019 upon failure. */
7021 static int
7022 native_encode_vector (tree expr, unsigned char *ptr, int len)
7024 int i, size, offset, count;
7025 tree itype, elem, elements;
7027 offset = 0;
7028 elements = TREE_VECTOR_CST_ELTS (expr);
7029 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7030 itype = TREE_TYPE (TREE_TYPE (expr));
7031 size = GET_MODE_SIZE (TYPE_MODE (itype));
7032 for (i = 0; i < count; i++)
7034 if (elements)
7036 elem = TREE_VALUE (elements);
7037 elements = TREE_CHAIN (elements);
7039 else
7040 elem = NULL_TREE;
7042 if (elem)
7044 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7045 return 0;
7047 else
7049 if (offset + size > len)
7050 return 0;
7051 memset (ptr+offset, 0, size);
7053 offset += size;
7055 return offset;
7059 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7060 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7061 buffer PTR of length LEN bytes. Return the number of bytes
7062 placed in the buffer, or zero upon failure. */
7064 static int
7065 native_encode_expr (tree expr, unsigned char *ptr, int len)
7067 switch (TREE_CODE (expr))
7069 case INTEGER_CST:
7070 return native_encode_int (expr, ptr, len);
7072 case REAL_CST:
7073 return native_encode_real (expr, ptr, len);
7075 case COMPLEX_CST:
7076 return native_encode_complex (expr, ptr, len);
7078 case VECTOR_CST:
7079 return native_encode_vector (expr, ptr, len);
7081 default:
7082 return 0;
7087 /* Subroutine of native_interpret_expr. Interpret the contents of
7088 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7089 If the buffer cannot be interpreted, return NULL_TREE. */
7091 static tree
7092 native_interpret_int (tree type, unsigned char *ptr, int len)
7094 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7095 int byte, offset, word, words;
7096 unsigned char value;
7097 unsigned HOST_WIDE_INT lo = 0;
7098 HOST_WIDE_INT hi = 0;
7100 if (total_bytes > len)
7101 return NULL_TREE;
7102 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7103 return NULL_TREE;
7104 words = total_bytes / UNITS_PER_WORD;
7106 for (byte = 0; byte < total_bytes; byte++)
7108 int bitpos = byte * BITS_PER_UNIT;
7109 if (total_bytes > UNITS_PER_WORD)
7111 word = byte / UNITS_PER_WORD;
7112 if (WORDS_BIG_ENDIAN)
7113 word = (words - 1) - word;
7114 offset = word * UNITS_PER_WORD;
7115 if (BYTES_BIG_ENDIAN)
7116 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7117 else
7118 offset += byte % UNITS_PER_WORD;
7120 else
7121 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7122 value = ptr[offset];
7124 if (bitpos < HOST_BITS_PER_WIDE_INT)
7125 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7126 else
7127 hi |= (unsigned HOST_WIDE_INT) value
7128 << (bitpos - HOST_BITS_PER_WIDE_INT);
7131 return build_int_cst_wide_type (type, lo, hi);
7135 /* Subroutine of native_interpret_expr. Interpret the contents of
7136 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7137 If the buffer cannot be interpreted, return NULL_TREE. */
7139 static tree
7140 native_interpret_real (tree type, unsigned char *ptr, int len)
7142 enum machine_mode mode = TYPE_MODE (type);
7143 int total_bytes = GET_MODE_SIZE (mode);
7144 int byte, offset, word, words;
7145 unsigned char value;
7146 /* There are always 32 bits in each long, no matter the size of
7147 the host's long. We handle floating point representations with
7148 up to 192 bits. */
7149 REAL_VALUE_TYPE r;
7150 long tmp[6];
7152 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7153 if (total_bytes > len || total_bytes > 24)
7154 return NULL_TREE;
7155 words = total_bytes / UNITS_PER_WORD;
7157 memset (tmp, 0, sizeof (tmp));
7158 for (byte = 0; byte < total_bytes; byte++)
7160 int bitpos = byte * BITS_PER_UNIT;
7161 if (total_bytes > UNITS_PER_WORD)
7163 word = byte / UNITS_PER_WORD;
7164 if (FLOAT_WORDS_BIG_ENDIAN)
7165 word = (words - 1) - word;
7166 offset = word * UNITS_PER_WORD;
7167 if (BYTES_BIG_ENDIAN)
7168 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7169 else
7170 offset += byte % UNITS_PER_WORD;
7172 else
7173 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7174 value = ptr[offset];
7176 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7179 real_from_target (&r, tmp, mode);
7180 return build_real (type, r);
7184 /* Subroutine of native_interpret_expr. Interpret the contents of
7185 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7186 If the buffer cannot be interpreted, return NULL_TREE. */
7188 static tree
7189 native_interpret_complex (tree type, unsigned char *ptr, int len)
7191 tree etype, rpart, ipart;
7192 int size;
7194 etype = TREE_TYPE (type);
7195 size = GET_MODE_SIZE (TYPE_MODE (etype));
7196 if (size * 2 > len)
7197 return NULL_TREE;
7198 rpart = native_interpret_expr (etype, ptr, size);
7199 if (!rpart)
7200 return NULL_TREE;
7201 ipart = native_interpret_expr (etype, ptr+size, size);
7202 if (!ipart)
7203 return NULL_TREE;
7204 return build_complex (type, rpart, ipart);
7208 /* Subroutine of native_interpret_expr. Interpret the contents of
7209 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7210 If the buffer cannot be interpreted, return NULL_TREE. */
7212 static tree
7213 native_interpret_vector (tree type, unsigned char *ptr, int len)
7215 tree etype, elem, elements;
7216 int i, size, count;
7218 etype = TREE_TYPE (type);
7219 size = GET_MODE_SIZE (TYPE_MODE (etype));
7220 count = TYPE_VECTOR_SUBPARTS (type);
7221 if (size * count > len)
7222 return NULL_TREE;
7224 elements = NULL_TREE;
7225 for (i = count - 1; i >= 0; i--)
7227 elem = native_interpret_expr (etype, ptr+(i*size), size);
7228 if (!elem)
7229 return NULL_TREE;
7230 elements = tree_cons (NULL_TREE, elem, elements);
7232 return build_vector (type, elements);
7236 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7237 the buffer PTR of length LEN as a constant of type TYPE. For
7238 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7239 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7240 return NULL_TREE. */
7242 static tree
7243 native_interpret_expr (tree type, unsigned char *ptr, int len)
7245 switch (TREE_CODE (type))
7247 case INTEGER_TYPE:
7248 case ENUMERAL_TYPE:
7249 case BOOLEAN_TYPE:
7250 return native_interpret_int (type, ptr, len);
7252 case REAL_TYPE:
7253 return native_interpret_real (type, ptr, len);
7255 case COMPLEX_TYPE:
7256 return native_interpret_complex (type, ptr, len);
7258 case VECTOR_TYPE:
7259 return native_interpret_vector (type, ptr, len);
7261 default:
7262 return NULL_TREE;
7267 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7268 TYPE at compile-time. If we're unable to perform the conversion
7269 return NULL_TREE. */
7271 static tree
7272 fold_view_convert_expr (tree type, tree expr)
7274 /* We support up to 512-bit values (for V8DFmode). */
7275 unsigned char buffer[64];
7276 int len;
7278 /* Check that the host and target are sane. */
7279 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7280 return NULL_TREE;
7282 len = native_encode_expr (expr, buffer, sizeof (buffer));
7283 if (len == 0)
7284 return NULL_TREE;
7286 return native_interpret_expr (type, buffer, len);
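/* Illustrative round trip: folding
   VIEW_CONVERT_EXPR<float>(0x3f800000) encodes the INTEGER_CST into
   the buffer and reinterprets the bytes as a REAL_CST, yielding
   1.0f on a target using IEEE single precision.  */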
7290 /* Fold a unary expression of code CODE and type TYPE with operand
7291 OP0. Return the folded expression if folding is successful.
7292 Otherwise, return NULL_TREE. */
7294 tree
7295 fold_unary (enum tree_code code, tree type, tree op0)
7297 tree tem;
7298 tree arg0;
7299 enum tree_code_class kind = TREE_CODE_CLASS (code);
7301 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7302 && TREE_CODE_LENGTH (code) == 1);
7304 arg0 = op0;
7305 if (arg0)
7307 if (code == NOP_EXPR || code == CONVERT_EXPR
7308 || code == FLOAT_EXPR || code == ABS_EXPR)
7310 /* Don't use STRIP_NOPS, because signedness of argument type
7311 matters. */
7312 STRIP_SIGN_NOPS (arg0);
7314 else
7316 /* Strip any conversions that don't change the mode. This
7317 is safe for every expression, except for a comparison
7318 expression because its signedness is derived from its
7319 operands.
7321 Note that this is done as an internal manipulation within
7322 the constant folder, in order to find the simplest
7323 representation of the arguments so that their form can be
7324 studied. In any cases, the appropriate type conversions
7325 should be put back in the tree that will get out of the
7326 constant folder. */
7327 STRIP_NOPS (arg0);
7331 if (TREE_CODE_CLASS (code) == tcc_unary)
7333 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7334 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7335 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7336 else if (TREE_CODE (arg0) == COND_EXPR)
7338 tree arg01 = TREE_OPERAND (arg0, 1);
7339 tree arg02 = TREE_OPERAND (arg0, 2);
7340 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7341 arg01 = fold_build1 (code, type, arg01);
7342 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7343 arg02 = fold_build1 (code, type, arg02);
7344 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7345 arg01, arg02);
7347 /* If this was a conversion, and all we did was to move into
7348 inside the COND_EXPR, bring it back out. But leave it if
7349 it is a conversion from integer to integer and the
7350 result precision is no wider than a word since such a
7351 conversion is cheap and may be optimized away by combine,
7352 while it couldn't if it were outside the COND_EXPR. Then return
7353 so we don't get into an infinite recursion loop taking the
7354 conversion out and then back in. */
7356 if ((code == NOP_EXPR || code == CONVERT_EXPR
7357 || code == NON_LVALUE_EXPR)
7358 && TREE_CODE (tem) == COND_EXPR
7359 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7360 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7361 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7362 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7363 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7364 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7365 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7366 && (INTEGRAL_TYPE_P
7367 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7368 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7369 || flag_syntax_only))
7370 tem = build1 (code, type,
7371 build3 (COND_EXPR,
7372 TREE_TYPE (TREE_OPERAND
7373 (TREE_OPERAND (tem, 1), 0)),
7374 TREE_OPERAND (tem, 0),
7375 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7376 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7377 return tem;
7379 else if (COMPARISON_CLASS_P (arg0))
7381 if (TREE_CODE (type) == BOOLEAN_TYPE)
7383 arg0 = copy_node (arg0);
7384 TREE_TYPE (arg0) = type;
7385 return arg0;
7387 else if (TREE_CODE (type) != INTEGER_TYPE)
7388 return fold_build3 (COND_EXPR, type, arg0,
7389 fold_build1 (code, type,
7390 integer_one_node),
7391 fold_build1 (code, type,
7392 integer_zero_node));
7396 switch (code)
7398 case NOP_EXPR:
7399 case FLOAT_EXPR:
7400 case CONVERT_EXPR:
7401 case FIX_TRUNC_EXPR:
7402 if (TREE_TYPE (op0) == type)
7403 return op0;
7405 /* If we have (type) (a CMP b) and type is an integral type, return
7406 new expression involving the new type. */
7407 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7408 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7409 TREE_OPERAND (op0, 1));
7411 /* Handle cases of two conversions in a row. */
7412 if (TREE_CODE (op0) == NOP_EXPR
7413 || TREE_CODE (op0) == CONVERT_EXPR)
7415 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7416 tree inter_type = TREE_TYPE (op0);
7417 int inside_int = INTEGRAL_TYPE_P (inside_type);
7418 int inside_ptr = POINTER_TYPE_P (inside_type);
7419 int inside_float = FLOAT_TYPE_P (inside_type);
7420 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7421 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7422 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7423 int inter_int = INTEGRAL_TYPE_P (inter_type);
7424 int inter_ptr = POINTER_TYPE_P (inter_type);
7425 int inter_float = FLOAT_TYPE_P (inter_type);
7426 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7427 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7428 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7429 int final_int = INTEGRAL_TYPE_P (type);
7430 int final_ptr = POINTER_TYPE_P (type);
7431 int final_float = FLOAT_TYPE_P (type);
7432 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7433 unsigned int final_prec = TYPE_PRECISION (type);
7434 int final_unsignedp = TYPE_UNSIGNED (type);
7436 /* In addition to the cases of two conversions in a row
7437 handled below, if we are converting something to its own
7438 type via an object of identical or wider precision, neither
7439 conversion is needed. */
7440 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7441 && (((inter_int || inter_ptr) && final_int)
7442 || (inter_float && final_float))
7443 && inter_prec >= final_prec)
7444 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7446 /* Likewise, if the intermediate and final types are either both
7447 float or both integer, we don't need the middle conversion if
7448 it is wider than the final type and doesn't change the signedness
7449 (for integers). Avoid this if the final type is a pointer
7450 since then we sometimes need the inner conversion. Likewise if
7451 the outer has a precision not equal to the size of its mode. */
7452 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7453 || (inter_float && inside_float)
7454 || (inter_vec && inside_vec))
7455 && inter_prec >= inside_prec
7456 && (inter_float || inter_vec
7457 || inter_unsignedp == inside_unsignedp)
7458 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7459 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7460 && ! final_ptr
7461 && (! final_vec || inter_prec == inside_prec))
7462 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7464 /* If we have a sign-extension of a zero-extended value, we can
7465 replace that by a single zero-extension. */
7466 if (inside_int && inter_int && final_int
7467 && inside_prec < inter_prec && inter_prec < final_prec
7468 && inside_unsignedp && !inter_unsignedp)
7469 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7471 /* Two conversions in a row are not needed unless:
7472 - some conversion is floating-point (overstrict for now), or
7473 - some conversion is a vector (overstrict for now), or
7474 - the intermediate type is narrower than both initial and
7475 final, or
7476 - the intermediate type and innermost type differ in signedness,
7477 and the outermost type is wider than the intermediate, or
7478 - the initial type is a pointer type and the precisions of the
7479 intermediate and final types differ, or
7480 - the final type is a pointer type and the precisions of the
7481 initial and intermediate types differ, or
7482 - the final type is a pointer type and the initial type is not, or
7483 - the initial type is a pointer to an array and the final type
7484 is not. */
7485 if (! inside_float && ! inter_float && ! final_float
7486 && ! inside_vec && ! inter_vec && ! final_vec
7487 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7488 && ! (inside_int && inter_int
7489 && inter_unsignedp != inside_unsignedp
7490 && inter_prec < final_prec)
7491 && ((inter_unsignedp && inter_prec > inside_prec)
7492 == (final_unsignedp && final_prec > inter_prec))
7493 && ! (inside_ptr && inter_prec != final_prec)
7494 && ! (final_ptr && inside_prec != inter_prec)
7495 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7496 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7497 && final_ptr == inside_ptr
7498 && ! (inside_ptr
7499 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7500 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7501 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
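/* Worked example (illustrative, assuming 32-bit int and 64-bit long):
   for int i, (int)(long)i meets the conditions above -- no
   floating-point or vector types involved, the intermediate type is
   not narrower than both ends, and no signedness trap applies -- so
   the widening to long is dropped and the expression folds to i. */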
7504 /* Handle (T *)&A.B.C for A being of type T and B and C
7505 living at offset zero. This occurs frequently in
7506 C++ upcasting and then accessing the base. */
7507 if (TREE_CODE (op0) == ADDR_EXPR
7508 && POINTER_TYPE_P (type)
7509 && handled_component_p (TREE_OPERAND (op0, 0)))
7511 HOST_WIDE_INT bitsize, bitpos;
7512 tree offset;
7513 enum machine_mode mode;
7514 int unsignedp, volatilep;
7515 tree base = TREE_OPERAND (op0, 0);
7516 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7517 &mode, &unsignedp, &volatilep, false);
7518 /* If the reference was to a (constant) zero offset, we can use
7519 the address of the base if it has the same base type
7520 as the result type. */
7521 if (! offset && bitpos == 0
7522 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7523 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7524 return fold_convert (type, build_fold_addr_expr (base));
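/* Illustrative case (added comment): given
   struct B { int x; }; struct D { struct B b; };
   and a variable d of type struct D, the cast (struct D *)&d.b has a
   zero bit position and a base whose type matches the pointed-to type,
   so it folds to &d. */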
7527 if ((TREE_CODE (op0) == MODIFY_EXPR
7528 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7529 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7530 /* Detect assigning a bitfield. */
7531 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7532 && DECL_BIT_FIELD
7533 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7535 /* Don't leave an assignment inside a conversion
7536 unless assigning a bitfield. */
7537 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7538 /* First do the assignment, then return converted constant. */
7539 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7540 TREE_NO_WARNING (tem) = 1;
7541 TREE_USED (tem) = 1;
7542 return tem;
7545 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7546 constant (if x has signed type, the sign bit cannot be set
7547 in c). This folds extension into the BIT_AND_EXPR. */
7548 if (INTEGRAL_TYPE_P (type)
7549 && TREE_CODE (type) != BOOLEAN_TYPE
7550 && TREE_CODE (op0) == BIT_AND_EXPR
7551 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7553 tree and = op0;
7554 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7555 int change = 0;
7557 if (TYPE_UNSIGNED (TREE_TYPE (and))
7558 || (TYPE_PRECISION (type)
7559 <= TYPE_PRECISION (TREE_TYPE (and))))
7560 change = 1;
7561 else if (TYPE_PRECISION (TREE_TYPE (and1))
7562 <= HOST_BITS_PER_WIDE_INT
7563 && host_integerp (and1, 1))
7565 unsigned HOST_WIDE_INT cst;
7567 cst = tree_low_cst (and1, 1);
7568 cst &= (HOST_WIDE_INT) -1
7569 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7570 change = (cst == 0);
7571 #ifdef LOAD_EXTEND_OP
7572 if (change
7573 && !flag_syntax_only
7574 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7575 == ZERO_EXTEND))
7577 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7578 and0 = fold_convert (uns, and0);
7579 and1 = fold_convert (uns, and1);
7581 #endif
7583 if (change)
7585 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7586 TREE_INT_CST_HIGH (and1), 0,
7587 TREE_OVERFLOW (and1));
7588 return fold_build2 (BIT_AND_EXPR, type,
7589 fold_convert (type, and0), tem);
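/* Worked example (expository): for unsigned char c, the conversion
   (int)(c & 0x7f) is rewritten as (int)c & 0x7f. Because the narrow
   type is unsigned (or the mask clears its sign bit), zero- and
   sign-extension agree, so the extension folds into the BIT_AND_EXPR. */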
7593 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7594 T2 being pointers to types of the same size. */
7595 if (POINTER_TYPE_P (type)
7596 && BINARY_CLASS_P (arg0)
7597 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7598 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7600 tree arg00 = TREE_OPERAND (arg0, 0);
7601 tree t0 = type;
7602 tree t1 = TREE_TYPE (arg00);
7603 tree tt0 = TREE_TYPE (t0);
7604 tree tt1 = TREE_TYPE (t1);
7605 tree s0 = TYPE_SIZE (tt0);
7606 tree s1 = TYPE_SIZE (tt1);
7608 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7609 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7610 TREE_OPERAND (arg0, 1));
7613 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7614 of the same precision, and X is an integer type not narrower than
7615 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7616 if (INTEGRAL_TYPE_P (type)
7617 && TREE_CODE (op0) == BIT_NOT_EXPR
7618 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7619 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7620 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7621 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7623 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7624 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7625 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7626 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7629 tem = fold_convert_const (code, type, arg0);
7630 return tem ? tem : NULL_TREE;
7632 case VIEW_CONVERT_EXPR:
7633 if (TREE_TYPE (op0) == type)
7634 return op0;
7635 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7636 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7637 return fold_view_convert_expr (type, op0);
7639 case NEGATE_EXPR:
7640 tem = fold_negate_expr (arg0);
7641 if (tem)
7642 return fold_convert (type, tem);
7643 return NULL_TREE;
7645 case ABS_EXPR:
7646 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7647 return fold_abs_const (arg0, type);
7648 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7649 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7650 /* Convert fabs((double)float) into (double)fabsf(float). */
7651 else if (TREE_CODE (arg0) == NOP_EXPR
7652 && TREE_CODE (type) == REAL_TYPE)
7654 tree targ0 = strip_float_extensions (arg0);
7655 if (targ0 != arg0)
7656 return fold_convert (type, fold_build1 (ABS_EXPR,
7657 TREE_TYPE (targ0),
7658 targ0));
7660 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7661 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7662 return arg0;
7664 /* Strip sign ops from argument. */
7665 if (TREE_CODE (type) == REAL_TYPE)
7667 tem = fold_strip_sign_ops (arg0);
7668 if (tem)
7669 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7671 return NULL_TREE;
7673 case CONJ_EXPR:
7674 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7675 return fold_convert (type, arg0);
7676 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7678 tree itype = TREE_TYPE (type);
7679 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7680 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7681 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7683 if (TREE_CODE (arg0) == COMPLEX_CST)
7685 tree itype = TREE_TYPE (type);
7686 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7687 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7688 return build_complex (type, rpart, negate_expr (ipart));
7690 if (TREE_CODE (arg0) == CONJ_EXPR)
7691 return fold_convert (type, TREE_OPERAND (arg0, 0));
7692 return NULL_TREE;
7694 case BIT_NOT_EXPR:
7695 if (TREE_CODE (arg0) == INTEGER_CST)
7696 return fold_not_const (arg0, type);
7697 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7698 return TREE_OPERAND (arg0, 0);
7699 /* Convert ~ (-A) to A - 1. */
7700 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7701 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7702 build_int_cst (type, 1));
7703 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7704 else if (INTEGRAL_TYPE_P (type)
7705 && ((TREE_CODE (arg0) == MINUS_EXPR
7706 && integer_onep (TREE_OPERAND (arg0, 1)))
7707 || (TREE_CODE (arg0) == PLUS_EXPR
7708 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7709 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7710 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7711 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7712 && (tem = fold_unary (BIT_NOT_EXPR, type,
7713 fold_convert (type,
7714 TREE_OPERAND (arg0, 0)))))
7715 return fold_build2 (BIT_XOR_EXPR, type, tem,
7716 fold_convert (type, TREE_OPERAND (arg0, 1)));
7717 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7718 && (tem = fold_unary (BIT_NOT_EXPR, type,
7719 fold_convert (type,
7720 TREE_OPERAND (arg0, 1)))))
7721 return fold_build2 (BIT_XOR_EXPR, type,
7722 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7724 return NULL_TREE;
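/* The preceding BIT_NOT_EXPR cases are instances of the
   two's-complement identity ~y == -y - 1 (expository note):
   hence ~(-x) == x - 1, and ~(x - 1) == ~(x + -1) == -x. */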
7726 case TRUTH_NOT_EXPR:
7727 /* The argument to invert_truthvalue must have Boolean type. */
7728 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7729 arg0 = fold_convert (boolean_type_node, arg0);
7731 /* Note that the operand of this must be an int
7732 and its values must be 0 or 1.
7733 ("true" is a fixed value perhaps depending on the language,
7734 but we don't handle values other than 1 correctly yet.) */
7735 tem = fold_truth_not_expr (arg0);
7736 if (!tem)
7737 return NULL_TREE;
7738 return fold_convert (type, tem);
7740 case REALPART_EXPR:
7741 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7742 return fold_convert (type, arg0);
7743 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7744 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7745 TREE_OPERAND (arg0, 1));
7746 if (TREE_CODE (arg0) == COMPLEX_CST)
7747 return fold_convert (type, TREE_REALPART (arg0));
7748 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7750 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7751 tem = fold_build2 (TREE_CODE (arg0), itype,
7752 fold_build1 (REALPART_EXPR, itype,
7753 TREE_OPERAND (arg0, 0)),
7754 fold_build1 (REALPART_EXPR, itype,
7755 TREE_OPERAND (arg0, 1)));
7756 return fold_convert (type, tem);
7758 if (TREE_CODE (arg0) == CONJ_EXPR)
7760 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7761 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7762 return fold_convert (type, tem);
7764 if (TREE_CODE (arg0) == CALL_EXPR)
7766 tree fn = get_callee_fndecl (arg0);
7767 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7768 switch (DECL_FUNCTION_CODE (fn))
7770 CASE_FLT_FN (BUILT_IN_CEXPI):
7771 fn = mathfn_built_in (type, BUILT_IN_COS);
7772 if (fn)
7773 return build_function_call_expr (fn,
7774 TREE_OPERAND (arg0, 1));
7775 break;
7777 default:
7778 break;
7781 return NULL_TREE;
7783 case IMAGPART_EXPR:
7784 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7785 return fold_convert (type, integer_zero_node);
7786 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7787 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7788 TREE_OPERAND (arg0, 0));
7789 if (TREE_CODE (arg0) == COMPLEX_CST)
7790 return fold_convert (type, TREE_IMAGPART (arg0));
7791 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7793 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7794 tem = fold_build2 (TREE_CODE (arg0), itype,
7795 fold_build1 (IMAGPART_EXPR, itype,
7796 TREE_OPERAND (arg0, 0)),
7797 fold_build1 (IMAGPART_EXPR, itype,
7798 TREE_OPERAND (arg0, 1)));
7799 return fold_convert (type, tem);
7801 if (TREE_CODE (arg0) == CONJ_EXPR)
7803 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7804 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7805 return fold_convert (type, negate_expr (tem));
7807 if (TREE_CODE (arg0) == CALL_EXPR)
7809 tree fn = get_callee_fndecl (arg0);
7810 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7811 switch (DECL_FUNCTION_CODE (fn))
7813 CASE_FLT_FN (BUILT_IN_CEXPI):
7814 fn = mathfn_built_in (type, BUILT_IN_SIN);
7815 if (fn)
7816 return build_function_call_expr (fn,
7817 TREE_OPERAND (arg0, 1));
7818 break;
7820 default:
7821 break;
7824 return NULL_TREE;
7826 default:
7827 return NULL_TREE;
7828 } /* switch (code) */
7831 /* Fold a binary expression of code CODE and type TYPE with operands
7832 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7833 Return the folded expression if folding is successful. Otherwise,
7834 return NULL_TREE. */
7836 static tree
7837 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7839 enum tree_code compl_code;
7841 if (code == MIN_EXPR)
7842 compl_code = MAX_EXPR;
7843 else if (code == MAX_EXPR)
7844 compl_code = MIN_EXPR;
7845 else
7846 gcc_unreachable ();
7848 /* MIN (MAX (a, b), b) == b. */
7849 if (TREE_CODE (op0) == compl_code
7850 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7851 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7853 /* MIN (MAX (b, a), b) == b. */
7854 if (TREE_CODE (op0) == compl_code
7855 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7856 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7857 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7859 /* MIN (a, MAX (a, b)) == a. */
7860 if (TREE_CODE (op1) == compl_code
7861 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7862 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7863 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7865 /* MIN (a, MAX (b, a)) == a. */
7866 if (TREE_CODE (op1) == compl_code
7867 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7868 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7869 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7871 return NULL_TREE;
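/* Illustration of the cases above (added comment): with code ==
   MIN_EXPR the complementary code is MAX_EXPR, so e.g.
   MIN (MAX (a, b), b) simplifies to b -- whatever the inner MAX
   selects, the outer MIN clamps it back to b. omit_one_operand
   preserves any side effects of the discarded operand. */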
7874 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7875 by changing CODE to reduce the magnitude of constants involved in
7876 ARG0 of the comparison.
7877 Returns a canonicalized comparison tree if a simplification was
7878 possible, otherwise returns NULL_TREE. */
7880 static tree
7881 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7882 tree arg0, tree arg1)
7884 enum tree_code code0 = TREE_CODE (arg0);
7885 tree t, cst0 = NULL_TREE;
7886 int sgn0;
7887 bool swap = false;
7889 /* Match A +- CST code arg1 and CST code arg1. */
7890 if (!(((code0 == MINUS_EXPR
7891 || code0 == PLUS_EXPR)
7892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7893 || code0 == INTEGER_CST))
7894 return NULL_TREE;
7896 /* Identify the constant in arg0 and its sign. */
7897 if (code0 == INTEGER_CST)
7898 cst0 = arg0;
7899 else
7900 cst0 = TREE_OPERAND (arg0, 1);
7901 sgn0 = tree_int_cst_sgn (cst0);
7903 /* Overflowed constants and zero will cause problems. */
7904 if (integer_zerop (cst0)
7905 || TREE_OVERFLOW (cst0))
7906 return NULL_TREE;
7908 /* See if we can reduce the magnitude of the constant in
7909 arg0 by changing the comparison code. */
7910 if (code0 == INTEGER_CST)
7912 /* CST <= arg1 -> CST-1 < arg1. */
7913 if (code == LE_EXPR && sgn0 == 1)
7914 code = LT_EXPR;
7915 /* -CST < arg1 -> -CST-1 <= arg1. */
7916 else if (code == LT_EXPR && sgn0 == -1)
7917 code = LE_EXPR;
7918 /* CST > arg1 -> CST-1 >= arg1. */
7919 else if (code == GT_EXPR && sgn0 == 1)
7920 code = GE_EXPR;
7921 /* -CST >= arg1 -> -CST-1 > arg1. */
7922 else if (code == GE_EXPR && sgn0 == -1)
7923 code = GT_EXPR;
7924 else
7925 return NULL_TREE;
7926 /* arg1 code' CST' might be more canonical. */
7927 swap = true;
7929 else
7931 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7932 if (code == LT_EXPR
7933 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7934 code = LE_EXPR;
7935 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7936 else if (code == GT_EXPR
7937 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7938 code = GE_EXPR;
7939 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7940 else if (code == LE_EXPR
7941 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7942 code = LT_EXPR;
7943 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7944 else if (code == GE_EXPR
7945 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7946 code = GT_EXPR;
7947 else
7948 return NULL_TREE;
7951 /* Now build the constant reduced in magnitude. */
7952 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7953 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7954 if (code0 != INTEGER_CST)
7955 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7957 /* If swapping might yield a more canonical form, do so. */
7958 if (swap)
7959 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7960 else
7961 return fold_build2 (code, type, t, arg1);
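/* Worked examples (expository): for 3 <= b, code0 is INTEGER_CST and
   sgn0 == 1, so LE_EXPR becomes LT_EXPR, the constant is reduced to 2,
   and after the swap the canonical form b > 2 is built. For
   a - 5 < b, LT_EXPR becomes LE_EXPR, yielding a - 4 <= b. */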
7964 /* Further canonicalize the comparison ARG0 CODE ARG1 with type TYPE
7965 under undefined overflow. Try to decrease the magnitude of constants involved
7966 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7967 and put sole constants at the second argument position.
7968 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7970 static tree
7971 maybe_canonicalize_comparison (enum tree_code code, tree type,
7972 tree arg0, tree arg1)
7974 tree t;
7976 /* In principle pointers also have undefined overflow behavior,
7977 but that causes problems elsewhere. */
7978 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
7979 || POINTER_TYPE_P (TREE_TYPE (arg0)))
7980 return NULL_TREE;
7982 /* Try canonicalization by simplifying arg0. */
7983 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7984 if (t)
7985 return t;
7987 /* Try canonicalization by simplifying arg1 using the swapped
7988 comparison. */
7989 code = swap_tree_comparison (code);
7990 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7993 /* Subroutine of fold_binary. This routine performs all of the
7994 transformations that are common to the equality/inequality
7995 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7996 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7997 fold_binary itself should call fold_binary instead. Fold a comparison with
7998 tree code CODE and type TYPE with operands OP0 and OP1. Return
7999 the folded comparison or NULL_TREE. */
8001 static tree
8002 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8004 tree arg0, arg1, tem;
8006 arg0 = op0;
8007 arg1 = op1;
8009 STRIP_SIGN_NOPS (arg0);
8010 STRIP_SIGN_NOPS (arg1);
8012 tem = fold_relational_const (code, type, arg0, arg1);
8013 if (tem != NULL_TREE)
8014 return tem;
8016 /* If one arg is a real or integer constant, put it last. */
8017 if (tree_swap_operands_p (arg0, arg1, true))
8018 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8020 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8021 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8022 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8023 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8024 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8025 && (TREE_CODE (arg1) == INTEGER_CST
8026 && !TREE_OVERFLOW (arg1)))
8028 tree const1 = TREE_OPERAND (arg0, 1);
8029 tree const2 = arg1;
8030 tree variable = TREE_OPERAND (arg0, 0);
8031 tree lhs;
8032 int lhs_add;
8033 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8035 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8036 TREE_TYPE (arg1), const2, const1);
8037 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8038 && (TREE_CODE (lhs) != INTEGER_CST
8039 || !TREE_OVERFLOW (lhs)))
8040 return fold_build2 (code, type, variable, lhs);
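/* Example (added): for signed x, x + 7 < 10 has const1 == 7 and
   const2 == 10; lhs is computed as 10 - 7 and the comparison folds to
   x < 3, but only when that constant arithmetic itself does not
   overflow. */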
8043 /* For comparisons of pointers we can decompose them to a compile time
8044 comparison of the base objects and the offsets into the object.
8045 This requires at least one operand being an ADDR_EXPR to do more
8046 than the operand_equal_p test below. */
8047 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8048 && (TREE_CODE (arg0) == ADDR_EXPR
8049 || TREE_CODE (arg1) == ADDR_EXPR))
8051 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8052 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8053 enum machine_mode mode;
8054 int volatilep, unsignedp;
8055 bool indirect_base0 = false;
8057 /* Get base and offset for the access. Strip ADDR_EXPR for
8058 get_inner_reference, but put it back by stripping INDIRECT_REF
8059 off the base object if possible. */
8060 base0 = arg0;
8061 if (TREE_CODE (arg0) == ADDR_EXPR)
8063 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8064 &bitsize, &bitpos0, &offset0, &mode,
8065 &unsignedp, &volatilep, false);
8066 if (TREE_CODE (base0) == INDIRECT_REF)
8067 base0 = TREE_OPERAND (base0, 0);
8068 else
8069 indirect_base0 = true;
8072 base1 = arg1;
8073 if (TREE_CODE (arg1) == ADDR_EXPR)
8075 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8076 &bitsize, &bitpos1, &offset1, &mode,
8077 &unsignedp, &volatilep, false);
8078 /* We have to make sure to have an indirect/non-indirect base1
8079 just the same as we did for base0. */
8080 if (TREE_CODE (base1) == INDIRECT_REF
8081 && !indirect_base0)
8082 base1 = TREE_OPERAND (base1, 0);
8083 else if (!indirect_base0)
8084 base1 = NULL_TREE;
8086 else if (indirect_base0)
8087 base1 = NULL_TREE;
8089 /* If we have equivalent bases we might be able to simplify. */
8090 if (base0 && base1
8091 && operand_equal_p (base0, base1, 0))
8093 /* We can fold this expression to a constant if the non-constant
8094 offset parts are equal. */
8095 if (offset0 == offset1
8096 || (offset0 && offset1
8097 && operand_equal_p (offset0, offset1, 0)))
8099 switch (code)
8101 case EQ_EXPR:
8102 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8103 case NE_EXPR:
8104 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8105 case LT_EXPR:
8106 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8107 case LE_EXPR:
8108 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8109 case GE_EXPR:
8110 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8111 case GT_EXPR:
8112 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8113 default:;
8116 /* We can simplify the comparison to a comparison of the variable
8117 offset parts if the constant offset parts are equal.
8118 Be careful to use signed size type here because otherwise we
8119 mess with array offsets in the wrong way. This is possible
8120 because pointer arithmetic is restricted to remain within an
8121 object and overflow on pointer differences is undefined as of
8122 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8123 else if (bitpos0 == bitpos1)
8125 tree signed_size_type_node;
8126 signed_size_type_node = signed_type_for (size_type_node);
8128 /* By converting to signed size type we cover middle-end pointer
8129 arithmetic which operates on unsigned pointer types of size
8130 type size and ARRAY_REF offsets which are properly sign or
8131 zero extended from their type in case it is narrower than
8132 size type. */
8133 if (offset0 == NULL_TREE)
8134 offset0 = build_int_cst (signed_size_type_node, 0);
8135 else
8136 offset0 = fold_convert (signed_size_type_node, offset0);
8137 if (offset1 == NULL_TREE)
8138 offset1 = build_int_cst (signed_size_type_node, 0);
8139 else
8140 offset1 = fold_convert (signed_size_type_node, offset1);
8142 return fold_build2 (code, type, offset0, offset1);
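/* Example (expository, assuming 32-bit int): for
   struct s { int f; int g; } a;, the comparison &a.f == &a.g
   decomposes to equal bases with bit positions 0 and 32, so it folds
   to constant false via the bitpos comparison above. */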
8147 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8148 same object, then we can fold this to a comparison of the two offsets in
8149 signed size type. This is possible because pointer arithmetic is
8150 restricted to remain within an object and overflow on pointer differences
8151 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8153 We check flag_wrapv directly because pointer types are unsigned,
8154 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8155 normally what we want, to avoid certain odd overflow cases, but
8156 not here. */
8157 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8158 && !flag_wrapv
8159 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8161 tree base0, offset0, base1, offset1;
8163 if (extract_array_ref (arg0, &base0, &offset0)
8164 && extract_array_ref (arg1, &base1, &offset1)
8165 && operand_equal_p (base0, base1, 0))
8167 tree signed_size_type_node;
8168 signed_size_type_node = signed_type_for (size_type_node);
8170 /* By converting to signed size type we cover middle-end pointer
8171 arithmetic which operates on unsigned pointer types of size
8172 type size and ARRAY_REF offsets which are properly sign or
8173 zero extended from their type in case it is narrower than
8174 size type. */
8175 if (offset0 == NULL_TREE)
8176 offset0 = build_int_cst (signed_size_type_node, 0);
8177 else
8178 offset0 = fold_convert (signed_size_type_node, offset0);
8179 if (offset1 == NULL_TREE)
8180 offset1 = build_int_cst (signed_size_type_node, 0);
8181 else
8182 offset1 = fold_convert (signed_size_type_node, offset1);
8184 return fold_build2 (code, type, offset0, offset1);
8188 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8189 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8190 the resulting offset is smaller in absolute value than the
8191 original one. */
8192 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8193 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8194 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8195 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8196 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8197 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8198 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8200 tree const1 = TREE_OPERAND (arg0, 1);
8201 tree const2 = TREE_OPERAND (arg1, 1);
8202 tree variable1 = TREE_OPERAND (arg0, 0);
8203 tree variable2 = TREE_OPERAND (arg1, 0);
8204 tree cst;
8206 /* Put the constant on the side where it doesn't overflow and is
8207 of lower absolute value than before. */
8208 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8209 ? MINUS_EXPR : PLUS_EXPR,
8210 const2, const1, 0);
8211 if (!TREE_OVERFLOW (cst)
8212 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8213 return fold_build2 (code, type,
8214 variable1,
8215 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8216 variable2, cst));
8218 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8219 ? MINUS_EXPR : PLUS_EXPR,
8220 const1, const2, 0);
8221 if (!TREE_OVERFLOW (cst)
8222 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8223 return fold_build2 (code, type,
8224 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8225 variable1, cst),
8226 variable2);
8229 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8230 signed arithmetic case. That form is created by the compiler
8231 often enough for folding it to be of value. One example is in
8232 computing loop trip counts after Operator Strength Reduction. */
8233 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8234 && TREE_CODE (arg0) == MULT_EXPR
8235 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8236 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8237 && integer_zerop (arg1))
8239 tree const1 = TREE_OPERAND (arg0, 1);
8240 tree const2 = arg1; /* zero */
8241 tree variable1 = TREE_OPERAND (arg0, 0);
8242 enum tree_code cmp_code = code;
8244 gcc_assert (!integer_zerop (const1));
8246 /* If const1 is negative we swap the sense of the comparison. */
8247 if (tree_int_cst_sgn (const1) < 0)
8248 cmp_code = swap_tree_comparison (cmp_code);
8250 return fold_build2 (cmp_code, type, variable1, const2);
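/* Example (added comment): with undefined signed overflow, x * 4 > 0
   folds to x > 0; with a negative multiplier, x * -4 > 0 swaps the
   comparison sense and folds to x < 0. */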
8253 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8254 if (tem)
8255 return tem;
8257 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8259 tree targ0 = strip_float_extensions (arg0);
8260 tree targ1 = strip_float_extensions (arg1);
8261 tree newtype = TREE_TYPE (targ0);
8263 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8264 newtype = TREE_TYPE (targ1);
8266 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8267 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8268 return fold_build2 (code, type, fold_convert (newtype, targ0),
8269 fold_convert (newtype, targ1));
8271 /* (-a) CMP (-b) -> b CMP a */
8272 if (TREE_CODE (arg0) == NEGATE_EXPR
8273 && TREE_CODE (arg1) == NEGATE_EXPR)
8274 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8275 TREE_OPERAND (arg0, 0));
8277 if (TREE_CODE (arg1) == REAL_CST)
8279 REAL_VALUE_TYPE cst;
8280 cst = TREE_REAL_CST (arg1);
8282 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8283 if (TREE_CODE (arg0) == NEGATE_EXPR)
8284 return fold_build2 (swap_tree_comparison (code), type,
8285 TREE_OPERAND (arg0, 0),
8286 build_real (TREE_TYPE (arg1),
8287 REAL_VALUE_NEGATE (cst)));
8289 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8290 /* a CMP (-0) -> a CMP 0 */
8291 if (REAL_VALUE_MINUS_ZERO (cst))
8292 return fold_build2 (code, type, arg0,
8293 build_real (TREE_TYPE (arg1), dconst0));
8295 /* x != NaN is always true, other ops are always false. */
8296 if (REAL_VALUE_ISNAN (cst)
8297 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8299 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8300 return omit_one_operand (type, tem, arg0);
8303 /* Fold comparisons against infinity. */
8304 if (REAL_VALUE_ISINF (cst))
8306 tem = fold_inf_compare (code, type, arg0, arg1);
8307 if (tem != NULL_TREE)
8308 return tem;
8312 /* If this is a comparison of a real constant with a PLUS_EXPR
8313 or a MINUS_EXPR of a real constant, we can convert it into a
8314 comparison with a revised real constant as long as no overflow
8315 occurs when unsafe_math_optimizations are enabled. */
8316 if (flag_unsafe_math_optimizations
8317 && TREE_CODE (arg1) == REAL_CST
8318 && (TREE_CODE (arg0) == PLUS_EXPR
8319 || TREE_CODE (arg0) == MINUS_EXPR)
8320 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8321 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8322 ? MINUS_EXPR : PLUS_EXPR,
8323 arg1, TREE_OPERAND (arg0, 1), 0))
8324 && !TREE_OVERFLOW (tem))
8325 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8327 /* Likewise, we can simplify a comparison of a real constant with
8328 a MINUS_EXPR whose first operand is also a real constant, i.e.
8329 (c1 - x) < c2 becomes x > c1-c2. */
8330 if (flag_unsafe_math_optimizations
8331 && TREE_CODE (arg1) == REAL_CST
8332 && TREE_CODE (arg0) == MINUS_EXPR
8333 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8334 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8335 arg1, 0))
8336 && !TREE_OVERFLOW (tem))
8337 return fold_build2 (swap_tree_comparison (code), type,
8338 TREE_OPERAND (arg0, 1), tem);
8340 /* Fold comparisons against built-in math functions. */
8341 if (TREE_CODE (arg1) == REAL_CST
8342 && flag_unsafe_math_optimizations
8343 && ! flag_errno_math)
8345 enum built_in_function fcode = builtin_mathfn_code (arg0);
8347 if (fcode != END_BUILTINS)
8349 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8350 if (tem != NULL_TREE)
8351 return tem;
8356 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8357 if (TREE_CONSTANT (arg1)
8358 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8359 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8360 /* This optimization is invalid for ordered comparisons
8361 if CONST+INCR overflows or if foo+incr might overflow.
8362 This optimization is invalid for floating point due to rounding.
8363 For pointer types we assume overflow doesn't happen. */
8364 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8365 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8366 && (code == EQ_EXPR || code == NE_EXPR))))
8368 tree varop, newconst;
8370 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8372 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8373 arg1, TREE_OPERAND (arg0, 1));
8374 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8375 TREE_OPERAND (arg0, 0),
8376 TREE_OPERAND (arg0, 1));
8378 else
8380 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8381 arg1, TREE_OPERAND (arg0, 1));
8382 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8383 TREE_OPERAND (arg0, 0),
8384 TREE_OPERAND (arg0, 1));
8388 /* If VAROP is a reference to a bitfield, we must mask
8389 the constant by the width of the field. */
8390 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8391 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8392 && host_integerp (DECL_SIZE (TREE_OPERAND
8393 (TREE_OPERAND (varop, 0), 1)), 1))
8395 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8396 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8397 tree folded_compare, shift;
8399 /* First check whether the comparison would always come
8400 out the same. If we don't do that we would
8401 change the meaning with the masking. */
8402 folded_compare = fold_build2 (code, type,
8403 TREE_OPERAND (varop, 0), arg1);
8404 if (TREE_CODE (folded_compare) == INTEGER_CST)
8405 return omit_one_operand (type, folded_compare, varop);
8407 shift = build_int_cst (NULL_TREE,
8408 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8409 shift = fold_convert (TREE_TYPE (varop), shift);
8410 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8411 newconst, shift);
8412 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8413 newconst, shift);
8416 return fold_build2 (code, type, varop, newconst);
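/* Example (expository): i++ == 5 is rewritten as ++i == 6, folding
   the increment into the constant. As noted above, this is restricted
   to equality on integers (or any comparison on pointers), since an
   ordered integer comparison could change if CONST + INCR wraps. */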
8419 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8420 && (TREE_CODE (arg0) == NOP_EXPR
8421 || TREE_CODE (arg0) == CONVERT_EXPR))
8423 /* If we are widening one operand of an integer comparison,
8424 see if the other operand is similarly being widened. Perhaps we
8425 can do the comparison in the narrower type. */
8426 tem = fold_widened_comparison (code, type, arg0, arg1);
8427 if (tem)
8428 return tem;
8430 /* Or if we are changing signedness. */
8431 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8432 if (tem)
8433 return tem;
8436 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8437 constant, we can simplify it. */
8438 if (TREE_CODE (arg1) == INTEGER_CST
8439 && (TREE_CODE (arg0) == MIN_EXPR
8440 || TREE_CODE (arg0) == MAX_EXPR)
8441 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8443 tem = optimize_minmax_comparison (code, type, op0, op1);
8444 if (tem)
8445 return tem;
8448 /* Simplify comparison of something with itself. (For IEEE
8449 floating-point, we can only do some of these simplifications.) */
8450 if (operand_equal_p (arg0, arg1, 0))
8452 switch (code)
8454 case EQ_EXPR:
8455 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8456 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8457 return constant_boolean_node (1, type);
8458 break;
8460 case GE_EXPR:
8461 case LE_EXPR:
8462 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8463 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8464 return constant_boolean_node (1, type);
8465 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8467 case NE_EXPR:
8468 /* For NE, we can only do this simplification if integer
8469 or we don't honor IEEE floating point NaNs. */
8470 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8471 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8472 break;
8473 /* ... fall through ... */
8474 case GT_EXPR:
8475 case LT_EXPR:
8476 return constant_boolean_node (0, type);
8477 default:
8478 gcc_unreachable ();
8482 /* If we are comparing an expression that just has comparisons
8483 of two integer values, arithmetic expressions of those comparisons,
8484 and constants, we can simplify it. There are only three cases
8485 to check: the two values can either be equal, the first can be
8486 greater, or the second can be greater. Fold the expression for
8487 those three values. Since each value must be 0 or 1, we have
8488 eight possibilities, each of which corresponds to the constant 0
8489 or 1 or one of the six possible comparisons.
8491 This handles common cases like (a > b) == 0 but also handles
8492 expressions like ((x > y) - (y > x)) > 0, which supposedly
8493 occur in macroized code. */
8495 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8497 tree cval1 = 0, cval2 = 0;
8498 int save_p = 0;
8500 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8501 /* Don't handle degenerate cases here; they should already
8502 have been handled anyway. */
8503 && cval1 != 0 && cval2 != 0
8504 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8505 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8506 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8507 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8508 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8509 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8510 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8512 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8513 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8515 /* We can't just pass T to eval_subst in case cval1 or cval2
8516 was the same as ARG1. */
8518 tree high_result
8519 = fold_build2 (code, type,
8520 eval_subst (arg0, cval1, maxval,
8521 cval2, minval),
8522 arg1);
8523 tree equal_result
8524 = fold_build2 (code, type,
8525 eval_subst (arg0, cval1, maxval,
8526 cval2, maxval),
8527 arg1);
8528 tree low_result
8529 = fold_build2 (code, type,
8530 eval_subst (arg0, cval1, minval,
8531 cval2, maxval),
8532 arg1);
8534 /* All three of these results should be 0 or 1. Confirm they are.
8535 Then use those values to select the proper code to use. */
8537 if (TREE_CODE (high_result) == INTEGER_CST
8538 && TREE_CODE (equal_result) == INTEGER_CST
8539 && TREE_CODE (low_result) == INTEGER_CST)
8541 /* Make a 3-bit mask with the high-order bit being the
8542 value for `>', the next for `=', and the low for `<'. */
8543 switch ((integer_onep (high_result) * 4)
8544 + (integer_onep (equal_result) * 2)
8545 + integer_onep (low_result))
8547 case 0:
8548 /* Always false. */
8549 return omit_one_operand (type, integer_zero_node, arg0);
8550 case 1:
8551 code = LT_EXPR;
8552 break;
8553 case 2:
8554 code = EQ_EXPR;
8555 break;
8556 case 3:
8557 code = LE_EXPR;
8558 break;
8559 case 4:
8560 code = GT_EXPR;
8561 break;
8562 case 5:
8563 code = NE_EXPR;
8564 break;
8565 case 6:
8566 code = GE_EXPR;
8567 break;
8568 case 7:
8569 /* Always true. */
8570 return omit_one_operand (type, integer_one_node, arg0);
8573 if (save_p)
8574 return save_expr (build2 (code, type, cval1, cval2));
8575 return fold_build2 (code, type, cval1, cval2);
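/* Worked example (added): for (a > b) == 0, substituting the three
   orderings of a and b gives high/equal/low results 0, 1, 1, i.e.
   mask value 3, so the whole expression folds to a <= b. */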
8580 /* Fold a comparison of the address of COMPONENT_REFs with the same
8581 type and component to a comparison of the address of the base
8582 object. In short, &x->a OP &y->a becomes x OP y and
8583 &x->a OP &y.a becomes x OP &y. */
8584 if (TREE_CODE (arg0) == ADDR_EXPR
8585 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8586 && TREE_CODE (arg1) == ADDR_EXPR
8587 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8589 tree cref0 = TREE_OPERAND (arg0, 0);
8590 tree cref1 = TREE_OPERAND (arg1, 0);
8591 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8593 tree op0 = TREE_OPERAND (cref0, 0);
8594 tree op1 = TREE_OPERAND (cref1, 0);
8595 return fold_build2 (code, type,
8596 build_fold_addr_expr (op0),
8597 build_fold_addr_expr (op1));
8601 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8602 into a single range test. */
8603 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8604 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8605 && TREE_CODE (arg1) == INTEGER_CST
8606 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8607 && !integer_zerop (TREE_OPERAND (arg0, 1))
8608 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8609 && !TREE_OVERFLOW (arg1))
8611 tem = fold_div_compare (code, type, arg0, arg1);
8612 if (tem != NULL_TREE)
8613 return tem;
8616 /* Fold ~X op ~Y as Y op X. */
8617 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8618 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8619 return fold_build2 (code, type,
8620 TREE_OPERAND (arg1, 0),
8621 TREE_OPERAND (arg0, 0));
8623 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8624 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8625 && TREE_CODE (arg1) == INTEGER_CST)
8626 return fold_build2 (swap_tree_comparison (code), type,
8627 TREE_OPERAND (arg0, 0),
8628 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8630 return NULL_TREE;
8634 /* Subroutine of fold_binary. Optimize complex multiplications of the
8635 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8636 argument EXPR represents the expression "z" of type TYPE. */
8638 static tree
8639 fold_mult_zconjz (tree type, tree expr)
8641 tree itype = TREE_TYPE (type);
8642 tree rpart, ipart, tem;
8644 if (TREE_CODE (expr) == COMPLEX_EXPR)
8646 rpart = TREE_OPERAND (expr, 0);
8647 ipart = TREE_OPERAND (expr, 1);
8649 else if (TREE_CODE (expr) == COMPLEX_CST)
8651 rpart = TREE_REALPART (expr);
8652 ipart = TREE_IMAGPART (expr);
8654 else
8656 expr = save_expr (expr);
8657 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8658 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8661 rpart = save_expr (rpart);
8662 ipart = save_expr (ipart);
8663 tem = fold_build2 (PLUS_EXPR, itype,
8664 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8665 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8666 return fold_build2 (COMPLEX_EXPR, type, tem,
8667 fold_convert (itype, integer_zero_node));
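/* The identity used above (expository note): for z == a + b*i,
   z * conj(z) == (a + b*i) * (a - b*i) == a*a + b*b, a real value,
   hence the COMPLEX_EXPR of the sum of squares and zero. */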
8671 /* Fold a binary expression of code CODE and type TYPE with operands
8672 OP0 and OP1. Return the folded expression if folding is
8673 successful. Otherwise, return NULL_TREE. */
8675 tree
8676 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8678 enum tree_code_class kind = TREE_CODE_CLASS (code);
8679 tree arg0, arg1, tem;
8680 tree t1 = NULL_TREE;
8682 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8683 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8684 && TREE_CODE_LENGTH (code) == 2
8685 && op0 != NULL_TREE
8686 && op1 != NULL_TREE);
8688 arg0 = op0;
8689 arg1 = op1;
8691 /* Strip any conversions that don't change the mode. This is
8692 safe for every expression, except for a comparison expression
8693 because its signedness is derived from its operands. So, in
8694 the latter case, only strip conversions that don't change the
8695 signedness.
8697 Note that this is done as an internal manipulation within the
8698 constant folder, in order to find the simplest representation
8699 of the arguments so that their form can be studied. In any
8700 case, the appropriate type conversions should be put back in
8701 the tree that will get out of the constant folder. */
8703 if (kind == tcc_comparison)
8705 STRIP_SIGN_NOPS (arg0);
8706 STRIP_SIGN_NOPS (arg1);
8708 else
8710 STRIP_NOPS (arg0);
8711 STRIP_NOPS (arg1);
8714 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8715 constant but we can't do arithmetic on them. */
8716 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8717 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8718 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8719 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8721 if (kind == tcc_binary)
8722 tem = const_binop (code, arg0, arg1, 0);
8723 else if (kind == tcc_comparison)
8724 tem = fold_relational_const (code, type, arg0, arg1);
8725 else
8726 tem = NULL_TREE;
8728 if (tem != NULL_TREE)
8730 if (TREE_TYPE (tem) != type)
8731 tem = fold_convert (type, tem);
8732 return tem;
8736 /* If this is a commutative operation, and ARG0 is a constant, move it
8737 to ARG1 to reduce the number of tests below. */
8738 if (commutative_tree_code (code)
8739 && tree_swap_operands_p (arg0, arg1, true))
8740 return fold_build2 (code, type, op1, op0);
8742 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8744 First check for cases where an arithmetic operation is applied to a
8745 compound, conditional, or comparison operation. Push the arithmetic
8746 operation inside the compound or conditional to see if any folding
8747 can then be done. Convert comparison to conditional for this purpose.
8748 This also optimizes non-constant cases that used to be done in
8749 expand_expr.
8751 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
8752 one of the operands is a comparison and the other is a comparison, a
8753 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8754 code below would make the expression more complex. Change it to a
8755 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8756 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8758 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8759 || code == EQ_EXPR || code == NE_EXPR)
8760 && ((truth_value_p (TREE_CODE (arg0))
8761 && (truth_value_p (TREE_CODE (arg1))
8762 || (TREE_CODE (arg1) == BIT_AND_EXPR
8763 && integer_onep (TREE_OPERAND (arg1, 1)))))
8764 || (truth_value_p (TREE_CODE (arg1))
8765 && (truth_value_p (TREE_CODE (arg0))
8766 || (TREE_CODE (arg0) == BIT_AND_EXPR
8767 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8769 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8770 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8771 : TRUTH_XOR_EXPR,
8772 boolean_type_node,
8773 fold_convert (boolean_type_node, arg0),
8774 fold_convert (boolean_type_node, arg1));
8776 if (code == EQ_EXPR)
8777 tem = invert_truthvalue (tem);
8779 return fold_convert (type, tem);
8782 if (TREE_CODE_CLASS (code) == tcc_binary
8783 || TREE_CODE_CLASS (code) == tcc_comparison)
8785 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8786 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8787 fold_build2 (code, type,
8788 TREE_OPERAND (arg0, 1), op1));
8789 if (TREE_CODE (arg1) == COMPOUND_EXPR
8790 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8791 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8792 fold_build2 (code, type,
8793 op0, TREE_OPERAND (arg1, 1)));
8795 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8797 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8798 arg0, arg1,
8799 /*cond_first_p=*/1);
8800 if (tem != NULL_TREE)
8801 return tem;
8804 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8806 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8807 arg1, arg0,
8808 /*cond_first_p=*/0);
8809 if (tem != NULL_TREE)
8810 return tem;
8814 switch (code)
8816 case PLUS_EXPR:
8817 /* A + (-B) -> A - B */
8818 if (TREE_CODE (arg1) == NEGATE_EXPR)
8819 return fold_build2 (MINUS_EXPR, type,
8820 fold_convert (type, arg0),
8821 fold_convert (type, TREE_OPERAND (arg1, 0)));
8822 /* (-A) + B -> B - A */
8823 if (TREE_CODE (arg0) == NEGATE_EXPR
8824 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8825 return fold_build2 (MINUS_EXPR, type,
8826 fold_convert (type, arg1),
8827 fold_convert (type, TREE_OPERAND (arg0, 0)));
8828 /* Convert ~A + 1 to -A. */
8829 if (INTEGRAL_TYPE_P (type)
8830 && TREE_CODE (arg0) == BIT_NOT_EXPR
8831 && integer_onep (arg1))
8832 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8834 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8835 same or one. */
8836 if ((TREE_CODE (arg0) == MULT_EXPR
8837 || TREE_CODE (arg1) == MULT_EXPR)
8838 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8840 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8841 if (tem)
8842 return tem;
8845 if (! FLOAT_TYPE_P (type))
8847 if (integer_zerop (arg1))
8848 return non_lvalue (fold_convert (type, arg0));
8850 /* ~X + X is -1. */
8851 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8852 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8853 && !TYPE_OVERFLOW_TRAPS (type))
8855 t1 = build_int_cst_type (type, -1);
8856 return omit_one_operand (type, t1, arg1);
8859 /* X + ~X is -1. */
8860 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8861 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8862 && !TYPE_OVERFLOW_TRAPS (type))
8864 t1 = build_int_cst_type (type, -1);
8865 return omit_one_operand (type, t1, arg0);
8868 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8869 with a constant, and the two constants have no bits in common,
8870 we should treat this as a BIT_IOR_EXPR since this may produce more
8871 simplifications. */
8872 if (TREE_CODE (arg0) == BIT_AND_EXPR
8873 && TREE_CODE (arg1) == BIT_AND_EXPR
8874 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8875 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8876 && integer_zerop (const_binop (BIT_AND_EXPR,
8877 TREE_OPERAND (arg0, 1),
8878 TREE_OPERAND (arg1, 1), 0)))
8880 code = BIT_IOR_EXPR;
8881 goto bit_ior;
8884 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8885 (plus (plus (mult) (mult)) (foo)) so that we can
8886 take advantage of the factoring cases below. */
8887 if (((TREE_CODE (arg0) == PLUS_EXPR
8888 || TREE_CODE (arg0) == MINUS_EXPR)
8889 && TREE_CODE (arg1) == MULT_EXPR)
8890 || ((TREE_CODE (arg1) == PLUS_EXPR
8891 || TREE_CODE (arg1) == MINUS_EXPR)
8892 && TREE_CODE (arg0) == MULT_EXPR))
8894 tree parg0, parg1, parg, marg;
8895 enum tree_code pcode;
8897 if (TREE_CODE (arg1) == MULT_EXPR)
8898 parg = arg0, marg = arg1;
8899 else
8900 parg = arg1, marg = arg0;
8901 pcode = TREE_CODE (parg);
8902 parg0 = TREE_OPERAND (parg, 0);
8903 parg1 = TREE_OPERAND (parg, 1);
8904 STRIP_NOPS (parg0);
8905 STRIP_NOPS (parg1);
8907 if (TREE_CODE (parg0) == MULT_EXPR
8908 && TREE_CODE (parg1) != MULT_EXPR)
8909 return fold_build2 (pcode, type,
8910 fold_build2 (PLUS_EXPR, type,
8911 fold_convert (type, parg0),
8912 fold_convert (type, marg)),
8913 fold_convert (type, parg1));
8914 if (TREE_CODE (parg0) != MULT_EXPR
8915 && TREE_CODE (parg1) == MULT_EXPR)
8916 return fold_build2 (PLUS_EXPR, type,
8917 fold_convert (type, parg0),
8918 fold_build2 (pcode, type,
8919 fold_convert (type, marg),
8920 fold_convert (type,
8921 parg1)));
8924 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8925 of the array. The loop optimizer sometimes produces this type of
8926 expression. */
8927 if (TREE_CODE (arg0) == ADDR_EXPR)
8929 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8930 if (tem)
8931 return fold_convert (type, tem);
8933 else if (TREE_CODE (arg1) == ADDR_EXPR)
8935 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8936 if (tem)
8937 return fold_convert (type, tem);
8940 else
8942 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8943 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8944 return non_lvalue (fold_convert (type, arg0));
8946 /* Likewise if the operands are reversed. */
8947 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8948 return non_lvalue (fold_convert (type, arg1));
8950 /* Convert X + -C into X - C. */
8951 if (TREE_CODE (arg1) == REAL_CST
8952 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8954 tem = fold_negate_const (arg1, type);
8955 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8956 return fold_build2 (MINUS_EXPR, type,
8957 fold_convert (type, arg0),
8958 fold_convert (type, tem));
8961 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8962 to __complex__ ( x, y ). This is not the same for SNaNs or
8963 if signed zeros are involved. */
8964 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8965 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8966 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8968 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8969 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8970 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8971 bool arg0rz = false, arg0iz = false;
8972 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8973 || (arg0i && (arg0iz = real_zerop (arg0i))))
8975 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8976 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8977 if (arg0rz && arg1i && real_zerop (arg1i))
8979 tree rp = arg1r ? arg1r
8980 : build1 (REALPART_EXPR, rtype, arg1);
8981 tree ip = arg0i ? arg0i
8982 : build1 (IMAGPART_EXPR, rtype, arg0);
8983 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8985 else if (arg0iz && arg1r && real_zerop (arg1r))
8987 tree rp = arg0r ? arg0r
8988 : build1 (REALPART_EXPR, rtype, arg0);
8989 tree ip = arg1i ? arg1i
8990 : build1 (IMAGPART_EXPR, rtype, arg1);
8991 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8996 if (flag_unsafe_math_optimizations
8997 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8998 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8999 && (tem = distribute_real_division (code, type, arg0, arg1)))
9000 return tem;
9002 /* Convert x+x into x*2.0. */
9003 if (operand_equal_p (arg0, arg1, 0)
9004 && SCALAR_FLOAT_TYPE_P (type))
9005 return fold_build2 (MULT_EXPR, type, arg0,
9006 build_real (type, dconst2));
9008 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9009 if (flag_unsafe_math_optimizations
9010 && TREE_CODE (arg1) == PLUS_EXPR
9011 && TREE_CODE (arg0) != MULT_EXPR)
9013 tree tree10 = TREE_OPERAND (arg1, 0);
9014 tree tree11 = TREE_OPERAND (arg1, 1);
9015 if (TREE_CODE (tree11) == MULT_EXPR
9016 && TREE_CODE (tree10) == MULT_EXPR)
9018 tree tree0;
9019 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9020 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9023 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9024 if (flag_unsafe_math_optimizations
9025 && TREE_CODE (arg0) == PLUS_EXPR
9026 && TREE_CODE (arg1) != MULT_EXPR)
9028 tree tree00 = TREE_OPERAND (arg0, 0);
9029 tree tree01 = TREE_OPERAND (arg0, 1);
9030 if (TREE_CODE (tree01) == MULT_EXPR
9031 && TREE_CODE (tree00) == MULT_EXPR)
9033 tree tree0;
9034 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9035 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9040 bit_rotate:
9041 	  /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size
9042 	     of A, is a rotate of A by C1 bits.  */
9043 	  /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size
9044 	     of A, is a rotate of A by B bits.  */
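	  /* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29)
	     becomes a left-rotate of A by 3, and (A << B) + (A >> (32 - B))
	     a rotate of A by B.  */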
9046 enum tree_code code0, code1;
9047 code0 = TREE_CODE (arg0);
9048 code1 = TREE_CODE (arg1);
9049 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9050 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9051 && operand_equal_p (TREE_OPERAND (arg0, 0),
9052 TREE_OPERAND (arg1, 0), 0)
9053 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9055 tree tree01, tree11;
9056 enum tree_code code01, code11;
9058 tree01 = TREE_OPERAND (arg0, 1);
9059 tree11 = TREE_OPERAND (arg1, 1);
9060 STRIP_NOPS (tree01);
9061 STRIP_NOPS (tree11);
9062 code01 = TREE_CODE (tree01);
9063 code11 = TREE_CODE (tree11);
9064 if (code01 == INTEGER_CST
9065 && code11 == INTEGER_CST
9066 && TREE_INT_CST_HIGH (tree01) == 0
9067 && TREE_INT_CST_HIGH (tree11) == 0
9068 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9069 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9070 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9071 code0 == LSHIFT_EXPR ? tree01 : tree11);
9072 else if (code11 == MINUS_EXPR)
9074 tree tree110, tree111;
9075 tree110 = TREE_OPERAND (tree11, 0);
9076 tree111 = TREE_OPERAND (tree11, 1);
9077 STRIP_NOPS (tree110);
9078 STRIP_NOPS (tree111);
9079 if (TREE_CODE (tree110) == INTEGER_CST
9080 && 0 == compare_tree_int (tree110,
9081 TYPE_PRECISION
9082 (TREE_TYPE (TREE_OPERAND
9083 (arg0, 0))))
9084 && operand_equal_p (tree01, tree111, 0))
9085 return build2 ((code0 == LSHIFT_EXPR
9086 ? LROTATE_EXPR
9087 : RROTATE_EXPR),
9088 type, TREE_OPERAND (arg0, 0), tree01);
9090 else if (code01 == MINUS_EXPR)
9092 tree tree010, tree011;
9093 tree010 = TREE_OPERAND (tree01, 0);
9094 tree011 = TREE_OPERAND (tree01, 1);
9095 STRIP_NOPS (tree010);
9096 STRIP_NOPS (tree011);
9097 if (TREE_CODE (tree010) == INTEGER_CST
9098 && 0 == compare_tree_int (tree010,
9099 TYPE_PRECISION
9100 (TREE_TYPE (TREE_OPERAND
9101 (arg0, 0))))
9102 && operand_equal_p (tree11, tree011, 0))
9103 return build2 ((code0 != LSHIFT_EXPR
9104 ? LROTATE_EXPR
9105 : RROTATE_EXPR),
9106 type, TREE_OPERAND (arg0, 0), tree11);
9111 associate:
9112 	  /* In most languages, we can't reassociate operations on floats
9113 	     across parentheses.  Rather than remember where the parentheses
9114 	     were, we don't associate floats at all, unless the user has
9115 	     specified -funsafe-math-optimizations.  */
9117 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9119 tree var0, con0, lit0, minus_lit0;
9120 tree var1, con1, lit1, minus_lit1;
9122 /* Split both trees into variables, constants, and literals. Then
9123 associate each group together, the constants with literals,
9124 then the result with variables. This increases the chances of
9125 literals being recombined later and of generating relocatable
9126 expressions for the sum of a constant and literal. */
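	      /* For example, (X + 1) + (Y + 2) splits into the variables X
	         and Y and the literals 1 and 2, and is regrouped as
	         (X + Y) + 3 so the constants combine.  */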
9127 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9128 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9129 code == MINUS_EXPR);
9131 /* Only do something if we found more than two objects. Otherwise,
9132 nothing has changed and we risk infinite recursion. */
9133 if (2 < ((var0 != 0) + (var1 != 0)
9134 + (con0 != 0) + (con1 != 0)
9135 + (lit0 != 0) + (lit1 != 0)
9136 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9138 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9139 if (code == MINUS_EXPR)
9140 code = PLUS_EXPR;
9142 var0 = associate_trees (var0, var1, code, type);
9143 con0 = associate_trees (con0, con1, code, type);
9144 lit0 = associate_trees (lit0, lit1, code, type);
9145 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9147 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
9148 	         greater than the positive part.  Otherwise, the multiplicative
9149 	         folding code (i.e. extract_muldiv) may be fooled when
9150 	         unsigned constants are subtracted, as in the following
9151 	         example: ((X*2 + 4) - 8U)/2.  */
9152 if (minus_lit0 && lit0)
9154 if (TREE_CODE (lit0) == INTEGER_CST
9155 && TREE_CODE (minus_lit0) == INTEGER_CST
9156 && tree_int_cst_lt (lit0, minus_lit0))
9158 minus_lit0 = associate_trees (minus_lit0, lit0,
9159 MINUS_EXPR, type);
9160 lit0 = 0;
9162 else
9164 lit0 = associate_trees (lit0, minus_lit0,
9165 MINUS_EXPR, type);
9166 minus_lit0 = 0;
9169 if (minus_lit0)
9171 if (con0 == 0)
9172 return fold_convert (type,
9173 associate_trees (var0, minus_lit0,
9174 MINUS_EXPR, type));
9175 else
9177 con0 = associate_trees (con0, minus_lit0,
9178 MINUS_EXPR, type);
9179 return fold_convert (type,
9180 associate_trees (var0, con0,
9181 PLUS_EXPR, type));
9185 con0 = associate_trees (con0, lit0, code, type);
9186 return fold_convert (type, associate_trees (var0, con0,
9187 code, type));
9191 return NULL_TREE;
9193 case MINUS_EXPR:
9194 /* A - (-B) -> A + B */
9195 if (TREE_CODE (arg1) == NEGATE_EXPR)
9196 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9197 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9198 if (TREE_CODE (arg0) == NEGATE_EXPR
9199 && (FLOAT_TYPE_P (type)
9200 || INTEGRAL_TYPE_P (type))
9201 && negate_expr_p (arg1)
9202 && reorder_operands_p (arg0, arg1))
9203 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9204 TREE_OPERAND (arg0, 0));
9205 /* Convert -A - 1 to ~A. */
9206 if (INTEGRAL_TYPE_P (type)
9207 && TREE_CODE (arg0) == NEGATE_EXPR
9208 && integer_onep (arg1)
9209 && !TYPE_OVERFLOW_TRAPS (type))
9210 return fold_build1 (BIT_NOT_EXPR, type,
9211 fold_convert (type, TREE_OPERAND (arg0, 0)));
9213 /* Convert -1 - A to ~A. */
9214 if (INTEGRAL_TYPE_P (type)
9215 && integer_all_onesp (arg0))
9216 return fold_build1 (BIT_NOT_EXPR, type, op1);
9218 if (! FLOAT_TYPE_P (type))
9220 if (integer_zerop (arg0))
9221 return negate_expr (fold_convert (type, arg1));
9222 if (integer_zerop (arg1))
9223 return non_lvalue (fold_convert (type, arg0));
9225 /* Fold A - (A & B) into ~B & A. */
9226 if (!TREE_SIDE_EFFECTS (arg0)
9227 && TREE_CODE (arg1) == BIT_AND_EXPR)
9229 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9230 return fold_build2 (BIT_AND_EXPR, type,
9231 fold_build1 (BIT_NOT_EXPR, type,
9232 TREE_OPERAND (arg1, 0)),
9233 arg0);
9234 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9235 return fold_build2 (BIT_AND_EXPR, type,
9236 fold_build1 (BIT_NOT_EXPR, type,
9237 TREE_OPERAND (arg1, 1)),
9238 arg0);
9241 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9242 any power of 2 minus 1. */
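	      /* For example, with B == 7, (A & ~7) - (A & 7) becomes
	         (A ^ 7) - 7.  */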
9243 if (TREE_CODE (arg0) == BIT_AND_EXPR
9244 && TREE_CODE (arg1) == BIT_AND_EXPR
9245 && operand_equal_p (TREE_OPERAND (arg0, 0),
9246 TREE_OPERAND (arg1, 0), 0))
9248 tree mask0 = TREE_OPERAND (arg0, 1);
9249 tree mask1 = TREE_OPERAND (arg1, 1);
9250 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9252 if (operand_equal_p (tem, mask1, 0))
9254 tem = fold_build2 (BIT_XOR_EXPR, type,
9255 TREE_OPERAND (arg0, 0), mask1);
9256 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9261 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9262 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9263 return non_lvalue (fold_convert (type, arg0));
9265 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9266 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9267 (-ARG1 + ARG0) reduces to -ARG1. */
9268 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9269 return negate_expr (fold_convert (type, arg1));
9271 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9272 __complex__ ( x, -y ). This is not the same for SNaNs or if
9273 signed zeros are involved. */
9274 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9275 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9276 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9278 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9279 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9280 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9281 bool arg0rz = false, arg0iz = false;
9282 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9283 || (arg0i && (arg0iz = real_zerop (arg0i))))
9285 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9286 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9287 if (arg0rz && arg1i && real_zerop (arg1i))
9289 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9290 arg1r ? arg1r
9291 : build1 (REALPART_EXPR, rtype, arg1));
9292 tree ip = arg0i ? arg0i
9293 : build1 (IMAGPART_EXPR, rtype, arg0);
9294 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9296 else if (arg0iz && arg1r && real_zerop (arg1r))
9298 tree rp = arg0r ? arg0r
9299 : build1 (REALPART_EXPR, rtype, arg0);
9300 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9301 arg1i ? arg1i
9302 : build1 (IMAGPART_EXPR, rtype, arg1));
9303 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9308 /* Fold &x - &x. This can happen from &x.foo - &x.
9309 This is unsafe for certain floats even in non-IEEE formats.
9310 	         In IEEE, it is unsafe because it gives the wrong result for NaNs.
9311 Also note that operand_equal_p is always false if an operand
9312 is volatile. */
9314 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9315 && operand_equal_p (arg0, arg1, 0))
9316 return fold_convert (type, integer_zero_node);
9318 /* A - B -> A + (-B) if B is easily negatable. */
9319 if (negate_expr_p (arg1)
9320 && ((FLOAT_TYPE_P (type)
9321 /* Avoid this transformation if B is a positive REAL_CST. */
9322 && (TREE_CODE (arg1) != REAL_CST
9323 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9324 || INTEGRAL_TYPE_P (type)))
9325 return fold_build2 (PLUS_EXPR, type,
9326 fold_convert (type, arg0),
9327 fold_convert (type, negate_expr (arg1)));
9329 /* Try folding difference of addresses. */
9331 HOST_WIDE_INT diff;
9333 if ((TREE_CODE (arg0) == ADDR_EXPR
9334 || TREE_CODE (arg1) == ADDR_EXPR)
9335 && ptr_difference_const (arg0, arg1, &diff))
9336 return build_int_cst_type (type, diff);
9339 	      /* Fold &a[i] - &a[j] to (i - j) scaled by the element size.  */
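	      /* For "int a[N]", for example, the result is
	         (i - j) * sizeof (int): the difference of the raw addresses
	         in bytes, not the C-level pointer-subtraction result.  */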
9340 if (TREE_CODE (arg0) == ADDR_EXPR
9341 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9342 && TREE_CODE (arg1) == ADDR_EXPR
9343 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9345 tree aref0 = TREE_OPERAND (arg0, 0);
9346 tree aref1 = TREE_OPERAND (arg1, 0);
9347 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9348 TREE_OPERAND (aref1, 0), 0))
9350 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9351 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9352 tree esz = array_ref_element_size (aref0);
9353 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9354 return fold_build2 (MULT_EXPR, type, diff,
9355 fold_convert (type, esz));
9360 	      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9361 	         of the array.  The loop optimizer sometimes produces this type
9362 	         of expression.  */
9363 if (TREE_CODE (arg0) == ADDR_EXPR)
9365 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9366 if (tem)
9367 return fold_convert (type, tem);
9370 if (flag_unsafe_math_optimizations
9371 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9372 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9373 && (tem = distribute_real_division (code, type, arg0, arg1)))
9374 return tem;
9376 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9377 same or one. */
9378 if ((TREE_CODE (arg0) == MULT_EXPR
9379 || TREE_CODE (arg1) == MULT_EXPR)
9380 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9382 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9383 if (tem)
9384 return tem;
9387 goto associate;
9389 case MULT_EXPR:
9390 /* (-A) * (-B) -> A * B */
9391 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9392 return fold_build2 (MULT_EXPR, type,
9393 fold_convert (type, TREE_OPERAND (arg0, 0)),
9394 fold_convert (type, negate_expr (arg1)));
9395 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9396 return fold_build2 (MULT_EXPR, type,
9397 fold_convert (type, negate_expr (arg0)),
9398 fold_convert (type, TREE_OPERAND (arg1, 0)));
9400 if (! FLOAT_TYPE_P (type))
9402 if (integer_zerop (arg1))
9403 return omit_one_operand (type, arg1, arg0);
9404 if (integer_onep (arg1))
9405 return non_lvalue (fold_convert (type, arg0));
9406 /* Transform x * -1 into -x. */
9407 if (integer_all_onesp (arg1))
9408 return fold_convert (type, negate_expr (arg0));
9409 /* Transform x * -C into -x * C if x is easily negatable. */
9410 if (TREE_CODE (arg1) == INTEGER_CST
9411 && tree_int_cst_sgn (arg1) == -1
9412 && negate_expr_p (arg0)
9413 && (tem = negate_expr (arg1)) != arg1
9414 && !TREE_OVERFLOW (tem))
9415 return fold_build2 (MULT_EXPR, type,
9416 negate_expr (arg0), tem);
9418 /* (a * (1 << b)) is (a << b) */
9419 if (TREE_CODE (arg1) == LSHIFT_EXPR
9420 && integer_onep (TREE_OPERAND (arg1, 0)))
9421 return fold_build2 (LSHIFT_EXPR, type, arg0,
9422 TREE_OPERAND (arg1, 1));
9423 if (TREE_CODE (arg0) == LSHIFT_EXPR
9424 && integer_onep (TREE_OPERAND (arg0, 0)))
9425 return fold_build2 (LSHIFT_EXPR, type, arg1,
9426 TREE_OPERAND (arg0, 1));
9428 if (TREE_CODE (arg1) == INTEGER_CST
9429 && 0 != (tem = extract_muldiv (op0,
9430 fold_convert (type, arg1),
9431 code, NULL_TREE)))
9432 return fold_convert (type, tem);
9434 /* Optimize z * conj(z) for integer complex numbers. */
9435 if (TREE_CODE (arg0) == CONJ_EXPR
9436 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9437 return fold_mult_zconjz (type, arg1);
9438 if (TREE_CODE (arg1) == CONJ_EXPR
9439 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9440 return fold_mult_zconjz (type, arg0);
9442 else
9444 /* Maybe fold x * 0 to 0. The expressions aren't the same
9445 when x is NaN, since x * 0 is also NaN. Nor are they the
9446 same in modes with signed zeros, since multiplying a
9447 negative value by 0 gives -0, not +0. */
9448 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9449 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9450 && real_zerop (arg1))
9451 return omit_one_operand (type, arg1, arg0);
9452 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9453 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9454 && real_onep (arg1))
9455 return non_lvalue (fold_convert (type, arg0));
9457 /* Transform x * -1.0 into -x. */
9458 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9459 && real_minus_onep (arg1))
9460 return fold_convert (type, negate_expr (arg0));
9462 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9463 if (flag_unsafe_math_optimizations
9464 && TREE_CODE (arg0) == RDIV_EXPR
9465 && TREE_CODE (arg1) == REAL_CST
9466 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9468 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9469 arg1, 0);
9470 if (tem)
9471 return fold_build2 (RDIV_EXPR, type, tem,
9472 TREE_OPERAND (arg0, 1));
9475 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9476 if (operand_equal_p (arg0, arg1, 0))
9478 tree tem = fold_strip_sign_ops (arg0);
9479 if (tem != NULL_TREE)
9481 tem = fold_convert (type, tem);
9482 return fold_build2 (MULT_EXPR, type, tem, tem);
9486 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9487 This is not the same for NaNs or if signed zeros are
9488 involved. */
9489 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9490 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9491 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9492 && TREE_CODE (arg1) == COMPLEX_CST
9493 && real_zerop (TREE_REALPART (arg1)))
9495 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9496 if (real_onep (TREE_IMAGPART (arg1)))
9497 return fold_build2 (COMPLEX_EXPR, type,
9498 negate_expr (fold_build1 (IMAGPART_EXPR,
9499 rtype, arg0)),
9500 fold_build1 (REALPART_EXPR, rtype, arg0));
9501 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9502 return fold_build2 (COMPLEX_EXPR, type,
9503 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9504 negate_expr (fold_build1 (REALPART_EXPR,
9505 rtype, arg0)));
9508 /* Optimize z * conj(z) for floating point complex numbers.
9509 Guarded by flag_unsafe_math_optimizations as non-finite
9510 imaginary components don't produce scalar results. */
9511 if (flag_unsafe_math_optimizations
9512 && TREE_CODE (arg0) == CONJ_EXPR
9513 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9514 return fold_mult_zconjz (type, arg1);
9515 if (flag_unsafe_math_optimizations
9516 && TREE_CODE (arg1) == CONJ_EXPR
9517 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9518 return fold_mult_zconjz (type, arg0);
9520 if (flag_unsafe_math_optimizations)
9522 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9523 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9525 /* Optimizations of root(...)*root(...). */
9526 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9528 tree rootfn, arg, arglist;
9529 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9530 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9532 /* Optimize sqrt(x)*sqrt(x) as x. */
9533 if (BUILTIN_SQRT_P (fcode0)
9534 && operand_equal_p (arg00, arg10, 0)
9535 && ! HONOR_SNANS (TYPE_MODE (type)))
9536 return arg00;
9538 /* Optimize root(x)*root(y) as root(x*y). */
9539 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9540 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9541 arglist = build_tree_list (NULL_TREE, arg);
9542 return build_function_call_expr (rootfn, arglist);
9545 /* Optimize expN(x)*expN(y) as expN(x+y). */
9546 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9548 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9549 tree arg = fold_build2 (PLUS_EXPR, type,
9550 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9551 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9552 tree arglist = build_tree_list (NULL_TREE, arg);
9553 return build_function_call_expr (expfn, arglist);
9556 /* Optimizations of pow(...)*pow(...). */
9557 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9558 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9559 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9561 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9562 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9563 1)));
9564 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9565 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9566 1)));
9568 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9569 if (operand_equal_p (arg01, arg11, 0))
9571 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9572 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9573 tree arglist = tree_cons (NULL_TREE, arg,
9574 build_tree_list (NULL_TREE,
9575 arg01));
9576 return build_function_call_expr (powfn, arglist);
9579 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9580 if (operand_equal_p (arg00, arg10, 0))
9582 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9583 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9584 tree arglist = tree_cons (NULL_TREE, arg00,
9585 build_tree_list (NULL_TREE,
9586 arg));
9587 return build_function_call_expr (powfn, arglist);
9591 /* Optimize tan(x)*cos(x) as sin(x). */
9592 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9593 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9594 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9595 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9596 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9597 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9598 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9599 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9601 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9603 if (sinfn != NULL_TREE)
9604 return build_function_call_expr (sinfn,
9605 TREE_OPERAND (arg0, 1));
9608 /* Optimize x*pow(x,c) as pow(x,c+1). */
9609 if (fcode1 == BUILT_IN_POW
9610 || fcode1 == BUILT_IN_POWF
9611 || fcode1 == BUILT_IN_POWL)
9613 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9614 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9615 1)));
9616 if (TREE_CODE (arg11) == REAL_CST
9617 && !TREE_OVERFLOW (arg11)
9618 && operand_equal_p (arg0, arg10, 0))
9620 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9621 REAL_VALUE_TYPE c;
9622 tree arg, arglist;
9624 c = TREE_REAL_CST (arg11);
9625 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9626 arg = build_real (type, c);
9627 arglist = build_tree_list (NULL_TREE, arg);
9628 arglist = tree_cons (NULL_TREE, arg0, arglist);
9629 return build_function_call_expr (powfn, arglist);
9633 /* Optimize pow(x,c)*x as pow(x,c+1). */
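	          /* For example, pow (x, 2.0) * x, like x * pow (x, 2.0)
	             above, becomes pow (x, 3.0); the constant exponent is
	             adjusted with real_arithmetic.  */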
9634 if (fcode0 == BUILT_IN_POW
9635 || fcode0 == BUILT_IN_POWF
9636 || fcode0 == BUILT_IN_POWL)
9638 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9639 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9640 1)));
9641 if (TREE_CODE (arg01) == REAL_CST
9642 && !TREE_OVERFLOW (arg01)
9643 && operand_equal_p (arg1, arg00, 0))
9645 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9646 REAL_VALUE_TYPE c;
9647 tree arg, arglist;
9649 c = TREE_REAL_CST (arg01);
9650 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9651 arg = build_real (type, c);
9652 arglist = build_tree_list (NULL_TREE, arg);
9653 arglist = tree_cons (NULL_TREE, arg1, arglist);
9654 return build_function_call_expr (powfn, arglist);
9658 	      /* Canonicalize x*x as pow(x,2.0); the expander turns that back
	         into x*x.  */
9659 if (! optimize_size
9660 && operand_equal_p (arg0, arg1, 0))
9662 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9664 if (powfn)
9666 tree arg = build_real (type, dconst2);
9667 tree arglist = build_tree_list (NULL_TREE, arg);
9668 arglist = tree_cons (NULL_TREE, arg0, arglist);
9669 return build_function_call_expr (powfn, arglist);
9674 goto associate;
9676 case BIT_IOR_EXPR:
9677 bit_ior:
9678 if (integer_all_onesp (arg1))
9679 return omit_one_operand (type, arg1, arg0);
9680 if (integer_zerop (arg1))
9681 return non_lvalue (fold_convert (type, arg0));
9682 if (operand_equal_p (arg0, arg1, 0))
9683 return non_lvalue (fold_convert (type, arg0));
9685 /* ~X | X is -1. */
9686 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9689 t1 = build_int_cst_type (type, -1);
9690 return omit_one_operand (type, t1, arg1);
9693 /* X | ~X is -1. */
9694 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9695 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9697 t1 = build_int_cst_type (type, -1);
9698 return omit_one_operand (type, t1, arg0);
9701 /* Canonicalize (X & C1) | C2. */
9702 if (TREE_CODE (arg0) == BIT_AND_EXPR
9703 && TREE_CODE (arg1) == INTEGER_CST
9704 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9706 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9707 int width = TYPE_PRECISION (type);
9708 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9709 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9710 hi2 = TREE_INT_CST_HIGH (arg1);
9711 lo2 = TREE_INT_CST_LOW (arg1);
9713 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9714 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9715 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9717 if (width > HOST_BITS_PER_WIDE_INT)
9719 mhi = (unsigned HOST_WIDE_INT) -1
9720 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9721 mlo = -1;
9723 else
9725 mhi = 0;
9726 mlo = (unsigned HOST_WIDE_INT) -1
9727 >> (HOST_BITS_PER_WIDE_INT - width);
9730 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9731 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9732 return fold_build2 (BIT_IOR_EXPR, type,
9733 TREE_OPERAND (arg0, 0), arg1);
9735 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
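	      /* For example, (X & 0xF0) | 0x3C becomes (X & 0xC0) | 0x3C,
	         since the 0x30 bits of C1 are forced to 1 by C2 anyway.  */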
9736 hi1 &= mhi;
9737 lo1 &= mlo;
9738 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9739 return fold_build2 (BIT_IOR_EXPR, type,
9740 fold_build2 (BIT_AND_EXPR, type,
9741 TREE_OPERAND (arg0, 0),
9742 build_int_cst_wide (type,
9743 lo1 & ~lo2,
9744 hi1 & ~hi2)),
9745 arg1);
9748 /* (X & Y) | Y is (X, Y). */
9749 if (TREE_CODE (arg0) == BIT_AND_EXPR
9750 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9751 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9752 /* (X & Y) | X is (Y, X). */
9753 if (TREE_CODE (arg0) == BIT_AND_EXPR
9754 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9755 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9756 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9757 /* X | (X & Y) is (Y, X). */
9758 if (TREE_CODE (arg1) == BIT_AND_EXPR
9759 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9760 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9761 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9762 /* X | (Y & X) is (Y, X). */
9763 if (TREE_CODE (arg1) == BIT_AND_EXPR
9764 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9765 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9766 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9768 t1 = distribute_bit_expr (code, type, arg0, arg1);
9769 if (t1 != NULL_TREE)
9770 return t1;
9772 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9774 This results in more efficient code for machines without a NAND
9775 instruction. Combine will canonicalize to the first form
9776 which will allow use of NAND instructions provided by the
9777 backend if they exist. */
9778 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9779 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9781 return fold_build1 (BIT_NOT_EXPR, type,
9782 build2 (BIT_AND_EXPR, type,
9783 TREE_OPERAND (arg0, 0),
9784 TREE_OPERAND (arg1, 0)));
9787 	      /* See if this can be simplified into a rotate first.  If that
9788 	         is unsuccessful, continue in the association code.  */
9789 goto bit_rotate;
9791 case BIT_XOR_EXPR:
9792 if (integer_zerop (arg1))
9793 return non_lvalue (fold_convert (type, arg0));
9794 if (integer_all_onesp (arg1))
9795 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9796 if (operand_equal_p (arg0, arg1, 0))
9797 return omit_one_operand (type, integer_zero_node, arg0);
9799 /* ~X ^ X is -1. */
9800 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9801 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9803 t1 = build_int_cst_type (type, -1);
9804 return omit_one_operand (type, t1, arg1);
9807 /* X ^ ~X is -1. */
9808 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9809 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9811 t1 = build_int_cst_type (type, -1);
9812 return omit_one_operand (type, t1, arg0);
9815 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9816 with a constant, and the two constants have no bits in common,
9817 we should treat this as a BIT_IOR_EXPR since this may produce more
9818 simplifications. */
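	      /* For example, (X & 0x0F) ^ (Y & 0xF0) is retried as
	         (X & 0x0F) | (Y & 0xF0), which the BIT_IOR_EXPR folds may
	         simplify further.  */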
9819 if (TREE_CODE (arg0) == BIT_AND_EXPR
9820 && TREE_CODE (arg1) == BIT_AND_EXPR
9821 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9822 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9823 && integer_zerop (const_binop (BIT_AND_EXPR,
9824 TREE_OPERAND (arg0, 1),
9825 TREE_OPERAND (arg1, 1), 0)))
9827 code = BIT_IOR_EXPR;
9828 goto bit_ior;
9831 	      /* (X | Y) ^ X -> Y & ~X.  */
9832 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9833 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9835 tree t2 = TREE_OPERAND (arg0, 1);
9836 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9837 arg1);
9838 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9839 fold_convert (type, t1));
9840 return t1;
9843 	      /* (Y | X) ^ X -> Y & ~X.  */
9844 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9845 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9847 tree t2 = TREE_OPERAND (arg0, 0);
9848 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9849 arg1);
9850 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9851 fold_convert (type, t1));
9852 return t1;
9855 	      /* X ^ (X | Y) -> Y & ~X.  */
9856 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9857 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9859 tree t2 = TREE_OPERAND (arg1, 1);
9860 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9861 arg0);
9862 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9863 fold_convert (type, t1));
9864 return t1;
9867 	      /* X ^ (Y | X) -> Y & ~X.  */
9868 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9869 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9871 tree t2 = TREE_OPERAND (arg1, 0);
9872 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9873 arg0);
9874 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9875 fold_convert (type, t1));
9876 return t1;
9879 /* Convert ~X ^ ~Y to X ^ Y. */
9880 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9881 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9882 return fold_build2 (code, type,
9883 fold_convert (type, TREE_OPERAND (arg0, 0)),
9884 fold_convert (type, TREE_OPERAND (arg1, 0)));
9886 /* Convert ~X ^ C to X ^ ~C. */
9887 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9888 && TREE_CODE (arg1) == INTEGER_CST)
9889 return fold_build2 (code, type,
9890 fold_convert (type, TREE_OPERAND (arg0, 0)),
9891 fold_build1 (BIT_NOT_EXPR, type, arg1));
9893 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9894 if (TREE_CODE (arg0) == BIT_AND_EXPR
9895 && integer_onep (TREE_OPERAND (arg0, 1))
9896 && integer_onep (arg1))
9897 return fold_build2 (EQ_EXPR, type, arg0,
9898 build_int_cst (TREE_TYPE (arg0), 0));
9900 /* Fold (X & Y) ^ Y as ~X & Y. */
9901 if (TREE_CODE (arg0) == BIT_AND_EXPR
9902 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9904 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9905 return fold_build2 (BIT_AND_EXPR, type,
9906 fold_build1 (BIT_NOT_EXPR, type, tem),
9907 fold_convert (type, arg1));
9909 /* Fold (X & Y) ^ X as ~Y & X. */
9910 if (TREE_CODE (arg0) == BIT_AND_EXPR
9911 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9912 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9914 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9915 return fold_build2 (BIT_AND_EXPR, type,
9916 fold_build1 (BIT_NOT_EXPR, type, tem),
9917 fold_convert (type, arg1));
9919 /* Fold X ^ (X & Y) as X & ~Y. */
9920 if (TREE_CODE (arg1) == BIT_AND_EXPR
9921 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9923 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9924 return fold_build2 (BIT_AND_EXPR, type,
9925 fold_convert (type, arg0),
9926 fold_build1 (BIT_NOT_EXPR, type, tem));
9928 /* Fold X ^ (Y & X) as ~Y & X. */
9929 if (TREE_CODE (arg1) == BIT_AND_EXPR
9930 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9931 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9933 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9934 return fold_build2 (BIT_AND_EXPR, type,
9935 fold_build1 (BIT_NOT_EXPR, type, tem),
9936 fold_convert (type, arg0));
9939 	      /* See if this can be simplified into a rotate first.  If that
9940 	         is unsuccessful, continue in the association code.  */
9941 goto bit_rotate;
9943 case BIT_AND_EXPR:
9944 if (integer_all_onesp (arg1))
9945 return non_lvalue (fold_convert (type, arg0));
9946 if (integer_zerop (arg1))
9947 return omit_one_operand (type, arg1, arg0);
9948 if (operand_equal_p (arg0, arg1, 0))
9949 return non_lvalue (fold_convert (type, arg0));
9951 /* ~X & X is always zero. */
9952 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9953 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9954 return omit_one_operand (type, integer_zero_node, arg1);
9956 /* X & ~X is always zero. */
9957 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9958 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9959 return omit_one_operand (type, integer_zero_node, arg0);
9961 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9962 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9963 && TREE_CODE (arg1) == INTEGER_CST
9964 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9965 return fold_build2 (BIT_IOR_EXPR, type,
9966 fold_build2 (BIT_AND_EXPR, type,
9967 TREE_OPERAND (arg0, 0), arg1),
9968 fold_build2 (BIT_AND_EXPR, type,
9969 TREE_OPERAND (arg0, 1), arg1));
9971 /* (X | Y) & Y is (X, Y). */
9972 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9973 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9974 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9975 /* (X | Y) & X is (Y, X). */
9976 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9977 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9978 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9979 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9980 /* X & (X | Y) is (Y, X). */
9981 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9982 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9983 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9984 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9985 /* X & (Y | X) is (Y, X). */
9986 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9987 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9988 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9989 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9991 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9992 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9993 && integer_onep (TREE_OPERAND (arg0, 1))
9994 && integer_onep (arg1))
9996 tem = TREE_OPERAND (arg0, 0);
9997 return fold_build2 (EQ_EXPR, type,
9998 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9999 build_int_cst (TREE_TYPE (tem), 1)),
10000 build_int_cst (TREE_TYPE (tem), 0));
10002 /* Fold ~X & 1 as (X & 1) == 0. */
10003 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10004 && integer_onep (arg1))
10006 tem = TREE_OPERAND (arg0, 0);
10007 return fold_build2 (EQ_EXPR, type,
10008 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10009 build_int_cst (TREE_TYPE (tem), 1)),
10010 build_int_cst (TREE_TYPE (tem), 0));
10013 /* Fold (X ^ Y) & Y as ~X & Y. */
10014 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10015 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10017 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10018 return fold_build2 (BIT_AND_EXPR, type,
10019 fold_build1 (BIT_NOT_EXPR, type, tem),
10020 fold_convert (type, arg1));
10022 /* Fold (X ^ Y) & X as ~Y & X. */
10023 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10024 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10025 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10027 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10028 return fold_build2 (BIT_AND_EXPR, type,
10029 fold_build1 (BIT_NOT_EXPR, type, tem),
10030 fold_convert (type, arg1));
10032 /* Fold X & (X ^ Y) as X & ~Y. */
10033 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10034 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10036 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10037 return fold_build2 (BIT_AND_EXPR, type,
10038 fold_convert (type, arg0),
10039 fold_build1 (BIT_NOT_EXPR, type, tem));
10041 /* Fold X & (Y ^ X) as ~Y & X. */
10042 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10043 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10044 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10046 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10047 return fold_build2 (BIT_AND_EXPR, type,
10048 fold_build1 (BIT_NOT_EXPR, type, tem),
10049 fold_convert (type, arg0));
10052 t1 = distribute_bit_expr (code, type, arg0, arg1);
10053 if (t1 != NULL_TREE)
10054 return t1;
10055 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10056 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10057 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10059 unsigned int prec
10060 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10062 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10063 && (~TREE_INT_CST_LOW (arg1)
10064 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10065 return fold_convert (type, TREE_OPERAND (arg0, 0));
10068 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10070 This results in more efficient code for machines without a NOR
10071 instruction. Combine will canonicalize to the first form
10072 which will allow use of NOR instructions provided by the
10073 backend if they exist. */
10074 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10075 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10077 return fold_build1 (BIT_NOT_EXPR, type,
10078 build2 (BIT_IOR_EXPR, type,
10079 TREE_OPERAND (arg0, 0),
10080 TREE_OPERAND (arg1, 0)));
10083 goto associate;
10085 case RDIV_EXPR:
10086 /* Don't touch a floating-point divide by zero unless the mode
10087 of the constant can represent infinity. */
10088 if (TREE_CODE (arg1) == REAL_CST
10089 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10090 && real_zerop (arg1))
10091 return NULL_TREE;
10093 /* Optimize A / A to 1.0 if we don't care about
10094 NaNs or Infinities. Skip the transformation
10095 for non-real operands. */
10096 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10097 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10098 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10099 && operand_equal_p (arg0, arg1, 0))
10101 tree r = build_real (TREE_TYPE (arg0), dconst1);
10103 return omit_two_operands (type, r, arg0, arg1);
10106 /* The complex version of the above A / A optimization. */
10107 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10108 && operand_equal_p (arg0, arg1, 0))
10110 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10111 if (! HONOR_NANS (TYPE_MODE (elem_type))
10112 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10114 tree r = build_real (elem_type, dconst1);
10115 /* omit_two_operands will call fold_convert for us. */
10116 return omit_two_operands (type, r, arg0, arg1);
10120 /* (-A) / (-B) -> A / B */
10121 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10122 return fold_build2 (RDIV_EXPR, type,
10123 TREE_OPERAND (arg0, 0),
10124 negate_expr (arg1));
10125 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10126 return fold_build2 (RDIV_EXPR, type,
10127 negate_expr (arg0),
10128 TREE_OPERAND (arg1, 0));
10130 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10131 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10132 && real_onep (arg1))
10133 return non_lvalue (fold_convert (type, arg0));
10135 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10136 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10137 && real_minus_onep (arg1))
10138 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10140 /* If ARG1 is a constant, we can convert this to a multiply by the
10141 reciprocal. This does not have the same rounding properties,
10142 so only do this if -funsafe-math-optimizations. We can actually
10143 always safely do it if ARG1 is a power of two, but it's hard to
10144 tell if it is or not in a portable manner. */
10145 if (TREE_CODE (arg1) == REAL_CST)
10147 if (flag_unsafe_math_optimizations
10148 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10149 arg1, 0)))
10150 return fold_build2 (MULT_EXPR, type, arg0, tem);
10151 /* Find the reciprocal if optimizing and the result is exact. */
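	      /* For example, x / 4.0 becomes x * 0.25 even without
	         -funsafe-math-optimizations, since 0.25 is the exact binary
	         reciprocal of 4.0.  */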
10152 if (optimize)
10154 REAL_VALUE_TYPE r;
10155 r = TREE_REAL_CST (arg1);
10156 	          if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
10158 tem = build_real (type, r);
10159 return fold_build2 (MULT_EXPR, type,
10160 fold_convert (type, arg0), tem);
10164 /* Convert A/B/C to A/(B*C). */
10165 if (flag_unsafe_math_optimizations
10166 && TREE_CODE (arg0) == RDIV_EXPR)
10167 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10168 fold_build2 (MULT_EXPR, type,
10169 TREE_OPERAND (arg0, 1), arg1));
10171 /* Convert A/(B/C) to (A/B)*C. */
10172 if (flag_unsafe_math_optimizations
10173 && TREE_CODE (arg1) == RDIV_EXPR)
10174 return fold_build2 (MULT_EXPR, type,
10175 fold_build2 (RDIV_EXPR, type, arg0,
10176 TREE_OPERAND (arg1, 0)),
10177 TREE_OPERAND (arg1, 1));
10179 /* Convert C1/(X*C2) into (C1/C2)/X. */
10180 if (flag_unsafe_math_optimizations
10181 && TREE_CODE (arg1) == MULT_EXPR
10182 && TREE_CODE (arg0) == REAL_CST
10183 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10185 tree tem = const_binop (RDIV_EXPR, arg0,
10186 TREE_OPERAND (arg1, 1), 0);
10187 if (tem)
10188 return fold_build2 (RDIV_EXPR, type, tem,
10189 TREE_OPERAND (arg1, 0));
10192 if (flag_unsafe_math_optimizations)
10194 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10195 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10197 /* Optimize sin(x)/cos(x) as tan(x). */
10198 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10199 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10200 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10201 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10202 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10204 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10206 if (tanfn != NULL_TREE)
10207 return build_function_call_expr (tanfn,
10208 TREE_OPERAND (arg0, 1));
10211 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10212 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10213 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10214 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10215 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10216 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10218 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10220 if (tanfn != NULL_TREE)
10222 tree tmp = TREE_OPERAND (arg0, 1);
10223 tmp = build_function_call_expr (tanfn, tmp);
10224 return fold_build2 (RDIV_EXPR, type,
10225 build_real (type, dconst1), tmp);
10229 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10230 NaNs or Infinities. */
10231 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10232 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10233 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10235 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10236 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10238 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10239 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10240 && operand_equal_p (arg00, arg01, 0))
10242 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10244 if (cosfn != NULL_TREE)
10245 return build_function_call_expr (cosfn,
10246 TREE_OPERAND (arg0, 1));
10250 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10251 NaNs or Infinities. */
10252 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10253 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10254 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10256 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10257 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10259 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10260 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10261 && operand_equal_p (arg00, arg01, 0))
10263 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10265 if (cosfn != NULL_TREE)
10267 tree tmp = TREE_OPERAND (arg0, 1);
10268 tmp = build_function_call_expr (cosfn, tmp);
10269 return fold_build2 (RDIV_EXPR, type,
10270 build_real (type, dconst1),
10271 tmp);
10276 /* Optimize pow(x,c)/x as pow(x,c-1). */
10277 if (fcode0 == BUILT_IN_POW
10278 || fcode0 == BUILT_IN_POWF
10279 || fcode0 == BUILT_IN_POWL)
10281 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10282 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10283 if (TREE_CODE (arg01) == REAL_CST
10284 && !TREE_OVERFLOW (arg01)
10285 && operand_equal_p (arg1, arg00, 0))
10287 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10288 REAL_VALUE_TYPE c;
10289 tree arg, arglist;
10291 c = TREE_REAL_CST (arg01);
10292 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10293 arg = build_real (type, c);
10294 arglist = build_tree_list (NULL_TREE, arg);
10295 arglist = tree_cons (NULL_TREE, arg1, arglist);
10296 return build_function_call_expr (powfn, arglist);
10300 /* Optimize x/expN(y) into x*expN(-y). */
10301 if (BUILTIN_EXPONENT_P (fcode1))
10303 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10304 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10305 tree arglist = build_tree_list (NULL_TREE,
10306 fold_convert (type, arg));
10307 arg1 = build_function_call_expr (expfn, arglist);
10308 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10311 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10312 if (fcode1 == BUILT_IN_POW
10313 || fcode1 == BUILT_IN_POWF
10314 || fcode1 == BUILT_IN_POWL)
10316 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10317 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10318 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10319 tree neg11 = fold_convert (type, negate_expr (arg11));
10320 tree arglist = tree_cons (NULL_TREE, arg10,
10321 build_tree_list (NULL_TREE, neg11));
10322 arg1 = build_function_call_expr (powfn, arglist);
10323 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10326 return NULL_TREE;
10328 case TRUNC_DIV_EXPR:
10329 case FLOOR_DIV_EXPR:
10330 /* Simplify A / (B << N) where A and B are positive and B is
10331 a power of 2, to A >> (N + log2(B)). */
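	      /* For example, with unsigned A, A / (4 << N) becomes
	         A >> (N + 2).  */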
10332 if (TREE_CODE (arg1) == LSHIFT_EXPR
10333 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10335 tree sval = TREE_OPERAND (arg1, 0);
10336 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10338 tree sh_cnt = TREE_OPERAND (arg1, 1);
10339 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10341 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10342 sh_cnt, build_int_cst (NULL_TREE, pow2));
10343 return fold_build2 (RSHIFT_EXPR, type,
10344 fold_convert (type, arg0), sh_cnt);
10347 /* Fall thru */
10349 case ROUND_DIV_EXPR:
10350 case CEIL_DIV_EXPR:
10351 case EXACT_DIV_EXPR:
10352 if (integer_onep (arg1))
10353 return non_lvalue (fold_convert (type, arg0));
10354 if (integer_zerop (arg1))
10355 return NULL_TREE;
10356 /* X / -1 is -X. */
10357 if (!TYPE_UNSIGNED (type)
10358 && TREE_CODE (arg1) == INTEGER_CST
10359 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10360 && TREE_INT_CST_HIGH (arg1) == -1)
10361 return fold_convert (type, negate_expr (arg0));
10363 /* Convert -A / -B to A / B when the type is signed and overflow is
10364 undefined. */
10365 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10366 && TREE_CODE (arg0) == NEGATE_EXPR
10367 && negate_expr_p (arg1))
10368 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10369 negate_expr (arg1));
10370 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10371 && TREE_CODE (arg1) == NEGATE_EXPR
10372 && negate_expr_p (arg0))
10373 return fold_build2 (code, type, negate_expr (arg0),
10374 TREE_OPERAND (arg1, 0));
10376 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10377 operation, EXACT_DIV_EXPR.
10379 	         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10380 	         At one time others generated faster code; it's not clear whether
10381 	         they do after the last round of changes to the DIV code in
	         expmed.c.  */
10382 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10383 && multiple_of_p (type, arg0, arg1))
10384 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10386 if (TREE_CODE (arg1) == INTEGER_CST
10387 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10388 return fold_convert (type, tem);
10390 return NULL_TREE;
10392 case CEIL_MOD_EXPR:
10393 case FLOOR_MOD_EXPR:
10394 case ROUND_MOD_EXPR:
10395 case TRUNC_MOD_EXPR:
10396 /* X % 1 is always zero, but be sure to preserve any side
10397 effects in X. */
10398 if (integer_onep (arg1))
10399 return omit_one_operand (type, integer_zero_node, arg0);
10401 	      /* For X % 0, return X % 0 unchanged so that we get the
10402 	         proper warnings and errors.  */
10403 if (integer_zerop (arg1))
10404 return NULL_TREE;
10406 /* 0 % X is always zero, but be sure to preserve any side
10407 effects in X. Place this after checking for X == 0. */
10408 if (integer_zerop (arg0))
10409 return omit_one_operand (type, integer_zero_node, arg1);
10411 /* X % -1 is zero. */
10412 if (!TYPE_UNSIGNED (type)
10413 && TREE_CODE (arg1) == INTEGER_CST
10414 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10415 && TREE_INT_CST_HIGH (arg1) == -1)
10416 return omit_one_operand (type, integer_zero_node, arg0);
10418 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10419 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10420 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10421 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10423 tree c = arg1;
10424 /* Also optimize A % (C << N) where C is a power of 2,
10425 to A & ((C << N) - 1). */
10426 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10427 c = TREE_OPERAND (arg1, 0);
10429 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10431 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10432 build_int_cst (TREE_TYPE (arg1), 1));
10433 return fold_build2 (BIT_AND_EXPR, type,
10434 fold_convert (type, arg0),
10435 fold_convert (type, mask));
10439 /* X % -C is the same as X % C. */
10440 if (code == TRUNC_MOD_EXPR
10441 && !TYPE_UNSIGNED (type)
10442 && TREE_CODE (arg1) == INTEGER_CST
10443 && !TREE_OVERFLOW (arg1)
10444 && TREE_INT_CST_HIGH (arg1) < 0
10445 && !TYPE_OVERFLOW_TRAPS (type)
10446 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10447 && !sign_bit_p (arg1, arg1))
10448 return fold_build2 (code, type, fold_convert (type, arg0),
10449 fold_convert (type, negate_expr (arg1)));
10451 /* X % -Y is the same as X % Y. */
10452 if (code == TRUNC_MOD_EXPR
10453 && !TYPE_UNSIGNED (type)
10454 && TREE_CODE (arg1) == NEGATE_EXPR
10455 && !TYPE_OVERFLOW_TRAPS (type))
10456 return fold_build2 (code, type, fold_convert (type, arg0),
10457 fold_convert (type, TREE_OPERAND (arg1, 0)));
10459 if (TREE_CODE (arg1) == INTEGER_CST
10460 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10461 return fold_convert (type, tem);
10463 return NULL_TREE;
10465 case LROTATE_EXPR:
10466 case RROTATE_EXPR:
10467 if (integer_all_onesp (arg0))
10468 return omit_one_operand (type, arg0, arg1);
10469 goto shift;
10471 case RSHIFT_EXPR:
10472 /* Optimize -1 >> x for arithmetic right shifts. */
10473 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10474 return omit_one_operand (type, arg0, arg1);
10475 /* ... fall through ... */
10477 case LSHIFT_EXPR:
10478 shift:
10479 if (integer_zerop (arg1))
10480 return non_lvalue (fold_convert (type, arg0));
10481 if (integer_zerop (arg0))
10482 return omit_one_operand (type, arg0, arg1);
10484 	      /* Since a negative shift count is not well-defined,
10485 	         don't try to compute it in the compiler.  */
10486 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10487 return NULL_TREE;
10489 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10490 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10491 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10492 && host_integerp (TREE_OPERAND (arg0, 1), false)
10493 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10495 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10496 + TREE_INT_CST_LOW (arg1));
10498 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10499 being well defined. */
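	          /* For example, on a 32-bit type (X >> 2) >> 3 becomes
	             X >> 5; a signed (X >> 20) >> 20 is clamped to X >> 31,
	             and the unsigned and left-shift overshoot cases fold
	             to 0.  */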
10500 if (low >= TYPE_PRECISION (type))
10502 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10503 low = low % TYPE_PRECISION (type);
10504 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10505 return build_int_cst (type, 0);
10506 else
10507 low = TYPE_PRECISION (type) - 1;
10510 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10511 build_int_cst (type, low));
10514 	      /* Transform (x >> c) << c into x & (-1 << c), or transform
10515 	         (x << c) >> c into x & ((unsigned)-1 >> c) for unsigned types.  */
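	      /* For example, on a 32-bit type (X >> 4) << 4 becomes
	         X & (-1 << 4), i.e. X & 0xfffffff0.  */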
10516 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10517 || (TYPE_UNSIGNED (type)
10518 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10519 && host_integerp (arg1, false)
10520 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10521 && host_integerp (TREE_OPERAND (arg0, 1), false)
10522 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10524 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10525 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10526 tree lshift;
10527 tree arg00;
10529 if (low0 == low1)
10531 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10533 lshift = build_int_cst (type, -1);
10534 lshift = int_const_binop (code, lshift, arg1, 0);
10536 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10540 /* Rewrite an LROTATE_EXPR by a constant into an
10541 RROTATE_EXPR by a new constant. */
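	      /* For example, on a 32-bit type a left-rotate by 3 becomes a
	         right-rotate by 29, canonicalizing on a single rotate
	         direction.  */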
10542 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10544 tree tem = build_int_cst (TREE_TYPE (arg1),
10545 GET_MODE_BITSIZE (TYPE_MODE (type)));
10546 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10547 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10550 /* If we have a rotate of a bit operation with the rotate count and
10551 the second operand of the bit operation both constant,
10552 permute the two operations. */
10553 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10554 && (TREE_CODE (arg0) == BIT_AND_EXPR
10555 || TREE_CODE (arg0) == BIT_IOR_EXPR
10556 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10557 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10558 return fold_build2 (TREE_CODE (arg0), type,
10559 fold_build2 (code, type,
10560 TREE_OPERAND (arg0, 0), arg1),
10561 fold_build2 (code, type,
10562 TREE_OPERAND (arg0, 1), arg1));
10564 /* Two consecutive rotates adding up to the width of the mode can
10565 be ignored. */
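	      /* For example, on a 32-bit type rotating right by 10 and then
	         by 22 brings every bit back to its place, so the result is
	         the original operand.  */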
10566 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10567 && TREE_CODE (arg0) == RROTATE_EXPR
10568 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10569 && TREE_INT_CST_HIGH (arg1) == 0
10570 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10571 && ((TREE_INT_CST_LOW (arg1)
10572 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10573 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10574 return TREE_OPERAND (arg0, 0);
10576 return NULL_TREE;
10578 case MIN_EXPR:
10579 if (operand_equal_p (arg0, arg1, 0))
10580 return omit_one_operand (type, arg0, arg1);
10581 if (INTEGRAL_TYPE_P (type)
10582 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10583 return omit_one_operand (type, arg1, arg0);
10584 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10585 if (tem)
10586 return tem;
10587 goto associate;
10589 case MAX_EXPR:
10590 if (operand_equal_p (arg0, arg1, 0))
10591 return omit_one_operand (type, arg0, arg1);
10592 if (INTEGRAL_TYPE_P (type)
10593 && TYPE_MAX_VALUE (type)
10594 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10595 return omit_one_operand (type, arg1, arg0);
10596 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10597 if (tem)
10598 return tem;
10599 goto associate;
10601 case TRUTH_ANDIF_EXPR:
10602 /* Note that the operands of this must be ints
10603 and their values must be 0 or 1.
10604 ("true" is a fixed value perhaps depending on the language.) */
10605 /* If first arg is constant zero, return it. */
10606 if (integer_zerop (arg0))
10607 return fold_convert (type, arg0);
10608 case TRUTH_AND_EXPR:
10609 /* If either arg is constant true, drop it. */
10610 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10611 return non_lvalue (fold_convert (type, arg1));
10612 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10613 /* Preserve sequence points. */
10614 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10615 return non_lvalue (fold_convert (type, arg0));
10616 /* If second arg is constant zero, result is zero, but first arg
10617 must be evaluated. */
10618 if (integer_zerop (arg1))
10619 return omit_one_operand (type, arg1, arg0);
10620 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10621 case will be handled here. */
10622 if (integer_zerop (arg0))
10623 return omit_one_operand (type, arg0, arg1);
10625 /* !X && X is always false. */
10626 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10627 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10628 return omit_one_operand (type, integer_zero_node, arg1);
10629 /* X && !X is always false. */
10630 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10631 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10632 return omit_one_operand (type, integer_zero_node, arg0);
10634 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10635 means A >= Y && A != MAX, but in this case we know that
10636 A < X <= MAX. */
10638 if (!TREE_SIDE_EFFECTS (arg0)
10639 && !TREE_SIDE_EFFECTS (arg1))
10641 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10642 if (tem && !operand_equal_p (tem, arg0, 0))
10643 return fold_build2 (code, type, tem, arg1);
10645 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10646 if (tem && !operand_equal_p (tem, arg1, 0))
10647 return fold_build2 (code, type, arg0, tem);
10650 truth_andor:
10651 /* We only do these simplifications if we are optimizing. */
10652 if (!optimize)
10653 return NULL_TREE;
10655 /* Check for things like (A || B) && (A || C). We can convert this
10656 to A || (B && C). Note that either operator can be any of the four
10657 truth and/or operations and the transformation will still be
10658 valid. Also note that we only care about order for the
10659 ANDIF and ORIF operators. If B contains side effects, this
10660 might change the truth-value of A. */
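	      /* For example, (A || B) && (A || C) becomes A || (B && C);
	         with the commutative TRUTH_AND/TRUTH_OR forms the shared
	         operand may also be matched in swapped positions.  */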
10661 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10662 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10663 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10664 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10665 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10666 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10668 tree a00 = TREE_OPERAND (arg0, 0);
10669 tree a01 = TREE_OPERAND (arg0, 1);
10670 tree a10 = TREE_OPERAND (arg1, 0);
10671 tree a11 = TREE_OPERAND (arg1, 1);
10672 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10673 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10674 && (code == TRUTH_AND_EXPR
10675 || code == TRUTH_OR_EXPR));
10677 if (operand_equal_p (a00, a10, 0))
10678 return fold_build2 (TREE_CODE (arg0), type, a00,
10679 fold_build2 (code, type, a01, a11));
10680 else if (commutative && operand_equal_p (a00, a11, 0))
10681 return fold_build2 (TREE_CODE (arg0), type, a00,
10682 fold_build2 (code, type, a01, a10));
10683 else if (commutative && operand_equal_p (a01, a10, 0))
10684 return fold_build2 (TREE_CODE (arg0), type, a01,
10685 fold_build2 (code, type, a00, a11));
10687 /* This case is tricky because we must either have commutative
10688 operators or else A10 must not have side-effects. */
10690 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10691 && operand_equal_p (a01, a11, 0))
10692 return fold_build2 (TREE_CODE (arg0), type,
10693 fold_build2 (code, type, a00, a10),
10694 a01);
10697 /* See if we can build a range comparison. */
10698 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10699 return tem;
10701 /* Check for the possibility of merging component references. If our
10702 lhs is another similar operation, try to merge its rhs with our
10703 rhs. Then try to merge our lhs and rhs. */
10704 if (TREE_CODE (arg0) == code
10705 && 0 != (tem = fold_truthop (code, type,
10706 TREE_OPERAND (arg0, 1), arg1)))
10707 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10709 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10710 return tem;
10712 return NULL_TREE;
10714 case TRUTH_ORIF_EXPR:
10715 /* Note that the operands of this must be ints
10716 and their values must be 0 or true.
10717 ("true" is a fixed value perhaps depending on the language.) */
10718 /* If first arg is constant true, return it. */
10719 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10720 return fold_convert (type, arg0);
10721 case TRUTH_OR_EXPR:
10722 /* If either arg is constant zero, drop it. */
10723 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10724 return non_lvalue (fold_convert (type, arg1));
10725 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10726 /* Preserve sequence points. */
10727 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10728 return non_lvalue (fold_convert (type, arg0));
10729 /* If second arg is constant true, result is true, but we must
10730 evaluate first arg. */
10731 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10732 return omit_one_operand (type, arg1, arg0);
10733 /* Likewise for first arg, but note this only occurs here for
10734 TRUTH_OR_EXPR. */
10735 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10736 return omit_one_operand (type, arg0, arg1);
10738 /* !X || X is always true. */
10739 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10740 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10741 return omit_one_operand (type, integer_one_node, arg1);
10742 /* X || !X is always true. */
10743 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10744 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10745 return omit_one_operand (type, integer_one_node, arg0);
10747 goto truth_andor;
10749 case TRUTH_XOR_EXPR:
10750 /* If the second arg is constant zero, drop it. */
10751 if (integer_zerop (arg1))
10752 return non_lvalue (fold_convert (type, arg0));
10753 /* If the second arg is constant true, this is a logical inversion. */
10754 if (integer_onep (arg1))
10756 /* Only call invert_truthvalue if operand is a truth value. */
10757 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10758 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10759 else
10760 tem = invert_truthvalue (arg0);
10761 return non_lvalue (fold_convert (type, tem));
10763 /* Identical arguments cancel to zero. */
10764 if (operand_equal_p (arg0, arg1, 0))
10765 return omit_one_operand (type, integer_zero_node, arg0);
10767 /* !X ^ X is always true. */
10768 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10769 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10770 return omit_one_operand (type, integer_one_node, arg1);
10772 /* X ^ !X is always true. */
10773 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10774 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10775 return omit_one_operand (type, integer_one_node, arg0);
10777 return NULL_TREE;
10779 case EQ_EXPR:
10780 case NE_EXPR:
10781 tem = fold_comparison (code, type, op0, op1);
10782 if (tem != NULL_TREE)
10783 return tem;
10785 /* bool_var != 0 becomes bool_var. */
10786 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10787 && code == NE_EXPR)
10788 return non_lvalue (fold_convert (type, arg0));
10790 /* bool_var == 1 becomes bool_var. */
10791 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10792 && code == EQ_EXPR)
10793 return non_lvalue (fold_convert (type, arg0));
10795 /* bool_var != 1 becomes !bool_var. */
10796 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10797 && code == NE_EXPR)
10798 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10800 /* bool_var == 0 becomes !bool_var. */
10801 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10802 && code == EQ_EXPR)
10803 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10805 /* If this is an equality comparison of the address of a non-weak
10806 object against zero, then we know the result. */
10807 if (TREE_CODE (arg0) == ADDR_EXPR
10808 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10809 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10810 && integer_zerop (arg1))
10811 return constant_boolean_node (code != EQ_EXPR, type);
10813 /* If this is an equality comparison of the address of two non-weak,
10814 unaliased symbols neither of which is extern (since we do not
10815 have access to attributes for externs), then we know the result. */
10816 if (TREE_CODE (arg0) == ADDR_EXPR
10817 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10818 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10819 && ! lookup_attribute ("alias",
10820 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10821 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10822 && TREE_CODE (arg1) == ADDR_EXPR
10823 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10824 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10825 && ! lookup_attribute ("alias",
10826 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10827 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10829 /* We know that we're looking at the address of two
10830 non-weak, unaliased, static _DECL nodes.
10832 It is both wasteful and incorrect to call operand_equal_p
10833 to compare the two ADDR_EXPR nodes. It is wasteful in that
10834 all we need to do is test pointer equality for the arguments
10835 to the two ADDR_EXPR nodes. It is incorrect to use
10836 operand_equal_p as that function is NOT equivalent to a
10837 C equality test. It can in fact return false for two
10838 objects which would test as equal using the C equality
10839 operator. */
10840 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10841 return constant_boolean_node (equal
10842 ? code == EQ_EXPR : code != EQ_EXPR,
10843 type);
10846 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10847 a MINUS_EXPR of a constant, we can convert it into a comparison with
10848 a revised constant as long as no overflow occurs. */
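/* For example (illustrative constants): X + 3 == 10 folds to X == 7,
   and X - 3 == 10 folds to X == 13; the fold is abandoned if computing
   the revised constant overflows.  */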
10849 if (TREE_CODE (arg1) == INTEGER_CST
10850 && (TREE_CODE (arg0) == PLUS_EXPR
10851 || TREE_CODE (arg0) == MINUS_EXPR)
10852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10853 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10854 ? MINUS_EXPR : PLUS_EXPR,
10855 fold_convert (TREE_TYPE (arg0), arg1),
10856 TREE_OPERAND (arg0, 1), 0))
10857 && !TREE_OVERFLOW (tem))
10858 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10860 /* Similarly for a NEGATE_EXPR. */
10861 if (TREE_CODE (arg0) == NEGATE_EXPR
10862 && TREE_CODE (arg1) == INTEGER_CST
10863 && 0 != (tem = negate_expr (arg1))
10864 && TREE_CODE (tem) == INTEGER_CST
10865 && !TREE_OVERFLOW (tem))
10866 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10868 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
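/* E.g., with the illustrative constants C1 == 1 and C2 == 4,
   X ^ 1 == 4 becomes X == 5, since 1 ^ 4 == 5.  */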
10869 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10870 && TREE_CODE (arg1) == INTEGER_CST
10871 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10872 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10873 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10874 fold_convert (TREE_TYPE (arg0), arg1),
10875 TREE_OPERAND (arg0, 1)));
10877 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10878 for !=. Don't do this for ordered comparisons due to overflow. */
10879 if (TREE_CODE (arg0) == MINUS_EXPR
10880 && integer_zerop (arg1))
10881 return fold_build2 (code, type,
10882 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10884 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10885 if (TREE_CODE (arg0) == ABS_EXPR
10886 && (integer_zerop (arg1) || real_zerop (arg1)))
10887 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10889 /* If this is an EQ or NE comparison with zero and ARG0 is
10890 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10891 two operations, but the latter can be done in one less insn
10892 on machines that have only two-operand insns or on which a
10893 constant cannot be the first operand. */
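/* E.g., ((1 << n) & bar) != 0 becomes ((bar >> n) & 1) != 0
   (illustrative operands); both forms test the same single bit
   of bar.  */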
10894 if (TREE_CODE (arg0) == BIT_AND_EXPR
10895 && integer_zerop (arg1))
10897 tree arg00 = TREE_OPERAND (arg0, 0);
10898 tree arg01 = TREE_OPERAND (arg0, 1);
10899 if (TREE_CODE (arg00) == LSHIFT_EXPR
10900 && integer_onep (TREE_OPERAND (arg00, 0)))
10901 return
10902 fold_build2 (code, type,
10903 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10904 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10905 arg01, TREE_OPERAND (arg00, 1)),
10906 fold_convert (TREE_TYPE (arg0),
10907 integer_one_node)),
10908 arg1);
10909 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10910 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10911 return
10912 fold_build2 (code, type,
10913 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10914 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10915 arg00, TREE_OPERAND (arg01, 1)),
10916 fold_convert (TREE_TYPE (arg0),
10917 integer_one_node)),
10918 arg1);
10921 /* If this is an NE or EQ comparison of zero against the result of a
10922 signed MOD operation whose second operand is a power of 2, make
10923 the MOD operation unsigned since it is simpler and equivalent. */
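/* E.g., for a 32-bit signed X (an illustrative width), X % 8 == 0
   becomes (unsigned int) X % 8U == 0: divisibility by a power of two
   depends only on the low-order bits, so an equality test against
   zero is unaffected by the change of signedness.  */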
10924 if (integer_zerop (arg1)
10925 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10926 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10927 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10928 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10929 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10930 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10932 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10933 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10934 fold_convert (newtype,
10935 TREE_OPERAND (arg0, 0)),
10936 fold_convert (newtype,
10937 TREE_OPERAND (arg0, 1)));
10939 return fold_build2 (code, type, newmod,
10940 fold_convert (newtype, arg1));
10943 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10944 C1 is a valid shift constant, and C2 is a power of two, i.e.
10945 a single bit. */
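/* E.g., with the illustrative constants C1 == 3 and C2 == 4 and a
   32-bit X, ((X >> 3) & 4) != 0 becomes (X & 32) != 0, because
   4 << 3 == 32 still fits within the precision of X.  */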
10946 if (TREE_CODE (arg0) == BIT_AND_EXPR
10947 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10948 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10949 == INTEGER_CST
10950 && integer_pow2p (TREE_OPERAND (arg0, 1))
10951 && integer_zerop (arg1))
10953 tree itype = TREE_TYPE (arg0);
10954 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10955 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10957 /* Check for a valid shift count. */
10958 if (TREE_INT_CST_HIGH (arg001) == 0
10959 && TREE_INT_CST_LOW (arg001) < prec)
10961 tree arg01 = TREE_OPERAND (arg0, 1);
10962 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10963 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10964 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10965 can be rewritten as (X & (C2 << C1)) != 0. */
10966 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10968 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10969 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10970 return fold_build2 (code, type, tem, arg1);
10972 /* Otherwise, for signed (arithmetic) shifts,
10973 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10974 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10975 else if (!TYPE_UNSIGNED (itype))
10976 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10977 arg000, build_int_cst (itype, 0));
10978 /* Otherwise, for unsigned (logical) shifts,
10979 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10980 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10981 else
10982 return omit_one_operand (type,
10983 code == EQ_EXPR ? integer_one_node
10984 : integer_zero_node,
10985 arg000);
10989 /* If this is an NE comparison of zero with an AND of one, remove the
10990 comparison since the AND will give the correct value. */
10991 if (code == NE_EXPR
10992 && integer_zerop (arg1)
10993 && TREE_CODE (arg0) == BIT_AND_EXPR
10994 && integer_onep (TREE_OPERAND (arg0, 1)))
10995 return fold_convert (type, arg0);
10997 /* If we have (A & C) == C where C is a power of 2, convert this into
10998 (A & C) != 0. Similarly for NE_EXPR. */
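/* E.g., (flags & 8) == 8 becomes (flags & 8) != 0 (illustrative
   names); the two are equivalent because 8 is a single bit.  */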
10999 if (TREE_CODE (arg0) == BIT_AND_EXPR
11000 && integer_pow2p (TREE_OPERAND (arg0, 1))
11001 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11002 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11003 arg0, fold_convert (TREE_TYPE (arg0),
11004 integer_zero_node));
11006 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11007 bit, then fold the expression into A < 0 or A >= 0. */
11008 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11009 if (tem)
11010 return tem;
11012 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11013 Similarly for NE_EXPR. */
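/* E.g., (A & 12) == 1 folds to 0 (illustrative constants): the
   constant 1 has a bit set outside the mask 12, i.e. 1 & ~12 == 1,
   so no value of A can satisfy the equality.  */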
11014 if (TREE_CODE (arg0) == BIT_AND_EXPR
11015 && TREE_CODE (arg1) == INTEGER_CST
11016 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11018 tree notc = fold_build1 (BIT_NOT_EXPR,
11019 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11020 TREE_OPERAND (arg0, 1));
11021 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11022 arg1, notc);
11023 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11024 if (integer_nonzerop (dandnotc))
11025 return omit_one_operand (type, rslt, arg0);
11028 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11029 Similarly for NE_EXPR. */
11030 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11031 && TREE_CODE (arg1) == INTEGER_CST
11032 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11034 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11035 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11036 TREE_OPERAND (arg0, 1), notd);
11037 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11038 if (integer_nonzerop (candnotd))
11039 return omit_one_operand (type, rslt, arg0);
11042 /* If this is a comparison of a field, we may be able to simplify it. */
11043 if ((TREE_CODE (arg0) == COMPONENT_REF
11044 || TREE_CODE (arg0) == BIT_FIELD_REF)
11045 /* Handle the constant case even without -O
11046 to make sure the warnings are given. */
11047 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11049 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11050 if (t1)
11051 return t1;
11054 /* Optimize comparisons of strlen vs zero to a compare of the
11055 first character of the string vs zero. To wit,
11056 strlen(ptr) == 0 => *ptr == 0
11057 strlen(ptr) != 0 => *ptr != 0
11058 Other cases should reduce to one of these two (or a constant)
11059 due to the return value of strlen being unsigned. */
11060 if (TREE_CODE (arg0) == CALL_EXPR
11061 && integer_zerop (arg1))
11063 tree fndecl = get_callee_fndecl (arg0);
11064 tree arglist;
11066 if (fndecl
11067 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11068 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11069 && (arglist = TREE_OPERAND (arg0, 1))
11070 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
11071 && ! TREE_CHAIN (arglist))
11073 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
11074 return fold_build2 (code, type, iref,
11075 build_int_cst (TREE_TYPE (iref), 0));
11079 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11080 of X. Similarly fold (X >> C) == 0 into X >= 0. */
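/* E.g., assuming a 32-bit int X, (X >> 31) != 0 becomes X < 0 and
   (X >> 31) == 0 becomes X >= 0; for an unsigned X the operand is
   first converted to the corresponding signed type.  */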
11081 if (TREE_CODE (arg0) == RSHIFT_EXPR
11082 && integer_zerop (arg1)
11083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11085 tree arg00 = TREE_OPERAND (arg0, 0);
11086 tree arg01 = TREE_OPERAND (arg0, 1);
11087 tree itype = TREE_TYPE (arg00);
11088 if (TREE_INT_CST_HIGH (arg01) == 0
11089 && TREE_INT_CST_LOW (arg01)
11090 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11092 if (TYPE_UNSIGNED (itype))
11094 itype = lang_hooks.types.signed_type (itype);
11095 arg00 = fold_convert (itype, arg00);
11097 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11098 type, arg00, build_int_cst (itype, 0));
11102 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11103 if (integer_zerop (arg1)
11104 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11105 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11106 TREE_OPERAND (arg0, 1));
11108 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11109 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11110 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11111 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11112 build_int_cst (TREE_TYPE (arg1), 0));
11113 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11114 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11116 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11117 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11118 build_int_cst (TREE_TYPE (arg1), 0));
11120 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11121 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11122 && TREE_CODE (arg1) == INTEGER_CST
11123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11124 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11125 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11126 TREE_OPERAND (arg0, 1), arg1));
11128 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11129 (X & C) == 0 when C is a single bit. */
11130 if (TREE_CODE (arg0) == BIT_AND_EXPR
11131 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11132 && integer_zerop (arg1)
11133 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11135 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11136 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11137 TREE_OPERAND (arg0, 1));
11138 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11139 type, tem, arg1);
11142 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11143 constant C is a power of two, i.e. a single bit. */
11144 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11145 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11146 && integer_zerop (arg1)
11147 && integer_pow2p (TREE_OPERAND (arg0, 1))
11148 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11149 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11151 tree arg00 = TREE_OPERAND (arg0, 0);
11152 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11153 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11156 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11157 when C is a power of two, i.e. a single bit. */
11158 if (TREE_CODE (arg0) == BIT_AND_EXPR
11159 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11160 && integer_zerop (arg1)
11161 && integer_pow2p (TREE_OPERAND (arg0, 1))
11162 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11163 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11165 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11166 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11167 arg000, TREE_OPERAND (arg0, 1));
11168 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11169 tem, build_int_cst (TREE_TYPE (tem), 0));
11172 if (integer_zerop (arg1)
11173 && tree_expr_nonzero_p (arg0))
11175 tree res = constant_boolean_node (code == NE_EXPR, type);
11176 return omit_one_operand (type, res, arg0);
11179 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11180 if (TREE_CODE (arg0) == NEGATE_EXPR
11181 && TREE_CODE (arg1) == NEGATE_EXPR)
11182 return fold_build2 (code, type,
11183 TREE_OPERAND (arg0, 0),
11184 TREE_OPERAND (arg1, 0));
11186 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
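/* E.g., (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0 (illustrative
   mask): the masked values agree exactly when their XOR has no bits
   inside the mask.  */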
11187 if (TREE_CODE (arg0) == BIT_AND_EXPR
11188 && TREE_CODE (arg1) == BIT_AND_EXPR)
11190 tree arg00 = TREE_OPERAND (arg0, 0);
11191 tree arg01 = TREE_OPERAND (arg0, 1);
11192 tree arg10 = TREE_OPERAND (arg1, 0);
11193 tree arg11 = TREE_OPERAND (arg1, 1);
11194 tree itype = TREE_TYPE (arg0);
11196 if (operand_equal_p (arg01, arg11, 0))
11197 return fold_build2 (code, type,
11198 fold_build2 (BIT_AND_EXPR, itype,
11199 fold_build2 (BIT_XOR_EXPR, itype,
11200 arg00, arg10),
11201 arg01),
11202 build_int_cst (itype, 0));
11204 if (operand_equal_p (arg01, arg10, 0))
11205 return fold_build2 (code, type,
11206 fold_build2 (BIT_AND_EXPR, itype,
11207 fold_build2 (BIT_XOR_EXPR, itype,
11208 arg00, arg11),
11209 arg01),
11210 build_int_cst (itype, 0));
11212 if (operand_equal_p (arg00, arg11, 0))
11213 return fold_build2 (code, type,
11214 fold_build2 (BIT_AND_EXPR, itype,
11215 fold_build2 (BIT_XOR_EXPR, itype,
11216 arg01, arg10),
11217 arg00),
11218 build_int_cst (itype, 0));
11220 if (operand_equal_p (arg00, arg10, 0))
11221 return fold_build2 (code, type,
11222 fold_build2 (BIT_AND_EXPR, itype,
11223 fold_build2 (BIT_XOR_EXPR, itype,
11224 arg01, arg11),
11225 arg00),
11226 build_int_cst (itype, 0));
11229 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11230 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11232 tree arg00 = TREE_OPERAND (arg0, 0);
11233 tree arg01 = TREE_OPERAND (arg0, 1);
11234 tree arg10 = TREE_OPERAND (arg1, 0);
11235 tree arg11 = TREE_OPERAND (arg1, 1);
11236 tree itype = TREE_TYPE (arg0);
11238 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11239 operand_equal_p guarantees no side-effects so we don't need
11240 to use omit_one_operand on Z. */
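/* E.g., (X ^ Z) == (Y ^ Z) becomes X == Y: XOR with the same Z is
   invertible, so it cannot affect the outcome of the comparison.  */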
11241 if (operand_equal_p (arg01, arg11, 0))
11242 return fold_build2 (code, type, arg00, arg10);
11243 if (operand_equal_p (arg01, arg10, 0))
11244 return fold_build2 (code, type, arg00, arg11);
11245 if (operand_equal_p (arg00, arg11, 0))
11246 return fold_build2 (code, type, arg01, arg10);
11247 if (operand_equal_p (arg00, arg10, 0))
11248 return fold_build2 (code, type, arg01, arg11);
11250 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11251 if (TREE_CODE (arg01) == INTEGER_CST
11252 && TREE_CODE (arg11) == INTEGER_CST)
11253 return fold_build2 (code, type,
11254 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11255 fold_build2 (BIT_XOR_EXPR, itype,
11256 arg01, arg11)),
11257 arg10);
11259 return NULL_TREE;
11261 case LT_EXPR:
11262 case GT_EXPR:
11263 case LE_EXPR:
11264 case GE_EXPR:
11265 tem = fold_comparison (code, type, op0, op1);
11266 if (tem != NULL_TREE)
11267 return tem;
11269 /* Transform comparisons of the form X +- C CMP X. */
11270 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11271 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11272 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11273 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11274 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11275 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11277 tree arg01 = TREE_OPERAND (arg0, 1);
11278 enum tree_code code0 = TREE_CODE (arg0);
11279 int is_positive;
11281 if (TREE_CODE (arg01) == REAL_CST)
11282 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11283 else
11284 is_positive = tree_int_cst_sgn (arg01);
11286 /* (X - c) > X becomes false. */
11287 if (code == GT_EXPR
11288 && ((code0 == MINUS_EXPR && is_positive >= 0)
11289 || (code0 == PLUS_EXPR && is_positive <= 0)))
11290 return constant_boolean_node (0, type);
11292 /* Likewise (X + c) < X becomes false. */
11293 if (code == LT_EXPR
11294 && ((code0 == PLUS_EXPR && is_positive >= 0)
11295 || (code0 == MINUS_EXPR && is_positive <= 0)))
11296 return constant_boolean_node (0, type);
11298 /* Convert (X - c) <= X to true. */
11299 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11300 && code == LE_EXPR
11301 && ((code0 == MINUS_EXPR && is_positive >= 0)
11302 || (code0 == PLUS_EXPR && is_positive <= 0)))
11303 return constant_boolean_node (1, type);
11305 /* Convert (X + c) >= X to true. */
11306 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11307 && code == GE_EXPR
11308 && ((code0 == PLUS_EXPR && is_positive >= 0)
11309 || (code0 == MINUS_EXPR && is_positive <= 0)))
11310 return constant_boolean_node (1, type);
11312 if (TREE_CODE (arg01) == INTEGER_CST)
11314 /* Convert X + c > X and X - c < X to true for integers. */
11315 if (code == GT_EXPR
11316 && ((code0 == PLUS_EXPR && is_positive > 0)
11317 || (code0 == MINUS_EXPR && is_positive < 0)))
11318 return constant_boolean_node (1, type);
11320 if (code == LT_EXPR
11321 && ((code0 == MINUS_EXPR && is_positive > 0)
11322 || (code0 == PLUS_EXPR && is_positive < 0)))
11323 return constant_boolean_node (1, type);
11325 /* Convert X + c <= X and X - c >= X to false for integers. */
11326 if (code == LE_EXPR
11327 && ((code0 == PLUS_EXPR && is_positive > 0)
11328 || (code0 == MINUS_EXPR && is_positive < 0)))
11329 return constant_boolean_node (0, type);
11331 if (code == GE_EXPR
11332 && ((code0 == MINUS_EXPR && is_positive > 0)
11333 || (code0 == PLUS_EXPR && is_positive < 0)))
11334 return constant_boolean_node (0, type);
11338 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11339 This transformation affects the cases which are handled in later
11340 optimizations involving comparisons with non-negative constants. */
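/* E.g., X >= 5 becomes X > 4, and X < 5 becomes X <= 4 (illustrative
   constant; the rewrite requires C > 0).  */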
11341 if (TREE_CODE (arg1) == INTEGER_CST
11342 && TREE_CODE (arg0) != INTEGER_CST
11343 && tree_int_cst_sgn (arg1) > 0)
11345 if (code == GE_EXPR)
11347 arg1 = const_binop (MINUS_EXPR, arg1,
11348 build_int_cst (TREE_TYPE (arg1), 1), 0);
11349 return fold_build2 (GT_EXPR, type, arg0,
11350 fold_convert (TREE_TYPE (arg0), arg1));
11352 if (code == LT_EXPR)
11354 arg1 = const_binop (MINUS_EXPR, arg1,
11355 build_int_cst (TREE_TYPE (arg1), 1), 0);
11356 return fold_build2 (LE_EXPR, type, arg0,
11357 fold_convert (TREE_TYPE (arg0), arg1));
11361 /* Comparisons with the highest or lowest possible integer of
11362 the specified precision will have known values. */
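/* E.g., for an 8-bit unsigned type (an illustrative width) with
   arg1 == 255, the maximum value: X > 255 folds to 0, X <= 255 folds
   to 1, and X >= 255 folds to X == 255.  */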
11364 tree arg1_type = TREE_TYPE (arg1);
11365 unsigned int width = TYPE_PRECISION (arg1_type);
11367 if (TREE_CODE (arg1) == INTEGER_CST
11368 && !TREE_OVERFLOW (arg1)
11369 && width <= 2 * HOST_BITS_PER_WIDE_INT
11370 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11372 HOST_WIDE_INT signed_max_hi;
11373 unsigned HOST_WIDE_INT signed_max_lo;
11374 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11376 if (width <= HOST_BITS_PER_WIDE_INT)
11378 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11379 - 1;
11380 signed_max_hi = 0;
11381 max_hi = 0;
11383 if (TYPE_UNSIGNED (arg1_type))
11385 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11386 min_lo = 0;
11387 min_hi = 0;
11389 else
11391 max_lo = signed_max_lo;
11392 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11393 min_hi = -1;
11396 else
11398 width -= HOST_BITS_PER_WIDE_INT;
11399 signed_max_lo = -1;
11400 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11401 - 1;
11402 max_lo = -1;
11403 min_lo = 0;
11405 if (TYPE_UNSIGNED (arg1_type))
11407 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11408 min_hi = 0;
11410 else
11412 max_hi = signed_max_hi;
11413 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11417 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11418 && TREE_INT_CST_LOW (arg1) == max_lo)
11419 switch (code)
11421 case GT_EXPR:
11422 return omit_one_operand (type, integer_zero_node, arg0);
11424 case GE_EXPR:
11425 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11427 case LE_EXPR:
11428 return omit_one_operand (type, integer_one_node, arg0);
11430 case LT_EXPR:
11431 return fold_build2 (NE_EXPR, type, arg0, arg1);
11433 /* The GE_EXPR and LT_EXPR cases above are not normally
11434 reached because of previous transformations. */
11436 default:
11437 break;
11439 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11440 == max_hi
11441 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11442 switch (code)
11444 case GT_EXPR:
11445 arg1 = const_binop (PLUS_EXPR, arg1,
11446 build_int_cst (TREE_TYPE (arg1), 1), 0);
11447 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11448 case LE_EXPR:
11449 arg1 = const_binop (PLUS_EXPR, arg1,
11450 build_int_cst (TREE_TYPE (arg1), 1), 0);
11451 return fold_build2 (NE_EXPR, type, arg0, arg1);
11452 default:
11453 break;
11455 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11456 == min_hi
11457 && TREE_INT_CST_LOW (arg1) == min_lo)
11458 switch (code)
11460 case LT_EXPR:
11461 return omit_one_operand (type, integer_zero_node, arg0);
11463 case LE_EXPR:
11464 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11466 case GE_EXPR:
11467 return omit_one_operand (type, integer_one_node, arg0);
11469 case GT_EXPR:
11470 return fold_build2 (NE_EXPR, type, op0, op1);
11472 default:
11473 break;
11475 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11476 == min_hi
11477 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11478 switch (code)
11480 case GE_EXPR:
11481 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11482 return fold_build2 (NE_EXPR, type, arg0, arg1);
11483 case LT_EXPR:
11484 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11485 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11486 default:
11487 break;
11490 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11491 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11492 && TYPE_UNSIGNED (arg1_type)
11493 /* We will flip the signedness of the comparison operator
11494 associated with the mode of arg1, so the sign bit is
11495 specified by this mode. Check that arg1 is the signed
11496 max associated with this sign bit. */
11497 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11498 /* signed_type does not work on pointer types. */
11499 && INTEGRAL_TYPE_P (arg1_type))
11501 /* The following case also applies to X < signed_max+1
11502 and X >= signed_max+1 because of previous transformations. */
11503 if (code == LE_EXPR || code == GT_EXPR)
11505 tree st0, st1;
11506 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11507 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11508 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11509 type, fold_convert (st0, arg0),
11510 build_int_cst (st1, 0));
11516 /* If we are comparing an ABS_EXPR with a constant, we can
11517 convert all the cases into explicit comparisons, but they may
11518 well not be faster than doing the ABS and one comparison.
11519 But ABS (X) <= C is a range comparison, which becomes a subtraction
11520 and a comparison, and is probably faster. */
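/* E.g., ABS (X) <= 5 becomes X >= -5 && X <= 5 (illustrative bound),
   built as a TRUTH_ANDIF_EXPR of the two comparisons.  */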
11521 if (code == LE_EXPR
11522 && TREE_CODE (arg1) == INTEGER_CST
11523 && TREE_CODE (arg0) == ABS_EXPR
11524 && ! TREE_SIDE_EFFECTS (arg0)
11525 && (0 != (tem = negate_expr (arg1)))
11526 && TREE_CODE (tem) == INTEGER_CST
11527 && !TREE_OVERFLOW (tem))
11528 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11529 build2 (GE_EXPR, type,
11530 TREE_OPERAND (arg0, 0), tem),
11531 build2 (LE_EXPR, type,
11532 TREE_OPERAND (arg0, 0), arg1));
11534 /* Convert ABS_EXPR<x> >= 0 to true. */
11535 if (code == GE_EXPR
11536 && tree_expr_nonnegative_p (arg0)
11537 && (integer_zerop (arg1)
11538 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11539 && real_zerop (arg1))))
11540 return omit_one_operand (type, integer_one_node, arg0);
11542 /* Convert ABS_EXPR<x> < 0 to false. */
11543 if (code == LT_EXPR
11544 && tree_expr_nonnegative_p (arg0)
11545 && (integer_zerop (arg1) || real_zerop (arg1)))
11546 return omit_one_operand (type, integer_zero_node, arg0);
11548 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11549 and similarly for >= into !=. */
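/* E.g., for unsigned X, X < (1 << Y) becomes (X >> Y) == 0 and
   X >= (1 << Y) becomes (X >> Y) != 0: X >> Y is zero exactly when
   X is below the Y-th power of two.  */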
11550 if ((code == LT_EXPR || code == GE_EXPR)
11551 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11552 && TREE_CODE (arg1) == LSHIFT_EXPR
11553 && integer_onep (TREE_OPERAND (arg1, 0)))
11554 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11555 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11556 TREE_OPERAND (arg1, 1)),
11557 build_int_cst (TREE_TYPE (arg0), 0));
11559 if ((code == LT_EXPR || code == GE_EXPR)
11560 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11561 && (TREE_CODE (arg1) == NOP_EXPR
11562 || TREE_CODE (arg1) == CONVERT_EXPR)
11563 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11564 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11565 return
11566 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11567 fold_convert (TREE_TYPE (arg0),
11568 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11569 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11570 1))),
11571 build_int_cst (TREE_TYPE (arg0), 0));
11573 return NULL_TREE;
11575 case UNORDERED_EXPR:
11576 case ORDERED_EXPR:
11577 case UNLT_EXPR:
11578 case UNLE_EXPR:
11579 case UNGT_EXPR:
11580 case UNGE_EXPR:
11581 case UNEQ_EXPR:
11582 case LTGT_EXPR:
11583 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11585 t1 = fold_relational_const (code, type, arg0, arg1);
11586 if (t1 != NULL_TREE)
11587 return t1;
11590 /* If the first operand is NaN, the result is constant. */
11591 if (TREE_CODE (arg0) == REAL_CST
11592 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11593 && (code != LTGT_EXPR || ! flag_trapping_math))
11595 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11596 ? integer_zero_node
11597 : integer_one_node;
11598 return omit_one_operand (type, t1, arg1);
11601 /* If the second operand is NaN, the result is constant. */
11602 if (TREE_CODE (arg1) == REAL_CST
11603 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11604 && (code != LTGT_EXPR || ! flag_trapping_math))
11606 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11607 ? integer_zero_node
11608 : integer_one_node;
11609 return omit_one_operand (type, t1, arg0);
11612 /* Simplify unordered comparison of something with itself. */
11613 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11614 && operand_equal_p (arg0, arg1, 0))
11615 return constant_boolean_node (1, type);
11617 if (code == LTGT_EXPR
11618 && !flag_trapping_math
11619 && operand_equal_p (arg0, arg1, 0))
11620 return constant_boolean_node (0, type);
11622 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
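/* E.g., with float f1, f2 (illustrative types), (double) f1 < (double) f2
   becomes f1 < f2: widening a float to double is exact, so it preserves
   both the ordering and the NaN-ness of the operands.  */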
11624 tree targ0 = strip_float_extensions (arg0);
11625 tree targ1 = strip_float_extensions (arg1);
11626 tree newtype = TREE_TYPE (targ0);
11628 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11629 newtype = TREE_TYPE (targ1);
11631 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11632 return fold_build2 (code, type, fold_convert (newtype, targ0),
11633 fold_convert (newtype, targ1));
11636 return NULL_TREE;
11638 case COMPOUND_EXPR:
11639 /* When pedantic, a compound expression can be neither an lvalue
11640 nor an integer constant expression. */
11641 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11642 return NULL_TREE;
11643 /* Don't let (0, 0) be a null pointer constant. */
11644 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11645 : fold_convert (type, arg1);
11646 return pedantic_non_lvalue (tem);
11648 case COMPLEX_EXPR:
11649 if ((TREE_CODE (arg0) == REAL_CST
11650 && TREE_CODE (arg1) == REAL_CST)
11651 || (TREE_CODE (arg0) == INTEGER_CST
11652 && TREE_CODE (arg1) == INTEGER_CST))
11653 return build_complex (type, arg0, arg1);
11654 return NULL_TREE;
11656 case ASSERT_EXPR:
11657 /* An ASSERT_EXPR should never be passed to fold_binary. */
11658 gcc_unreachable ();
11660 default:
11661 return NULL_TREE;
11662 } /* switch (code) */
11665 /* Callback for walk_tree, looking for LABEL_EXPR.
11666 Returns *TP if it is a LABEL_EXPR, and NULL_TREE otherwise.
11667 Does not descend into the sub-tree of a GOTO_EXPR. */
11669 static tree
11670 contains_label_1 (tree *tp,
11671 int *walk_subtrees,
11672 void *data ATTRIBUTE_UNUSED)
11674 switch (TREE_CODE (*tp))
11676 case LABEL_EXPR:
11677 return *tp;
11678 case GOTO_EXPR:
11679 *walk_subtrees = 0;
11680 /* no break */
11681 default:
11682 return NULL_TREE;
11686 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11687 accessible from outside the sub-tree. Returns true if such a label
11688 is found, and false otherwise. */
11690 static bool
11691 contains_label_p (tree st)
11693 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
11696 /* Fold a ternary expression of code CODE and type TYPE with operands
11697 OP0, OP1, and OP2. Return the folded expression if folding is
11698 successful. Otherwise, return NULL_TREE. */
11700 tree
11701 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11703 tree tem;
11704 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11705 enum tree_code_class kind = TREE_CODE_CLASS (code);
11707 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11708 && TREE_CODE_LENGTH (code) == 3);
11710 /* Strip any conversions that don't change the mode. This is safe
11711 for every expression, except for a comparison expression because
11712 its signedness is derived from its operands. So, in the latter
11713 case, only strip conversions that don't change the signedness.
11715 Note that this is done as an internal manipulation within the
11716 constant folder, in order to find the simplest representation of
11717 the arguments so that their form can be studied. In any case,
11718 the appropriate type conversions should be put back in the tree
11719 that will get out of the constant folder. */
11720 if (op0)
11722 arg0 = op0;
11723 STRIP_NOPS (arg0);
11726 if (op1)
11728 arg1 = op1;
11729 STRIP_NOPS (arg1);
11732 switch (code)
11734 case COMPONENT_REF:
11735 if (TREE_CODE (arg0) == CONSTRUCTOR
11736 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11738 unsigned HOST_WIDE_INT idx;
11739 tree field, value;
11740 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11741 if (field == arg1)
11742 return value;
11744 return NULL_TREE;
11746 case COND_EXPR:
11747 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11748 so all simple results must be passed through pedantic_non_lvalue. */
11749 if (TREE_CODE (arg0) == INTEGER_CST)
11751 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11752 tem = integer_zerop (arg0) ? op2 : op1;
11753 /* Only optimize constant conditions when the selected branch
11754 has the same type as the COND_EXPR. This avoids optimizing
11755 away "c ? x : throw", where the throw has a void type.
11756 Avoid throwing away an operand which contains a label. */
11757 if ((!TREE_SIDE_EFFECTS (unused_op)
11758 || !contains_label_p (unused_op))
11759 && (! VOID_TYPE_P (TREE_TYPE (tem))
11760 || VOID_TYPE_P (type)))
11761 return pedantic_non_lvalue (tem);
11762 return NULL_TREE;
11764 if (operand_equal_p (arg1, op2, 0))
11765 return pedantic_omit_one_operand (type, arg1, arg0);
11767 /* If we have A op B ? A : C, we may be able to convert this to a
11768 simpler expression, depending on the operation and the values
11769 of B and C. Signed zeros prevent all of these transformations,
11770 for reasons given above each one.
11772 Also try swapping the arguments and inverting the conditional. */
11773 if (COMPARISON_CLASS_P (arg0)
11774 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11775 arg1, TREE_OPERAND (arg0, 1))
11776 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11778 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11779 if (tem)
11780 return tem;
11783 if (COMPARISON_CLASS_P (arg0)
11784 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11785 op2,
11786 TREE_OPERAND (arg0, 1))
11787 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11789 tem = fold_truth_not_expr (arg0);
11790 if (tem && COMPARISON_CLASS_P (tem))
11792 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11793 if (tem)
11794 return tem;
11798 /* If the second operand is simpler than the third, swap them
11799 since that produces better jump optimization results. */
11800 if (truth_value_p (TREE_CODE (arg0))
11801 && tree_swap_operands_p (op1, op2, false))
11803 /* See if this can be inverted. If it can't, possibly because
11804 it was a floating-point inequality comparison, don't do
11805 anything. */
11806 tem = fold_truth_not_expr (arg0);
11807 if (tem)
11808 return fold_build3 (code, type, tem, op2, op1);
11811 /* Convert A ? 1 : 0 to simply A. */
11812 if (integer_onep (op1)
11813 && integer_zerop (op2)
11814 /* If we try to convert OP0 to our type, the
11815 call to fold will try to move the conversion inside
11816 a COND, which will recurse. In that case, the COND_EXPR
11817 is probably the best choice, so leave it alone. */
11818 && type == TREE_TYPE (arg0))
11819 return pedantic_non_lvalue (arg0);
11821 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11822 over COND_EXPR in cases such as floating point comparisons. */
11823 if (integer_zerop (op1)
11824 && integer_onep (op2)
11825 && truth_value_p (TREE_CODE (arg0)))
11826 return pedantic_non_lvalue (fold_convert (type,
11827 invert_truthvalue (arg0)));
11829 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11830 if (TREE_CODE (arg0) == LT_EXPR
11831 && integer_zerop (TREE_OPERAND (arg0, 1))
11832 && integer_zerop (op2)
11833 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11835 /* sign_bit_p only checks ARG1 bits within A's precision.
11836 If <sign bit of A> has wider type than A, bits outside
11837 of A's precision in <sign bit of A> need to be checked.
11838 If they are all 0, this optimization needs to be done
11839 in unsigned A's type; if they are all 1, in signed A's type;
11840 otherwise this can't be done. */
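/* E.g. (illustrative widths): if A is a 32-bit int, its sign bit is
   bit 31, and arg1 is the 64-bit constant 0x0000000080000000, then
   bits 32..63 of arg1 are all 0 and the AND is done in unsigned A's
   type; were arg1 0xFFFFFFFF80000000, those bits are all 1 and
   signed A's type is used instead.  */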
11841 if (TYPE_PRECISION (TREE_TYPE (tem))
11842 < TYPE_PRECISION (TREE_TYPE (arg1))
11843 && TYPE_PRECISION (TREE_TYPE (tem))
11844 < TYPE_PRECISION (type))
11846 unsigned HOST_WIDE_INT mask_lo;
11847 HOST_WIDE_INT mask_hi;
11848 int inner_width, outer_width;
11849 tree tem_type;
11851 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11852 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11853 if (outer_width > TYPE_PRECISION (type))
11854 outer_width = TYPE_PRECISION (type);
11856 if (outer_width > HOST_BITS_PER_WIDE_INT)
11858 mask_hi = ((unsigned HOST_WIDE_INT) -1
11859 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11860 mask_lo = -1;
11862 else
11864 mask_hi = 0;
11865 mask_lo = ((unsigned HOST_WIDE_INT) -1
11866 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11868 if (inner_width > HOST_BITS_PER_WIDE_INT)
11870 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11871 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11872 mask_lo = 0;
11874 else
11875 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11876 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11878 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11879 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11881 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11882 tem = fold_convert (tem_type, tem);
11884 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11885 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11887 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11888 tem = fold_convert (tem_type, tem);
11890 else
11891 tem = NULL;
11894 if (tem)
11895 return fold_convert (type,
11896 fold_build2 (BIT_AND_EXPR,
11897 TREE_TYPE (tem), tem,
11898 fold_convert (TREE_TYPE (tem),
11899 arg1)));
11902 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11903 already handled above. */
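/* E.g., with the illustrative shift count 4,
   ((A >> 4) & 1) ? (1 << 4) : 0 becomes A & 16.  */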
11904 if (TREE_CODE (arg0) == BIT_AND_EXPR
11905 && integer_onep (TREE_OPERAND (arg0, 1))
11906 && integer_zerop (op2)
11907 && integer_pow2p (arg1))
11909 tree tem = TREE_OPERAND (arg0, 0);
11910 STRIP_NOPS (tem);
11911 if (TREE_CODE (tem) == RSHIFT_EXPR
11912 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11913 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11914 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11915 return fold_build2 (BIT_AND_EXPR, type,
11916 TREE_OPERAND (tem, 0), arg1);
11919 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11920 is probably obsolete because the first operand should be a
11921 truth value (that's why we have the two cases above), but let's
11922 leave it in until we can confirm this for all front-ends. */
11923 if (integer_zerop (op2)
11924 && TREE_CODE (arg0) == NE_EXPR
11925 && integer_zerop (TREE_OPERAND (arg0, 1))
11926 && integer_pow2p (arg1)
11927 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11928 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11929 arg1, OEP_ONLY_CONST))
11930 return pedantic_non_lvalue (fold_convert (type,
11931 TREE_OPERAND (arg0, 0)));
11933 /* Convert A ? B : 0 into A && B if A and B are truth values. */
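/* E.g., a ? b : 0 becomes a && b for truth-valued a and b
   (illustrative names), built as a TRUTH_ANDIF_EXPR.  */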
11934 if (integer_zerop (op2)
11935 && truth_value_p (TREE_CODE (arg0))
11936 && truth_value_p (TREE_CODE (arg1)))
11937 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11938 fold_convert (type, arg0),
11939 arg1);
11941 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11942 if (integer_onep (op2)
11943 && truth_value_p (TREE_CODE (arg0))
11944 && truth_value_p (TREE_CODE (arg1)))
11946 /* Only perform transformation if ARG0 is easily inverted. */
11947 tem = fold_truth_not_expr (arg0);
11948 if (tem)
11949 return fold_build2 (TRUTH_ORIF_EXPR, type,
11950 fold_convert (type, tem),
11951 arg1);
11954 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11955 if (integer_zerop (arg1)
11956 && truth_value_p (TREE_CODE (arg0))
11957 && truth_value_p (TREE_CODE (op2)))
11959 /* Only perform transformation if ARG0 is easily inverted. */
11960 tem = fold_truth_not_expr (arg0);
11961 if (tem)
11962 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11963 fold_convert (type, tem),
11964 op2);
11967 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11968 if (integer_onep (arg1)
11969 && truth_value_p (TREE_CODE (arg0))
11970 && truth_value_p (TREE_CODE (op2)))
11971 return fold_build2 (TRUTH_ORIF_EXPR, type,
11972 fold_convert (type, arg0),
11973 op2);
11975 return NULL_TREE;
11977 case CALL_EXPR:
11978 /* Check for a built-in function. */
11979 if (TREE_CODE (op0) == ADDR_EXPR
11980 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11981 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11982 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11983 return NULL_TREE;
11985 case BIT_FIELD_REF:
11986 if (TREE_CODE (arg0) == VECTOR_CST
11987 && type == TREE_TYPE (TREE_TYPE (arg0))
11988 && host_integerp (arg1, 1)
11989 && host_integerp (op2, 1))
11991 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11992 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11994 if (width != 0
11995 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11996 && (idx % width) == 0
11997 && (idx = idx / width)
11998 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12000 tree elements = TREE_VECTOR_CST_ELTS (arg0);
12001 while (idx-- > 0 && elements)
12002 elements = TREE_CHAIN (elements);
12003 if (elements)
12004 return TREE_VALUE (elements);
12005 else
12006 return fold_convert (type, integer_zero_node);
12009 return NULL_TREE;
12011 default:
12012 return NULL_TREE;
12013 } /* switch (code) */
12016 /* Perform constant folding and related simplification of EXPR.
12017 The related simplifications include x*1 => x, x*0 => 0, etc.,
12018 and application of the associative law.
12019 NOP_EXPR conversions may be removed freely (as long as we
12020 are careful not to change the type of the overall expression).
12021 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12022 but we can constant-fold them if they have constant operands. */
12024 #ifdef ENABLE_FOLD_CHECKING
12025 # define fold(x) fold_1 (x)
12026 static tree fold_1 (tree);
12027 static
12028 #endif
12029 tree
12030 fold (tree expr)
12032 const tree t = expr;
12033 enum tree_code code = TREE_CODE (t);
12034 enum tree_code_class kind = TREE_CODE_CLASS (code);
12035 tree tem;
12037 /* Return right away if a constant. */
12038 if (kind == tcc_constant)
12039 return t;
12041 if (IS_EXPR_CODE_CLASS (kind)
12042 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12044 tree type = TREE_TYPE (t);
12045 tree op0, op1, op2;
12047 switch (TREE_CODE_LENGTH (code))
12049 case 1:
12050 op0 = TREE_OPERAND (t, 0);
12051 tem = fold_unary (code, type, op0);
12052 return tem ? tem : expr;
12053 case 2:
12054 op0 = TREE_OPERAND (t, 0);
12055 op1 = TREE_OPERAND (t, 1);
12056 tem = fold_binary (code, type, op0, op1);
12057 return tem ? tem : expr;
12058 case 3:
12059 op0 = TREE_OPERAND (t, 0);
12060 op1 = TREE_OPERAND (t, 1);
12061 op2 = TREE_OPERAND (t, 2);
12062 tem = fold_ternary (code, type, op0, op1, op2);
12063 return tem ? tem : expr;
12064 default:
12065 break;
12069 switch (code)
12071 case CONST_DECL:
12072 return fold (DECL_INITIAL (t));
12074 default:
12075 return t;
12076 } /* switch (code) */
12079 #ifdef ENABLE_FOLD_CHECKING
12080 #undef fold
12082 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12083 static void fold_check_failed (tree, tree);
12084 void print_fold_checksum (tree);
12086 /* When --enable-checking=fold, compute a digest of expr before
12087 and after the actual fold call, to verify that fold did not
12088 accidentally change the original expr. */
12090 tree
12091 fold (tree expr)
12093 tree ret;
12094 struct md5_ctx ctx;
12095 unsigned char checksum_before[16], checksum_after[16];
12096 htab_t ht;
12098 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12099 md5_init_ctx (&ctx);
12100 fold_checksum_tree (expr, &ctx, ht);
12101 md5_finish_ctx (&ctx, checksum_before);
12102 htab_empty (ht);
12104 ret = fold_1 (expr);
12106 md5_init_ctx (&ctx);
12107 fold_checksum_tree (expr, &ctx, ht);
12108 md5_finish_ctx (&ctx, checksum_after);
12109 htab_delete (ht);
12111 if (memcmp (checksum_before, checksum_after, 16))
12112 fold_check_failed (expr, ret);
12114 return ret;
12117 void
12118 print_fold_checksum (tree expr)
12120 struct md5_ctx ctx;
12121 unsigned char checksum[16], cnt;
12122 htab_t ht;
12124 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12125 md5_init_ctx (&ctx);
12126 fold_checksum_tree (expr, &ctx, ht);
12127 md5_finish_ctx (&ctx, checksum);
12128 htab_delete (ht);
12129 for (cnt = 0; cnt < 16; ++cnt)
12130 fprintf (stderr, "%02x", checksum[cnt]);
12131 putc ('\n', stderr);
12134 static void
12135 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12137 internal_error ("fold check: original tree changed by fold");
12140 static void
12141 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12143 void **slot;
12144 enum tree_code code;
12145 struct tree_function_decl buf;
12146 int i, len;
12148 recursive_label:
12150 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12151 <= sizeof (struct tree_function_decl))
12152 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12153 if (expr == NULL)
12154 return;
12155 slot = htab_find_slot (ht, expr, INSERT);
12156 if (*slot != NULL)
12157 return;
12158 *slot = expr;
12159 code = TREE_CODE (expr);
12160 if (TREE_CODE_CLASS (code) == tcc_declaration
12161 && DECL_ASSEMBLER_NAME_SET_P (expr))
12163 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12164 memcpy ((char *) &buf, expr, tree_size (expr));
12165 expr = (tree) &buf;
12166 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12168 else if (TREE_CODE_CLASS (code) == tcc_type
12169 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12170 || TYPE_CACHED_VALUES_P (expr)
12171 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12173 /* Allow these fields to be modified. */
12174 memcpy ((char *) &buf, expr, tree_size (expr));
12175 expr = (tree) &buf;
12176 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12177 TYPE_POINTER_TO (expr) = NULL;
12178 TYPE_REFERENCE_TO (expr) = NULL;
12179 if (TYPE_CACHED_VALUES_P (expr))
12181 TYPE_CACHED_VALUES_P (expr) = 0;
12182 TYPE_CACHED_VALUES (expr) = NULL;
12185 md5_process_bytes (expr, tree_size (expr), ctx);
12186 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12187 if (TREE_CODE_CLASS (code) != tcc_type
12188 && TREE_CODE_CLASS (code) != tcc_declaration
12189 && code != TREE_LIST)
12190 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12191 switch (TREE_CODE_CLASS (code))
12193 case tcc_constant:
12194 switch (code)
12196 case STRING_CST:
12197 md5_process_bytes (TREE_STRING_POINTER (expr),
12198 TREE_STRING_LENGTH (expr), ctx);
12199 break;
12200 case COMPLEX_CST:
12201 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12202 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12203 break;
12204 case VECTOR_CST:
12205 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12206 break;
12207 default:
12208 break;
12210 break;
12211 case tcc_exceptional:
12212 switch (code)
12214 case TREE_LIST:
12215 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12216 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12217 expr = TREE_CHAIN (expr);
12218 goto recursive_label;
12219 break;
12220 case TREE_VEC:
12221 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12222 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12223 break;
12224 default:
12225 break;
12227 break;
12228 case tcc_expression:
12229 case tcc_reference:
12230 case tcc_comparison:
12231 case tcc_unary:
12232 case tcc_binary:
12233 case tcc_statement:
12234 len = TREE_CODE_LENGTH (code);
12235 for (i = 0; i < len; ++i)
12236 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12237 break;
12238 case tcc_declaration:
12239 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12240 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12241 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12243 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12244 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12245 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12246 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12247 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12249 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12250 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12252 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12254 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12255 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12256 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12258 break;
12259 case tcc_type:
12260 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12261 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12262 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12263 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12264 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12265 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12266 if (INTEGRAL_TYPE_P (expr)
12267 || SCALAR_FLOAT_TYPE_P (expr))
12269 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12270 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12272 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12273 if (TREE_CODE (expr) == RECORD_TYPE
12274 || TREE_CODE (expr) == UNION_TYPE
12275 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12276 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12277 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12278 break;
12279 default:
12280 break;
12284 #endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
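/* Illustrative sketch, not part of the original file: a hypothetical
   caller folding a unary negation of a constant.  Assumes the front end
   has initialized integer_type_node.  */
#if 0
static tree
example_fold_negate (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  /* fold_build1 tries fold_unary first; for a constant operand this
     reduces to the INTEGER_CST -5 rather than a NEGATE_EXPR node.  */
  return fold_build1 (NEGATE_EXPR, integer_type_node, five);
}
#endif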
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
                  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
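/* Illustrative sketch, not part of the original file: folding 2 + 3.
   fold_binary computes the INTEGER_CST 5; had either operand been
   non-constant, build2_stat would have produced a PLUS_EXPR node
   instead.  */
#if 0
static tree
example_fold_plus (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_build2 (PLUS_EXPR, integer_type_node, two, three);
}
#endif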
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
                  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
                         tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
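/* Illustrative sketch, not part of the original file: folding inside a
   static initializer.  The _initializer variants temporarily clear
   flag_trapping_math, flag_rounding_math, flag_signaling_nans and
   flag_trapv, so operations that fold would otherwise have to preserve
   for run time can be evaluated at compile time.  */
#if 0
static tree
example_fold_initializer (void)
{
  tree one = build_real (double_type_node, dconst1);
  tree two = build_real (double_type_node, dconst2);
  /* Folds to the REAL_CST 3.0 even under -frounding-math.  */
  return fold_build2_initializer (PLUS_EXPR, double_type_node, one, two);
}
#endif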
/* Determine if first argument is a multiple of second argument.  Return 0
   if it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
                                             top, bottom, 0));

    default:
      return 0;
    }
}
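/* Illustrative sketch, not part of the original file: how a caller such
   as round_up below uses this predicate.  For a VALUE with tree form
   J * 8 and an INTEGER_CST divisor of 4, the MULT_EXPR case accepts
   because 8 % 4 == 0, so no run-time rounding code need be emitted.  */
#if 0
static int
example_multiple_query (tree value)
{
  tree four = build_int_cst (TREE_TYPE (value), 4);
  return multiple_of_p (TREE_TYPE (value), value, four);
}
#endif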
/* Return true if `t' is known to be non-negative.  */

bool
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return false;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        return true;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return true;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
      return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if ((TREE_CODE (t) == MODIFY_EXPR
             || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
            && GENERIC_TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));

        return false;
      }

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
            CASE_INT_FN (BUILT_IN_FFS):
            CASE_INT_FN (BUILT_IN_PARITY):
            CASE_INT_FN (BUILT_IN_POPCOUNT):
            case BUILT_IN_BSWAP32:
            case BUILT_IN_BSWAP64:
              /* Always true.  */
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return true;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_ASINH):
            CASE_FLT_FN (BUILT_IN_ATAN):
            CASE_FLT_FN (BUILT_IN_ATANH):
            CASE_FLT_FN (BUILT_IN_CBRT):
            CASE_FLT_FN (BUILT_IN_CEIL):
            CASE_FLT_FN (BUILT_IN_ERF):
            CASE_FLT_FN (BUILT_IN_EXPM1):
            CASE_FLT_FN (BUILT_IN_FLOOR):
            CASE_FLT_FN (BUILT_IN_FMOD):
            CASE_FLT_FN (BUILT_IN_FREXP):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LDEXP):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
            CASE_FLT_FN (BUILT_IN_LLRINT):
            CASE_FLT_FN (BUILT_IN_LLROUND):
            CASE_FLT_FN (BUILT_IN_LRINT):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_MODF):
            CASE_FLT_FN (BUILT_IN_NEARBYINT):
            CASE_FLT_FN (BUILT_IN_RINT):
            CASE_FLT_FN (BUILT_IN_ROUND):
            CASE_FLT_FN (BUILT_IN_SIGNBIT):
            CASE_FLT_FN (BUILT_IN_SINH):
            CASE_FLT_FN (BUILT_IN_TANH):
            CASE_FLT_FN (BUILT_IN_TRUNC):
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_FMAX):
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_FMIN):
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_COPYSIGN):
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the 1st argument is nonnegative or the second
                 argument is an even integer.  */
              if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
                {
                  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
                  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
                    return true;
                }
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the 1st argument is nonnegative or the second
                 argument is an even integer valued real.  */
              if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
                  n = real_to_integer (&c);
                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n,
                                         n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            default:
              break;
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return true;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
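/* Illustrative sketch, not part of the original file: simple queries
   against this predicate.  A nonnegative INTEGER_CST answers true via
   the INTEGER_CST case; SIGNED_VAR is a hypothetical VAR_DECL of
   signed type, which falls through to the conservative "false".  */
#if 0
static void
example_nonnegative_queries (tree signed_var)
{
  tree seven = build_int_cst (integer_type_node, 7);
  gcc_assert (tree_expr_nonnegative_p (seven));       /* sgn >= 0  */
  gcc_assert (!tree_expr_nonnegative_p (signed_var)); /* unknown sign  */
}
#endif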
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      return !integer_zerop (t);

    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* In the presence of negative values it is hard
             to say anything.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
             || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
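/* Illustrative sketch, not part of the original file: the ADDR_EXPR
   case in action.  DECL is a hypothetical VAR_DECL; its address cannot
   be null unless the declaration is weak, in which case it may resolve
   to null at link time.  */
#if 0
static bool
example_address_nonzero (tree decl)
{
  tree addr = build_fold_addr_expr (decl);
  return tree_expr_nonzero_p (addr);  /* true iff !DECL_WEAK (decl)  */
}
#endif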
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
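/* Illustrative sketch, not part of the original file: the _to_constant
   entry points either produce a constant or NULL_TREE; they never
   build a new non-constant expression.  VAR is a hypothetical
   VAR_DECL.  */
#if 0
static void
example_fold_to_constant (tree var)
{
  tree six = build_int_cst (integer_type_node, 6);
  tree seven = build_int_cst (integer_type_node, 7);
  /* Folds to the INTEGER_CST 42.  */
  tree c = fold_binary_to_constant (MULT_EXPR, integer_type_node, six, seven);
  /* 6 * VAR is not constant, so the result is NULL_TREE rather than
     a MULT_EXPR node.  */
  tree n = fold_binary_to_constant (MULT_EXPR, integer_type_node, six, var);
  gcc_assert (c != NULL_TREE && n == NULL_TREE);
}
#endif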
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
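/* Illustrative sketch, not part of the original file: given a tree EXP
   that represents "abc"[1] (an ARRAY_REF of a STRING_CST with a
   constant index), the routine above returns the character constant
   'b' converted to the type of EXP; for a variable index it returns
   NULL.  */
#if 0
static tree
example_read_from_string (tree exp)
{
  tree c = fold_read_from_constant_string (exp);
  return c ? c : exp;  /* Fall back to the original reference.  */
}
#endif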
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = force_fit_type_double (type, low, high, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
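/* Illustrative sketch, not part of the original file: negating the most
   negative value of a signed type wraps, so the result carries
   TREE_OVERFLOW.  */
#if 0
static void
example_negate_int_min (void)
{
  tree t = fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
                              integer_type_node);
  gcc_assert (TREE_OVERFLOW (t));  /* neg_double reported overflow.  */
}
#endif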
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = force_fit_type_double (type, low, high, -1,
                                     overflow | TREE_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
                             ~TREE_INT_CST_HIGH (arg0), 0,
                             TREE_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
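/* Illustrative sketch, not part of the original file: comparing two
   INTEGER_CSTs.  LE and GT are rewritten as a swapped LT, and NE and
   GE as an inverted comparison, so only the EQ and LT paths below the
   NaN handling ever compute a result.  */
#if 0
static void
example_relational_const (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree seven = build_int_cst (integer_type_node, 7);
  tree r = fold_relational_const (LT_EXPR, boolean_type_node, two, seven);
  gcc_assert (r == boolean_true_node);
}
#endif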
/* Build an expression for a cleanup point containing EXPR, with type TYPE.
   Don't build a cleanup point expression for an EXPR which doesn't have
   side effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If it doesn't, we don't need to wrap the
     expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
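/* Illustrative sketch, not part of the original file: taking the
   address of a hypothetical VAR_DECL.  Besides building the ADDR_EXPR,
   the helper above marks the underlying decl addressable; and since
   INDIRECT_REF is folded away, &*p simplifies back to p.  */
#if 0
static tree
example_take_address (tree var_decl)
{
  tree addr = build_fold_addr_expr (var_decl);
  gcc_assert (TREE_ADDRESSABLE (var_decl));
  return addr;
}
#endif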
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
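/* Illustrative sketch, not part of the original file: the *&VAR case.
   fold_indirect_ref_1 sees an ADDR_EXPR whose operand already has the
   requested type and hands back the operand itself, so no INDIRECT_REF
   node is built.  VAR_DECL here is a hypothetical variable.  */
#if 0
static void
example_indirect_of_address (tree var_decl)
{
  tree deref = build_fold_indirect_ref (build_fold_addr_expr (var_decl));
  gcc_assert (deref == var_decl);
}
#endif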
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop (PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop (BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
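/* Illustrative sketch, not part of the original file: rounding the
   constant 37 up to a multiple of 8.  Because 8 is a power of two this
   takes the bit-twiddling path, computing (37 & ~7) + 8 == 40.  */
#if 0
static void
example_round_up (void)
{
  tree r = round_up (size_int (37), 8);
  gcc_assert (compare_tree_int (r, 40) == 0);
}
#endif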
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
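/* Illustrative sketch, not part of the original file: E1 and E2 are
   hypothetical ADDR_EXPRs of a[3] and a[1] for an array of 4-byte
   elements.  Both split to the same core, the offsets fold to a
   constant, and *DIFF becomes 8.  */
#if 0
static bool
example_pointer_difference (tree e1, tree e2)
{
  HOST_WIDE_INT diff;
  if (!ptr_difference_const (e1, e2, &diff))
    return false;
  return diff == 8;
}
#endif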
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
            arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
            return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
                if (arg0)
                  return build_function_call_expr (get_callee_fndecl (exp),
                                                   build_tree_list (NULL_TREE,
                                                                    arg0));
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
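/* Illustrative sketch, not part of the original file: when only the
   magnitude of the result matters, sign-flipping operations can be
   peeled off.  For EXP of the form -x (a NEGATE_EXPR), the routine
   above returns x; a caller folding fabs (-x) can therefore operate
   on x directly.  */
#if 0
static tree
example_strip_signs (tree exp)
{
  tree stripped = fold_strip_sign_ops (exp);
  return stripped ? stripped : exp;
}
#endif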