/* official-gcc.git: gcc/fold-const.c */
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
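/* Editorial usage sketch, not part of the original source: how the
   entry points above compose.  It assumes a tree T of type `sizetype'
   is in scope, and is kept out of compilation with #if 0.  */
#if 0
{
  tree four = size_int (4);                     /* sizetype constant 4 */
  tree sum = size_binop (PLUS_EXPR, t, four);   /* folds T + 4 */
  /* The long-hand equivalent: build the expression, then fold it.  */
  tree sum2 = fold (build2 (PLUS_EXPR, sizetype, t, four));
}
#endif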
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern tree build_range_check (tree, tree, int, tree, tree);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
			  tree, tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
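/* Editorial sketch, not part of the original source: OVERFLOW_SUM_SIGN
   applied to single words.  Kept out of compilation with #if 0.  */
#if 0
{
  /* A is the most positive HOST_WIDE_INT; A + 1 wraps to the most
     negative value.  */
  HOST_WIDE_INT a = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) -1 >> 1);
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a + 1);

  /* A and 1 have the same sign but SUM's sign differs: overflow.  */
  gcc_assert (OVERFLOW_SUM_SIGN (a, 1, sum));
  /* A and -1 have different signs, so no overflow is possible.  */
  gcc_assert (!OVERFLOW_SUM_SIGN (a, -1, a - 1));
}
#endif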
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
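/* Editorial sketch, not part of the original source: encode and decode
   are inverses of each other.  Kept out of compilation with #if 0.  */
#if 0
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  encode (words, 0xdeadbeef, -1);	/* split into four half-words */
  decode (words, &lo, &hi);		/* reassemble the doubleword */
  gcc_assert (lo == 0xdeadbeef && hi == -1);
}
#endif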
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

bool
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT) (-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
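/* Editorial sketch, not part of the original source: truncating 0x1ff
   to a signed 8-bit type with fit_double_type.  Kept out of
   compilation with #if 0.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  bool ovf = fit_double_type (0x1ff, 0, &lv, &hv, signed_char_type_node);

  /* 0x1ff truncated to 8 bits is 0xff, which sign extends to -1; the
     argument and result differ, so overflow is reported.  */
  gcc_assert (ovf && lv == (unsigned HOST_WIDE_INT) -1 && hv == -1);
}
#endif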
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs,
	or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
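/* Editorial sketch, not part of the original source: the same addition
   can overflow in one signedness and not the other.  Kept out of
   compilation with #if 0.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* Signed: -1 + 1 == 0, no overflow.  */
  gcc_assert (!add_double_with_sign (-1, -1, 1, 0, &lv, &hv, false));
  gcc_assert (lv == 0 && hv == 0);

  /* Unsigned: UMAX + 1 needs a third word, so overflow is reported.  */
  gcc_assert (add_double_with_sign (-1, -1, 1, 0, &lv, &hv, true));
}
#endif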
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
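/* Editorial sketch, not part of the original source: the one case in
   which neg_double overflows is negating the most negative doubleword,
   which is its own negation in two's complement.  Kept out of
   compilation with #if 0.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT min_hi
    = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
		       << (HOST_BITS_PER_WIDE_INT - 1));

  gcc_assert (neg_double (0, min_hi, &lv, &hv));	/* overflows */
  gcc_assert (lv == 0 && hv == min_hi);			/* -MIN == MIN */
}
#endif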
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
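/* Editorial sketch, not part of the original source: the top half of
   the 8-half-word product is what decides overflow.  Kept out of
   compilation with #if 0.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* 2 * 3 == 6 fits in the low doubleword either way.  */
  gcc_assert (!mul_double_with_sign (2, 0, 3, 0, &lv, &hv, false));
  gcc_assert (lv == 6 && hv == 0);

  /* (-1) * (-1) == 1 is exact as a signed product, but read as
     unsigned it is UMAX * UMAX, whose true product needs four words.  */
  gcc_assert (!mul_double_with_sign (-1, -1, -1, -1, &lv, &hv, false));
  gcc_assert (mul_double_with_sign (-1, -1, -1, -1, &lv, &hv, true));
}
#endif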
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
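/* Editorial sketch, not part of the original source: arithmetic
   vs. logical right shift of a negative doubleword at full 2-word
   precision.  Kept out of compilation with #if 0.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* Arithmetic shift replicates the sign bit: -8 >> 1 == -4.  */
  rshift_double (-8, -1, 1, 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 1);
  gcc_assert (lv == (unsigned HOST_WIDE_INT) -4 && hv == -1);

  /* Logical shift pulls a zero bit in at the top instead.  */
  rshift_double (-8, -1, 1, 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 0);
  gcc_assert (hv == (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) -1 >> 1));
}
#endif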
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den <= ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
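/* Editorial sketch, not part of the original source: how CODE selects
   the rounding of -7 / 2.  Kept out of compilation with #if 0.  */
#if 0
{
  unsigned HOST_WIDE_INT lq, lr;
  HOST_WIDE_INT hq, hr;

  div_and_round_double (TRUNC_DIV_EXPR, 0, -7, -1, 2, 0,
			&lq, &hq, &lr, &hr);	/* quo -3, rem -1 */
  div_and_round_double (FLOOR_DIV_EXPR, 0, -7, -1, 2, 0,
			&lq, &hq, &lr, &hr);	/* quo -4, rem  1 */
  div_and_round_double (CEIL_DIV_EXPR, 0, -7, -1, 2, 0,
			&lq, &hq, &lr, &hr);	/* quo -3, rem -1 */
  div_and_round_double (ROUND_DIV_EXPR, 0, -7, -1, 2, 0,
			&lq, &hq, &lr, &hr);	/* quo -4: ties round
						   away from zero */
}
#endif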
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h, type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
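/* Editorial sketch, not part of the original source: the intended
   pairing of the defer/undefer entry points around a fold that may
   exploit undefined signed overflow.  It assumes a tree EXPR and a
   gimple statement STMT are in scope; kept out of compilation with
   #if 0.  */
#if 0
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold (expr);		/* warnings are queued, not issued */
  /* Issue the queued warning only if folding changed anything,
     attaching STMT's location to it.  */
  fold_undefer_overflow_warnings (folded != expr, stmt, 0);
}
#endif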
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
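/* Editorial sketch, not part of the original source: for a signed type
   the only constant whose negation is unrepresentable is the minimum
   value, e.g. INT_MIN for `int'.  Kept out of compilation with #if 0.  */
#if 0
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  tree minus_one = build_int_cst (integer_type_node, -1);

  gcc_assert (!may_negate_without_overflow_p (int_min));
  gcc_assert (may_negate_without_overflow_p (minus_one));
}
#endif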
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2 (COMPLEX_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)),
			    fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1 (CONJ_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr (fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case NULL_TREE is returned.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
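/* Editorial sketch, not part of the original source: what split_tree
   hands back for a few simple inputs, with CODE == PLUS_EXPR.  It
   assumes a tree IN is in scope; kept out of compilation with #if 0.  */
#if 0
{
  tree conp, litp, minus_litp;
  tree var = split_tree (in, PLUS_EXPR, &conp, &litp, &minus_litp, 0);

  /* IN = x + 5:  var == x, litp == 5.
     IN = x - 5:  var == x, minus_litp == 5.
     IN = x + c, with c TREE_CONSTANT but not a literal:
		 var == x, conp == c.  */
}
#endif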
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
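/* Editorial sketch, not part of the original source: folding 7 + 35 at
   compile time with int_const_binop.  Kept out of compilation with
   #if 0.  */
#if 0
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 35);
  tree sum = int_const_binop (PLUS_EXPR, a, b, 0);

  /* NOTRUNC == 0, so the result is forced to fit `int'; 42 does, and
     no overflow flag is set.  */
  gcc_assert (TREE_INT_CST_LOW (sum) == 42 && !TREE_OVERFLOW (sum));
}
#endif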
1801 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1802 constant. We assume ARG1 and ARG2 have the same data type, or at least
1803 are the same kind of constant and the same machine mode. Return zero if
1804 combining the constants is not allowed in the current operating mode.
1806 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1808 static tree
1809 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1811 /* Sanity check for the recursive cases. */
1812 if (!arg1 || !arg2)
1813 return NULL_TREE;
1815 STRIP_NOPS (arg1);
1816 STRIP_NOPS (arg2);
1818 if (TREE_CODE (arg1) == INTEGER_CST)
1819 return int_const_binop (code, arg1, arg2, notrunc);
1821 if (TREE_CODE (arg1) == REAL_CST)
1823 enum machine_mode mode;
1824 REAL_VALUE_TYPE d1;
1825 REAL_VALUE_TYPE d2;
1826 REAL_VALUE_TYPE value;
1827 REAL_VALUE_TYPE result;
1828 bool inexact;
1829 tree t, type;
1831 /* The following codes are handled by real_arithmetic. */
1832 switch (code)
1834 case PLUS_EXPR:
1835 case MINUS_EXPR:
1836 case MULT_EXPR:
1837 case RDIV_EXPR:
1838 case MIN_EXPR:
1839 case MAX_EXPR:
1840 break;
1842 default:
1843 return NULL_TREE;
1846 d1 = TREE_REAL_CST (arg1);
1847 d2 = TREE_REAL_CST (arg2);
1849 type = TREE_TYPE (arg1);
1850 mode = TYPE_MODE (type);
1852 /* Don't perform operation if we honor signaling NaNs and
1853 either operand is a NaN. */
1854 if (HONOR_SNANS (mode)
1855 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1856 return NULL_TREE;
1858 /* Don't perform operation if it would raise a division
1859 by zero exception. */
1860 if (code == RDIV_EXPR
1861 && REAL_VALUES_EQUAL (d2, dconst0)
1862 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1863 return NULL_TREE;
1865 /* If either operand is a NaN, just return it. Otherwise, set up
1866 for floating-point trap; we return an overflow. */
1867 if (REAL_VALUE_ISNAN (d1))
1868 return arg1;
1869 else if (REAL_VALUE_ISNAN (d2))
1870 return arg2;
1872 inexact = real_arithmetic (&value, code, &d1, &d2);
1873 real_convert (&result, mode, &value);
1875 /* Don't constant fold this floating point operation if
1876 the result has overflowed and flag_trapping_math. */
1877 if (flag_trapping_math
1878 && MODE_HAS_INFINITIES (mode)
1879 && REAL_VALUE_ISINF (result)
1880 && !REAL_VALUE_ISINF (d1)
1881 && !REAL_VALUE_ISINF (d2))
1882 return NULL_TREE;
1884 /* Don't constant fold this floating point operation if the
1885 result may dependent upon the run-time rounding mode and
1886 flag_rounding_math is set, or if GCC's software emulation
1887 is unable to accurately represent the result. */
1888 if ((flag_rounding_math
1889 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1890 && (inexact || !real_identical (&result, &value)))
1891 return NULL_TREE;
1893 t = build_real (type, result);
1895 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1896 return t;
1899 if (TREE_CODE (arg1) == FIXED_CST)
1901 FIXED_VALUE_TYPE f1;
1902 FIXED_VALUE_TYPE f2;
1903 FIXED_VALUE_TYPE result;
1904 tree t, type;
1905 int sat_p;
1906 bool overflow_p;
1908 /* The following codes are handled by fixed_arithmetic. */
1909 switch (code)
1911 case PLUS_EXPR:
1912 case MINUS_EXPR:
1913 case MULT_EXPR:
1914 case TRUNC_DIV_EXPR:
1915 f2 = TREE_FIXED_CST (arg2);
1916 break;
1918 case LSHIFT_EXPR:
1919 case RSHIFT_EXPR:
1920 f2.data.high = TREE_INT_CST_HIGH (arg2);
1921 f2.data.low = TREE_INT_CST_LOW (arg2);
1922 f2.mode = SImode;
1923 break;
1925 default:
1926 return NULL_TREE;
1929 f1 = TREE_FIXED_CST (arg1);
1930 type = TREE_TYPE (arg1);
1931 sat_p = TYPE_SATURATING (type);
1932 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1933 t = build_fixed (type, result);
1934 /* Propagate overflow flags. */
1935 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 return t;
1940 if (TREE_CODE (arg1) == COMPLEX_CST)
1942 tree type = TREE_TYPE (arg1);
1943 tree r1 = TREE_REALPART (arg1);
1944 tree i1 = TREE_IMAGPART (arg1);
1945 tree r2 = TREE_REALPART (arg2);
1946 tree i2 = TREE_IMAGPART (arg2);
1947 tree real, imag;
1949 switch (code)
1951 case PLUS_EXPR:
1952 case MINUS_EXPR:
1953 real = const_binop (code, r1, r2, notrunc);
1954 imag = const_binop (code, i1, i2, notrunc);
1955 break;
1957 case MULT_EXPR:
1958 real = const_binop (MINUS_EXPR,
1959 const_binop (MULT_EXPR, r1, r2, notrunc),
1960 const_binop (MULT_EXPR, i1, i2, notrunc),
1961 notrunc);
1962 imag = const_binop (PLUS_EXPR,
1963 const_binop (MULT_EXPR, r1, i2, notrunc),
1964 const_binop (MULT_EXPR, i1, r2, notrunc),
1965 notrunc);
1966 break;
1968 case RDIV_EXPR:
1970 tree magsquared
1971 = const_binop (PLUS_EXPR,
1972 const_binop (MULT_EXPR, r2, r2, notrunc),
1973 const_binop (MULT_EXPR, i2, i2, notrunc),
1974 notrunc);
1975 tree t1
1976 = const_binop (PLUS_EXPR,
1977 const_binop (MULT_EXPR, r1, r2, notrunc),
1978 const_binop (MULT_EXPR, i1, i2, notrunc),
1979 notrunc);
1980 tree t2
1981 = const_binop (MINUS_EXPR,
1982 const_binop (MULT_EXPR, i1, r2, notrunc),
1983 const_binop (MULT_EXPR, r1, i2, notrunc),
1984 notrunc);
1986 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1987 code = TRUNC_DIV_EXPR;
1989 real = const_binop (code, t1, magsquared, notrunc);
1990 imag = const_binop (code, t2, magsquared, notrunc);
1992 break;
1994 default:
1995 return NULL_TREE;
1998 if (real && imag)
1999 return build_complex (type, real, imag);
2002 if (TREE_CODE (arg1) == VECTOR_CST)
2004 tree type = TREE_TYPE (arg1);
2005 int count = TYPE_VECTOR_SUBPARTS (type), i;
2006 tree elements1, elements2, list = NULL_TREE;
2008 if (TREE_CODE (arg2) != VECTOR_CST)
2009 return NULL_TREE;
2011 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2012 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2014 for (i = 0; i < count; i++)
2016 tree elem1, elem2, elem;
2018 /* The trailing elements can be empty and should be treated as 0.  */
2019 if (!elements1)
2020 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2021 else
2023 elem1 = TREE_VALUE (elements1);
2024 elements1 = TREE_CHAIN (elements1);
2027 if (!elements2)
2028 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2029 else
2031 elem2 = TREE_VALUE (elements2);
2032 elements2 = TREE_CHAIN (elements2);
2035 elem = const_binop (code, elem1, elem2, notrunc);
2037 /* It is possible that const_binop cannot handle the given
2038 code and returns NULL_TREE.  */
2039 if (elem == NULL_TREE)
2040 return NULL_TREE;
2042 list = tree_cons (NULL_TREE, elem, list);
2044 return build_vector (type, nreverse (list));
2046 return NULL_TREE;
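/* Editorial sketch (illustrative, not part of the original source): how a
   caller inside this file might use const_binop to fold "2.0 + 2.0" into a
   single REAL_CST, assuming the usual tree-building entry points
   (build_real, double_type_node, dconst2) from tree.h/real.h:

     tree two = build_real (double_type_node, dconst2);
     tree sum = const_binop (PLUS_EXPR, two, two, 0);

   SUM should then be a REAL_CST holding 4.0, or NULL_TREE if folding was
   refused, e.g. when HONOR_SNANS is set and an operand is a NaN, or when
   the fold would hide a trapping overflow.  Returning NULL_TREE rather
   than guessing is the pattern used throughout this file.  */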
2049 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2050 indicates which particular sizetype to create. */
2052 tree
2053 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2055 return build_int_cst (sizetype_tab[(int) kind], number);
2058 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2059 is a tree code. The type of the result is taken from the operands.
2060 Both must be equivalent integer types, as checked by int_binop_types_match_p.
2061 If the operands are constant, so is the result. */
2063 tree
2064 size_binop (enum tree_code code, tree arg0, tree arg1)
2066 tree type = TREE_TYPE (arg0);
2068 if (arg0 == error_mark_node || arg1 == error_mark_node)
2069 return error_mark_node;
2071 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2072 TREE_TYPE (arg1)));
2074 /* Handle the special case of two integer constants faster. */
2075 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2077 /* And some specific cases even faster than that. */
2078 if (code == PLUS_EXPR)
2080 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2081 return arg1;
2082 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2083 return arg0;
2085 else if (code == MINUS_EXPR)
2087 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2088 return arg0;
2090 else if (code == MULT_EXPR)
2092 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2093 return arg1;
2096 /* Handle general case of two integer constants. */
2097 return int_const_binop (code, arg0, arg1, 0);
2100 return fold_build2 (code, type, arg0, arg1);
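/* Editorial sketch (illustrative, not in the original source):

     size_binop (PLUS_EXPR, size_int (4), size_int (8))

   yields a sizetype INTEGER_CST of 12 via int_const_binop, and when one
   constant operand is an additive or multiplicative identity, e.g.

     size_binop (MULT_EXPR, size_one_node, size_int (8))

   the other operand is returned unchanged by the fast paths above.
   Non-constant operands fall through to fold_build2.  */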
2103 /* Given two values, either both of sizetype or both of bitsizetype,
2104 compute the difference between the two values. Return the value
2105 in signed type corresponding to the type of the operands. */
2107 tree
2108 size_diffop (tree arg0, tree arg1)
2110 tree type = TREE_TYPE (arg0);
2111 tree ctype;
2113 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2114 TREE_TYPE (arg1)));
2116 /* If the type is already signed, just do the simple thing. */
2117 if (!TYPE_UNSIGNED (type))
2118 return size_binop (MINUS_EXPR, arg0, arg1);
2120 if (type == sizetype)
2121 ctype = ssizetype;
2122 else if (type == bitsizetype)
2123 ctype = sbitsizetype;
2124 else
2125 ctype = signed_type_for (type);
2127 /* If either operand is not a constant, do the conversions to the signed
2128 type and subtract. The hardware will do the right thing with any
2129 overflow in the subtraction. */
2130 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2131 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2132 fold_convert (ctype, arg1));
2134 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2135 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2136 overflow) and negate (which can't either). Special-case a result
2137 of zero while we're here. */
2138 if (tree_int_cst_equal (arg0, arg1))
2139 return build_int_cst (ctype, 0);
2140 else if (tree_int_cst_lt (arg1, arg0))
2141 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2142 else
2143 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2144 fold_convert (ctype, size_binop (MINUS_EXPR,
2145 arg1, arg0)));
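/* Editorial sketch (not part of the original source): for two sizetype
   constants,

     size_diffop (size_int (4), size_int (8))

   returns an ssizetype INTEGER_CST of -4: since 4 < 8, the code above
   computes 8 - 4 in the unsigned type (which cannot overflow), converts
   the result to ssizetype, and negates it by subtracting from zero.  */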
2148 /* A subroutine of fold_convert_const handling conversions of an
2149 INTEGER_CST to another integer type. */
2151 static tree
2152 fold_convert_const_int_from_int (tree type, const_tree arg1)
2154 tree t;
2156 /* Given an integer constant, make new constant with new type,
2157 appropriately sign-extended or truncated. */
2158 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2159 TREE_INT_CST_HIGH (arg1),
2160 /* Don't set the overflow when
2161 converting from a pointer, */
2162 !POINTER_TYPE_P (TREE_TYPE (arg1))
2163 /* or to a sizetype with the same signedness
2164 and unchanged precision.
2165 ??? sizetype is always sign-extended,
2166 but its signedness depends on the
2167 frontend. Thus we see spurious overflows
2168 here if we do not check this. */
2169 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2170 == TYPE_PRECISION (type))
2171 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2172 == TYPE_UNSIGNED (type))
2173 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2174 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2175 || (TREE_CODE (type) == INTEGER_TYPE
2176 && TYPE_IS_SIZETYPE (type)))),
2177 (TREE_INT_CST_HIGH (arg1) < 0
2178 && (TYPE_UNSIGNED (type)
2179 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2180 | TREE_OVERFLOW (arg1));
2182 return t;
2185 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2186 to an integer type. */
2188 static tree
2189 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2191 int overflow = 0;
2192 tree t;
2194 /* The following code implements the floating point to integer
2195 conversion rules required by the Java Language Specification,
2196 that IEEE NaNs are mapped to zero and values that overflow
2197 the target precision saturate, i.e. values greater than
2198 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2199 are mapped to INT_MIN. These semantics are allowed by the
2200 C and C++ standards that simply state that the behavior of
2201 FP-to-integer conversion is unspecified upon overflow. */
2203 HOST_WIDE_INT high, low;
2204 REAL_VALUE_TYPE r;
2205 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2207 switch (code)
2209 case FIX_TRUNC_EXPR:
2210 real_trunc (&r, VOIDmode, &x);
2211 break;
2213 default:
2214 gcc_unreachable ();
2217 /* If R is NaN, return zero and show we have an overflow. */
2218 if (REAL_VALUE_ISNAN (r))
2220 overflow = 1;
2221 high = 0;
2222 low = 0;
2225 /* See if R is less than the lower bound or greater than the
2226 upper bound. */
2228 if (! overflow)
2230 tree lt = TYPE_MIN_VALUE (type);
2231 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2232 if (REAL_VALUES_LESS (r, l))
2234 overflow = 1;
2235 high = TREE_INT_CST_HIGH (lt);
2236 low = TREE_INT_CST_LOW (lt);
2240 if (! overflow)
2242 tree ut = TYPE_MAX_VALUE (type);
2243 if (ut)
2245 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2246 if (REAL_VALUES_LESS (u, r))
2248 overflow = 1;
2249 high = TREE_INT_CST_HIGH (ut);
2250 low = TREE_INT_CST_LOW (ut);
2255 if (! overflow)
2256 REAL_VALUE_TO_INT (&low, &high, r);
2258 t = force_fit_type_double (type, low, high, -1,
2259 overflow | TREE_OVERFLOW (arg1));
2260 return t;
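/* Editorial sketch (illustrative, not in the original source): given a
   REAL_CST R of type double holding 1.0e30,

     fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, r)

   saturates to TYPE_MAX_VALUE (2147483647 for a 32-bit int) and sets
   TREE_OVERFLOW on the result; a NaN input converts to 0, likewise with
   TREE_OVERFLOW set, matching the Java-style semantics described above.  */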
2263 /* A subroutine of fold_convert_const handling conversions of a
2264 FIXED_CST to an integer type. */
2266 static tree
2267 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2269 tree t;
2270 double_int temp, temp_trunc;
2271 unsigned int mode;
2273 /* Right shift FIXED_CST to temp by fbit. */
2274 temp = TREE_FIXED_CST (arg1).data;
2275 mode = TREE_FIXED_CST (arg1).mode;
2276 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2278 lshift_double (temp.low, temp.high,
2279 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2280 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2282 /* Left shift temp to temp_trunc by fbit. */
2283 lshift_double (temp.low, temp.high,
2284 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2285 &temp_trunc.low, &temp_trunc.high,
2286 SIGNED_FIXED_POINT_MODE_P (mode));
2288 else
2290 temp.low = 0;
2291 temp.high = 0;
2292 temp_trunc.low = 0;
2293 temp_trunc.high = 0;
2296 /* If FIXED_CST is negative, we need to round the value toward 0:
2297 if the fractional bits are nonzero, add 1 to TEMP.  */
2298 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2299 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2301 double_int one;
2302 one.low = 1;
2303 one.high = 0;
2304 temp = double_int_add (temp, one);
2307 /* Given a fixed-point constant, make new constant with new type,
2308 appropriately sign-extended or truncated. */
2309 t = force_fit_type_double (type, temp.low, temp.high, -1,
2310 (temp.high < 0
2311 && (TYPE_UNSIGNED (type)
2312 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2313 | TREE_OVERFLOW (arg1));
2315 return t;
2318 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2319 to another floating point type. */
2321 static tree
2322 fold_convert_const_real_from_real (tree type, const_tree arg1)
2324 REAL_VALUE_TYPE value;
2325 tree t;
2327 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2328 t = build_real (type, value);
2330 /* If converting an infinity or NAN to a representation that doesn't
2331 have one, set the overflow bit so that we can produce some kind of
2332 error message at the appropriate point if necessary. It's not the
2333 most user-friendly message, but it's better than nothing. */
2334 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2335 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2336 TREE_OVERFLOW (t) = 1;
2337 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2338 && !MODE_HAS_NANS (TYPE_MODE (type)))
2339 TREE_OVERFLOW (t) = 1;
2340 /* Regular overflow, conversion produced an infinity in a mode that
2341 can't represent them. */
2342 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2343 && REAL_VALUE_ISINF (value)
2344 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2345 TREE_OVERFLOW (t) = 1;
2346 else
2347 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2348 return t;
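/* Editorial sketch (not part of the original source): converting a
   REAL_CST holding an infinity or NaN into a TYPE whose mode cannot
   represent infinities or NaNs makes the code above mark the result with
   TREE_OVERFLOW, so a diagnostic can be issued later if needed.  Ordinary
   narrowing, e.g. double 0.1 to float, simply rounds via real_convert and
   only propagates ARG1's existing overflow flag.  */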
2351 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2352 to a floating point type. */
2354 static tree
2355 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2357 REAL_VALUE_TYPE value;
2358 tree t;
2360 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2361 t = build_real (type, value);
2363 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2364 return t;
2367 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2368 to another fixed-point type. */
2370 static tree
2371 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2373 FIXED_VALUE_TYPE value;
2374 tree t;
2375 bool overflow_p;
2377 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2378 TYPE_SATURATING (type));
2379 t = build_fixed (type, value);
2381 /* Propagate overflow flags. */
2382 if (overflow_p | TREE_OVERFLOW (arg1))
2383 TREE_OVERFLOW (t) = 1;
2384 return t;
2387 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2388 to a fixed-point type. */
2390 static tree
2391 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2393 FIXED_VALUE_TYPE value;
2394 tree t;
2395 bool overflow_p;
2397 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2398 TREE_INT_CST (arg1),
2399 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2400 TYPE_SATURATING (type));
2401 t = build_fixed (type, value);
2403 /* Propagate overflow flags. */
2404 if (overflow_p | TREE_OVERFLOW (arg1))
2405 TREE_OVERFLOW (t) = 1;
2406 return t;
2409 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2410 to a fixed-point type. */
2412 static tree
2413 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2415 FIXED_VALUE_TYPE value;
2416 tree t;
2417 bool overflow_p;
2419 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2420 &TREE_REAL_CST (arg1),
2421 TYPE_SATURATING (type));
2422 t = build_fixed (type, value);
2424 /* Propagate overflow flags. */
2425 if (overflow_p | TREE_OVERFLOW (arg1))
2426 TREE_OVERFLOW (t) = 1;
2427 return t;
2430 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2431 type TYPE. If no simplification can be done return NULL_TREE. */
2433 static tree
2434 fold_convert_const (enum tree_code code, tree type, tree arg1)
2436 if (TREE_TYPE (arg1) == type)
2437 return arg1;
2439 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2440 || TREE_CODE (type) == OFFSET_TYPE)
2442 if (TREE_CODE (arg1) == INTEGER_CST)
2443 return fold_convert_const_int_from_int (type, arg1);
2444 else if (TREE_CODE (arg1) == REAL_CST)
2445 return fold_convert_const_int_from_real (code, type, arg1);
2446 else if (TREE_CODE (arg1) == FIXED_CST)
2447 return fold_convert_const_int_from_fixed (type, arg1);
2449 else if (TREE_CODE (type) == REAL_TYPE)
2451 if (TREE_CODE (arg1) == INTEGER_CST)
2452 return build_real_from_int_cst (type, arg1);
2453 else if (TREE_CODE (arg1) == REAL_CST)
2454 return fold_convert_const_real_from_real (type, arg1);
2455 else if (TREE_CODE (arg1) == FIXED_CST)
2456 return fold_convert_const_real_from_fixed (type, arg1);
2458 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2460 if (TREE_CODE (arg1) == FIXED_CST)
2461 return fold_convert_const_fixed_from_fixed (type, arg1);
2462 else if (TREE_CODE (arg1) == INTEGER_CST)
2463 return fold_convert_const_fixed_from_int (type, arg1);
2464 else if (TREE_CODE (arg1) == REAL_CST)
2465 return fold_convert_const_fixed_from_real (type, arg1);
2467 return NULL_TREE;
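/* Editorial sketch (not in the original source): the dispatcher above
   pairs the target type kind with the constant kind, e.g.

     fold_convert_const (NOP_EXPR, long_long_integer_type_node,
                         integer_one_node)

   routes to fold_convert_const_int_from_int and returns an INTEGER_CST of
   type long long holding 1; any unhandled pairing falls out the bottom as
   NULL_TREE so callers can build an explicit conversion instead.  */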
2470 /* Construct a vector of zero elements of vector type TYPE. */
2472 static tree
2473 build_zero_vector (tree type)
2475 tree elem, list;
2476 int i, units;
2478 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2479 units = TYPE_VECTOR_SUBPARTS (type);
2481 list = NULL_TREE;
2482 for (i = 0; i < units; i++)
2483 list = tree_cons (NULL_TREE, elem, list);
2484 return build_vector (type, list);
2487 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2489 bool
2490 fold_convertible_p (const_tree type, const_tree arg)
2492 tree orig = TREE_TYPE (arg);
2494 if (type == orig)
2495 return true;
2497 if (TREE_CODE (arg) == ERROR_MARK
2498 || TREE_CODE (type) == ERROR_MARK
2499 || TREE_CODE (orig) == ERROR_MARK)
2500 return false;
2502 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2503 return true;
2505 switch (TREE_CODE (type))
2507 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2508 case POINTER_TYPE: case REFERENCE_TYPE:
2509 case OFFSET_TYPE:
2510 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2511 || TREE_CODE (orig) == OFFSET_TYPE)
2512 return true;
2513 return (TREE_CODE (orig) == VECTOR_TYPE
2514 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2516 case REAL_TYPE:
2517 case FIXED_POINT_TYPE:
2518 case COMPLEX_TYPE:
2519 case VECTOR_TYPE:
2520 case VOID_TYPE:
2521 return TREE_CODE (type) == TREE_CODE (orig);
2523 default:
2524 return false;
2528 /* Convert expression ARG to type TYPE. Used by the middle-end for
2529 simple conversions in preference to calling the front-end's convert. */
2531 tree
2532 fold_convert (tree type, tree arg)
2534 tree orig = TREE_TYPE (arg);
2535 tree tem;
2537 if (type == orig)
2538 return arg;
2540 if (TREE_CODE (arg) == ERROR_MARK
2541 || TREE_CODE (type) == ERROR_MARK
2542 || TREE_CODE (orig) == ERROR_MARK)
2543 return error_mark_node;
2545 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2546 return fold_build1 (NOP_EXPR, type, arg);
2548 switch (TREE_CODE (type))
2550 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2551 case POINTER_TYPE: case REFERENCE_TYPE:
2552 case OFFSET_TYPE:
2553 if (TREE_CODE (arg) == INTEGER_CST)
2555 tem = fold_convert_const (NOP_EXPR, type, arg);
2556 if (tem != NULL_TREE)
2557 return tem;
2559 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2560 || TREE_CODE (orig) == OFFSET_TYPE)
2561 return fold_build1 (NOP_EXPR, type, arg);
2562 if (TREE_CODE (orig) == COMPLEX_TYPE)
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2567 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2568 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2569 return fold_build1 (NOP_EXPR, type, arg);
2571 case REAL_TYPE:
2572 if (TREE_CODE (arg) == INTEGER_CST)
2574 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2575 if (tem != NULL_TREE)
2576 return tem;
2578 else if (TREE_CODE (arg) == REAL_CST)
2580 tem = fold_convert_const (NOP_EXPR, type, arg);
2581 if (tem != NULL_TREE)
2582 return tem;
2584 else if (TREE_CODE (arg) == FIXED_CST)
2586 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2587 if (tem != NULL_TREE)
2588 return tem;
2591 switch (TREE_CODE (orig))
2593 case INTEGER_TYPE:
2594 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2595 case POINTER_TYPE: case REFERENCE_TYPE:
2596 return fold_build1 (FLOAT_EXPR, type, arg);
2598 case REAL_TYPE:
2599 return fold_build1 (NOP_EXPR, type, arg);
2601 case FIXED_POINT_TYPE:
2602 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2604 case COMPLEX_TYPE:
2605 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2606 return fold_convert (type, tem);
2608 default:
2609 gcc_unreachable ();
2612 case FIXED_POINT_TYPE:
2613 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2614 || TREE_CODE (arg) == REAL_CST)
2616 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2617 if (tem != NULL_TREE)
2618 return tem;
2621 switch (TREE_CODE (orig))
2623 case FIXED_POINT_TYPE:
2624 case INTEGER_TYPE:
2625 case ENUMERAL_TYPE:
2626 case BOOLEAN_TYPE:
2627 case REAL_TYPE:
2628 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2630 case COMPLEX_TYPE:
2631 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2632 return fold_convert (type, tem);
2634 default:
2635 gcc_unreachable ();
2638 case COMPLEX_TYPE:
2639 switch (TREE_CODE (orig))
2641 case INTEGER_TYPE:
2642 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2643 case POINTER_TYPE: case REFERENCE_TYPE:
2644 case REAL_TYPE:
2645 case FIXED_POINT_TYPE:
2646 return fold_build2 (COMPLEX_EXPR, type,
2647 fold_convert (TREE_TYPE (type), arg),
2648 fold_convert (TREE_TYPE (type),
2649 integer_zero_node));
2650 case COMPLEX_TYPE:
2652 tree rpart, ipart;
2654 if (TREE_CODE (arg) == COMPLEX_EXPR)
2656 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2657 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2658 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2661 arg = save_expr (arg);
2662 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2663 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2664 rpart = fold_convert (TREE_TYPE (type), rpart);
2665 ipart = fold_convert (TREE_TYPE (type), ipart);
2666 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2669 default:
2670 gcc_unreachable ();
2673 case VECTOR_TYPE:
2674 if (integer_zerop (arg))
2675 return build_zero_vector (type);
2676 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2677 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2678 || TREE_CODE (orig) == VECTOR_TYPE);
2679 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2681 case VOID_TYPE:
2682 tem = fold_ignored_result (arg);
2683 if (TREE_CODE (tem) == MODIFY_EXPR)
2684 return tem;
2685 return fold_build1 (NOP_EXPR, type, tem);
2687 default:
2688 gcc_unreachable ();
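/* Editorial sketch (not part of the original source): fold_convert builds
   the cheapest correct conversion tree.  For example, converting an int
   expression X to complex double via the COMPLEX_TYPE arm above should
   produce

     COMPLEX_EXPR <(double) X, 0.0>

   while a complex-to-complex conversion of a non-COMPLEX_EXPR wraps X in
   a SAVE_EXPR first, so extracting the real and imaginary parts does not
   evaluate X twice.  */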
2692 /* Return false if expr can be assumed not to be an lvalue, true
2693 otherwise. */
2695 static bool
2696 maybe_lvalue_p (const_tree x)
2698 /* We only need to wrap lvalue tree codes. */
2699 switch (TREE_CODE (x))
2701 case VAR_DECL:
2702 case PARM_DECL:
2703 case RESULT_DECL:
2704 case LABEL_DECL:
2705 case FUNCTION_DECL:
2706 case SSA_NAME:
2708 case COMPONENT_REF:
2709 case INDIRECT_REF:
2710 case ALIGN_INDIRECT_REF:
2711 case MISALIGNED_INDIRECT_REF:
2712 case ARRAY_REF:
2713 case ARRAY_RANGE_REF:
2714 case BIT_FIELD_REF:
2715 case OBJ_TYPE_REF:
2717 case REALPART_EXPR:
2718 case IMAGPART_EXPR:
2719 case PREINCREMENT_EXPR:
2720 case PREDECREMENT_EXPR:
2721 case SAVE_EXPR:
2722 case TRY_CATCH_EXPR:
2723 case WITH_CLEANUP_EXPR:
2724 case COMPOUND_EXPR:
2725 case MODIFY_EXPR:
2726 case TARGET_EXPR:
2727 case COND_EXPR:
2728 case BIND_EXPR:
2729 case MIN_EXPR:
2730 case MAX_EXPR:
2731 break;
2733 default:
2734 /* Assume the worst for front-end tree codes. */
2735 if ((int) TREE_CODE (x) >= NUM_TREE_CODES)
2736 break;
2737 return false;
2740 return true;
2743 /* Return an expr equal to X but certainly not valid as an lvalue. */
2745 tree
2746 non_lvalue (tree x)
2748 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2749 us. */
2750 if (in_gimple_form)
2751 return x;
2753 if (! maybe_lvalue_p (x))
2754 return x;
2755 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2758 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2759 Zero means allow extended lvalues. */
2761 int pedantic_lvalues;
2763 /* When pedantic, return an expr equal to X but certainly not valid as a
2764 pedantic lvalue. Otherwise, return X. */
2766 static tree
2767 pedantic_non_lvalue (tree x)
2769 if (pedantic_lvalues)
2770 return non_lvalue (x);
2771 else
2772 return x;
2775 /* Given a tree comparison code, return the code that is the logical inverse
2776 of the given code. It is not safe to do this for floating-point
2777 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a flag
2778 saying whether NaNs must be honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2780 enum tree_code
2781 invert_tree_comparison (enum tree_code code, bool honor_nans)
2783 if (honor_nans && flag_trapping_math)
2784 return ERROR_MARK;
2786 switch (code)
2788 case EQ_EXPR:
2789 return NE_EXPR;
2790 case NE_EXPR:
2791 return EQ_EXPR;
2792 case GT_EXPR:
2793 return honor_nans ? UNLE_EXPR : LE_EXPR;
2794 case GE_EXPR:
2795 return honor_nans ? UNLT_EXPR : LT_EXPR;
2796 case LT_EXPR:
2797 return honor_nans ? UNGE_EXPR : GE_EXPR;
2798 case LE_EXPR:
2799 return honor_nans ? UNGT_EXPR : GT_EXPR;
2800 case LTGT_EXPR:
2801 return UNEQ_EXPR;
2802 case UNEQ_EXPR:
2803 return LTGT_EXPR;
2804 case UNGT_EXPR:
2805 return LE_EXPR;
2806 case UNGE_EXPR:
2807 return LT_EXPR;
2808 case UNLT_EXPR:
2809 return GE_EXPR;
2810 case UNLE_EXPR:
2811 return GT_EXPR;
2812 case ORDERED_EXPR:
2813 return UNORDERED_EXPR;
2814 case UNORDERED_EXPR:
2815 return ORDERED_EXPR;
2816 default:
2817 gcc_unreachable ();
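/* Editorial sketch (not in the original source): with NaNs honored, the
   logical inverse of "a < b" is "a >= b or unordered", so

     invert_tree_comparison (LT_EXPR, true)

   returns UNGE_EXPR (assuming -fno-trapping-math), while with honor_nans
   false it returns plain GE_EXPR.  When both NaNs and trapping math are
   in effect the inversion is refused with ERROR_MARK, since the
   unordered forms do not trap where the ordered originals would.  */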
2821 /* Similar, but return the comparison that results if the operands are
2822 swapped. This is safe for floating-point. */
2824 enum tree_code
2825 swap_tree_comparison (enum tree_code code)
2827 switch (code)
2829 case EQ_EXPR:
2830 case NE_EXPR:
2831 case ORDERED_EXPR:
2832 case UNORDERED_EXPR:
2833 case LTGT_EXPR:
2834 case UNEQ_EXPR:
2835 return code;
2836 case GT_EXPR:
2837 return LT_EXPR;
2838 case GE_EXPR:
2839 return LE_EXPR;
2840 case LT_EXPR:
2841 return GT_EXPR;
2842 case LE_EXPR:
2843 return GE_EXPR;
2844 case UNGT_EXPR:
2845 return UNLT_EXPR;
2846 case UNGE_EXPR:
2847 return UNLE_EXPR;
2848 case UNLT_EXPR:
2849 return UNGT_EXPR;
2850 case UNLE_EXPR:
2851 return UNGE_EXPR;
2852 default:
2853 gcc_unreachable ();
2858 /* Convert a comparison tree code from an enum tree_code representation
2859 into a compcode bit-based encoding. This function is the inverse of
2860 compcode_to_comparison. */
2862 static enum comparison_code
2863 comparison_to_compcode (enum tree_code code)
2865 switch (code)
2867 case LT_EXPR:
2868 return COMPCODE_LT;
2869 case EQ_EXPR:
2870 return COMPCODE_EQ;
2871 case LE_EXPR:
2872 return COMPCODE_LE;
2873 case GT_EXPR:
2874 return COMPCODE_GT;
2875 case NE_EXPR:
2876 return COMPCODE_NE;
2877 case GE_EXPR:
2878 return COMPCODE_GE;
2879 case ORDERED_EXPR:
2880 return COMPCODE_ORD;
2881 case UNORDERED_EXPR:
2882 return COMPCODE_UNORD;
2883 case UNLT_EXPR:
2884 return COMPCODE_UNLT;
2885 case UNEQ_EXPR:
2886 return COMPCODE_UNEQ;
2887 case UNLE_EXPR:
2888 return COMPCODE_UNLE;
2889 case UNGT_EXPR:
2890 return COMPCODE_UNGT;
2891 case LTGT_EXPR:
2892 return COMPCODE_LTGT;
2893 case UNGE_EXPR:
2894 return COMPCODE_UNGE;
2895 default:
2896 gcc_unreachable ();
2900 /* Convert a compcode bit-based encoding of a comparison operator back
2901 to GCC's enum tree_code representation. This function is the
2902 inverse of comparison_to_compcode. */
2904 static enum tree_code
2905 compcode_to_comparison (enum comparison_code code)
2907 switch (code)
2909 case COMPCODE_LT:
2910 return LT_EXPR;
2911 case COMPCODE_EQ:
2912 return EQ_EXPR;
2913 case COMPCODE_LE:
2914 return LE_EXPR;
2915 case COMPCODE_GT:
2916 return GT_EXPR;
2917 case COMPCODE_NE:
2918 return NE_EXPR;
2919 case COMPCODE_GE:
2920 return GE_EXPR;
2921 case COMPCODE_ORD:
2922 return ORDERED_EXPR;
2923 case COMPCODE_UNORD:
2924 return UNORDERED_EXPR;
2925 case COMPCODE_UNLT:
2926 return UNLT_EXPR;
2927 case COMPCODE_UNEQ:
2928 return UNEQ_EXPR;
2929 case COMPCODE_UNLE:
2930 return UNLE_EXPR;
2931 case COMPCODE_UNGT:
2932 return UNGT_EXPR;
2933 case COMPCODE_LTGT:
2934 return LTGT_EXPR;
2935 case COMPCODE_UNGE:
2936 return UNGE_EXPR;
2937 default:
2938 gcc_unreachable ();
2942 /* Return a tree for the comparison which is the combination of
2943 doing the AND or OR (depending on CODE) of the two operations LCODE
2944 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2945 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2946 if this makes the transformation invalid. */
2948 tree
2949 combine_comparisons (enum tree_code code, enum tree_code lcode,
2950 enum tree_code rcode, tree truth_type,
2951 tree ll_arg, tree lr_arg)
2953 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2954 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2955 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2956 int compcode;
2958 switch (code)
2960 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2961 compcode = lcompcode & rcompcode;
2962 break;
2964 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2965 compcode = lcompcode | rcompcode;
2966 break;
2968 default:
2969 return NULL_TREE;
2972 if (!honor_nans)
2974 /* Eliminate unordered comparisons, as well as LTGT and ORD
2975 which are not used unless the mode has NaNs. */
2976 compcode &= ~COMPCODE_UNORD;
2977 if (compcode == COMPCODE_LTGT)
2978 compcode = COMPCODE_NE;
2979 else if (compcode == COMPCODE_ORD)
2980 compcode = COMPCODE_TRUE;
2982 else if (flag_trapping_math)
2984 /* Check that the original operation and the optimized ones will trap
2985 under the same condition. */
2986 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2987 && (lcompcode != COMPCODE_EQ)
2988 && (lcompcode != COMPCODE_ORD);
2989 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2990 && (rcompcode != COMPCODE_EQ)
2991 && (rcompcode != COMPCODE_ORD);
2992 bool trap = (compcode & COMPCODE_UNORD) == 0
2993 && (compcode != COMPCODE_EQ)
2994 && (compcode != COMPCODE_ORD);
2996 /* In a short-circuited boolean expression the LHS might be
2997 such that the RHS, if evaluated, will never trap. For
2998 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2999 if neither x nor y is NaN. (This is a mixed blessing: for
3000 example, the expression above will never trap, hence
3001 optimizing it to x < y would be invalid). */
3002 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3003 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3004 rtrap = false;
3006 /* If the comparison was short-circuited, and only the RHS
3007 trapped, we may now generate a spurious trap. */
3008 if (rtrap && !ltrap
3009 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3010 return NULL_TREE;
3012 /* If we changed the conditions that cause a trap, we lose. */
3013 if ((ltrap || rtrap) != trap)
3014 return NULL_TREE;
3017 if (compcode == COMPCODE_TRUE)
3018 return constant_boolean_node (true, truth_type);
3019 else if (compcode == COMPCODE_FALSE)
3020 return constant_boolean_node (false, truth_type);
3021 else
3023 enum tree_code tcode;
3025 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3026 return fold_build2 (tcode, truth_type, ll_arg, lr_arg);
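/* Editorial sketch (not part of the original source): the compcode
   encoding makes the combination a single bit operation.  AND-ing the
   codes for LE and GE, for instance, leaves exactly the EQ bits set, so

     combine_comparisons (TRUTH_ANDIF_EXPR, LE_EXPR, GE_EXPR,
                          boolean_type_node, x, y)

   is expected to fold "x <= y && x >= y" into "x == y" (for integer
   operands, or for floats once the NaN and trap checks above allow
   the rewrite).  */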
3030 /* Return nonzero if two operands (typically of the same tree node)
3031 are necessarily equal. If either argument has side-effects this
3032 function returns zero. FLAGS modifies behavior as follows:
3034 If OEP_ONLY_CONST is set, only return nonzero for constants.
3035 This function tests whether the operands are indistinguishable;
3036 it does not test whether they are equal using C's == operation.
3037 The distinction is important for IEEE floating point, because
3038 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3039 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3041 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3042 even though it may hold multiple values during a function.
3043 This is because a GCC tree node guarantees that nothing else is
3044 executed between the evaluation of its "operands" (which may often
3045 be evaluated in arbitrary order). Hence if the operands themselves
3046 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3047 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3048 unset means assuming isochronic (or instantaneous) tree equivalence.
3049 Unless comparing arbitrary expression trees, such as from different
3050 statements, this flag can usually be left unset.
3052 If OEP_PURE_SAME is set, then pure functions with identical arguments
3053 are considered the same. It is used when the caller has other ways
3054 to ensure that global memory is unchanged in between. */
3057 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3059 /* If either is ERROR_MARK, they aren't equal. */
3060 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3061 return 0;
3063 /* Check equality of integer constants before bailing out due to
3064 precision differences. */
3065 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3066 return tree_int_cst_equal (arg0, arg1);
3068 /* If both types don't have the same signedness, then we can't consider
3069 them equal. We must check this before the STRIP_NOPS calls
3070 because they may change the signedness of the arguments. As pointers
3071 strictly don't have a signedness, require either two pointers or
3072 two non-pointers as well. */
3073 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3074 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3075 return 0;
3077 /* If both types don't have the same precision, then it is not safe
3078 to strip NOPs. */
3079 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3080 return 0;
3082 STRIP_NOPS (arg0);
3083 STRIP_NOPS (arg1);
3085 /* In case both args are comparisons but with different comparison
3086 code, try to swap the comparison operands of one arg to produce
3087 a match and compare that variant. */
3088 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3089 && COMPARISON_CLASS_P (arg0)
3090 && COMPARISON_CLASS_P (arg1))
3092 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3094 if (TREE_CODE (arg0) == swap_code)
3095 return operand_equal_p (TREE_OPERAND (arg0, 0),
3096 TREE_OPERAND (arg1, 1), flags)
3097 && operand_equal_p (TREE_OPERAND (arg0, 1),
3098 TREE_OPERAND (arg1, 0), flags);
3101 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3102 /* This is needed for conversions and for COMPONENT_REF.
3103 Might as well play it safe and always test this. */
3104 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3105 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3106 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3107 return 0;
3109 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3110 We don't care about side effects in that case because the SAVE_EXPR
3111 takes care of that for us. In all other cases, two expressions are
3112 equal if they have no side effects. If we have two identical
3113 expressions with side effects that should be treated the same due
3114 to the only side effects being identical SAVE_EXPR's, that will
3115 be detected in the recursive calls below. */
3116 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3117 && (TREE_CODE (arg0) == SAVE_EXPR
3118 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3119 return 1;
3121 /* Next handle constant cases, those for which we can return 1 even
3122 if ONLY_CONST is set. */
3123 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3124 switch (TREE_CODE (arg0))
3126 case INTEGER_CST:
3127 return tree_int_cst_equal (arg0, arg1);
3129 case FIXED_CST:
3130 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3131 TREE_FIXED_CST (arg1));
3133 case REAL_CST:
3134 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3135 TREE_REAL_CST (arg1)))
3136 return 1;
3139 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3141 /* If we do not distinguish between signed and unsigned zero,
3142 consider them equal. */
3143 if (real_zerop (arg0) && real_zerop (arg1))
3144 return 1;
3146 return 0;
3148 case VECTOR_CST:
3150 tree v1, v2;
3152 v1 = TREE_VECTOR_CST_ELTS (arg0);
3153 v2 = TREE_VECTOR_CST_ELTS (arg1);
3154 while (v1 && v2)
3156 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3157 flags))
3158 return 0;
3159 v1 = TREE_CHAIN (v1);
3160 v2 = TREE_CHAIN (v2);
3163 return v1 == v2;
3166 case COMPLEX_CST:
3167 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3168 flags)
3169 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3170 flags));
3172 case STRING_CST:
3173 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3174 && ! memcmp (TREE_STRING_POINTER (arg0),
3175 TREE_STRING_POINTER (arg1),
3176 TREE_STRING_LENGTH (arg0)));
3178 case ADDR_EXPR:
3179 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3181 default:
3182 break;
3185 if (flags & OEP_ONLY_CONST)
3186 return 0;
3188 /* Define macros to test an operand from arg0 and arg1 for equality and a
3189 variant that allows null and views null as being different from any
3190 non-null value. In the latter case, if either is null, then both
3191 must be; otherwise, do the normal comparison. */
3192 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3193 TREE_OPERAND (arg1, N), flags)
3195 #define OP_SAME_WITH_NULL(N) \
3196 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3197 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3199 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3201 case tcc_unary:
3202 /* Two conversions are equal only if signedness and modes match. */
3203 switch (TREE_CODE (arg0))
3205 CASE_CONVERT:
3206 case FIX_TRUNC_EXPR:
3207 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3208 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3209 return 0;
3210 break;
3211 default:
3212 break;
3215 return OP_SAME (0);
3218 case tcc_comparison:
3219 case tcc_binary:
3220 if (OP_SAME (0) && OP_SAME (1))
3221 return 1;
3223 /* For commutative ops, allow the other order. */
3224 return (commutative_tree_code (TREE_CODE (arg0))
3225 && operand_equal_p (TREE_OPERAND (arg0, 0),
3226 TREE_OPERAND (arg1, 1), flags)
3227 && operand_equal_p (TREE_OPERAND (arg0, 1),
3228 TREE_OPERAND (arg1, 0), flags));
3230 case tcc_reference:
3231 /* If either of the pointer (or reference) expressions we are
3232 dereferencing contain a side effect, these cannot be equal. */
3233 if (TREE_SIDE_EFFECTS (arg0)
3234 || TREE_SIDE_EFFECTS (arg1))
3235 return 0;
3237 switch (TREE_CODE (arg0))
3239 case INDIRECT_REF:
3240 case ALIGN_INDIRECT_REF:
3241 case MISALIGNED_INDIRECT_REF:
3242 case REALPART_EXPR:
3243 case IMAGPART_EXPR:
3244 return OP_SAME (0);
3246 case ARRAY_REF:
3247 case ARRAY_RANGE_REF:
3248 /* Operands 2 and 3 may be null.
3249 Compare the array index by value if it is constant first as we
3250 may have different types but same value here. */
3251 return (OP_SAME (0)
3252 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3253 TREE_OPERAND (arg1, 1))
3254 || OP_SAME (1))
3255 && OP_SAME_WITH_NULL (2)
3256 && OP_SAME_WITH_NULL (3));
3258 case COMPONENT_REF:
3259 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3260 may be NULL when we're called to compare MEM_EXPRs. */
3261 return OP_SAME_WITH_NULL (0)
3262 && OP_SAME (1)
3263 && OP_SAME_WITH_NULL (2);
3265 case BIT_FIELD_REF:
3266 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3268 default:
3269 return 0;
3272 case tcc_expression:
3273 switch (TREE_CODE (arg0))
3275 case ADDR_EXPR:
3276 case TRUTH_NOT_EXPR:
3277 return OP_SAME (0);
3279 case TRUTH_ANDIF_EXPR:
3280 case TRUTH_ORIF_EXPR:
3281 return OP_SAME (0) && OP_SAME (1);
3283 case TRUTH_AND_EXPR:
3284 case TRUTH_OR_EXPR:
3285 case TRUTH_XOR_EXPR:
3286 if (OP_SAME (0) && OP_SAME (1))
3287 return 1;
3289 /* Otherwise take into account this is a commutative operation. */
3290 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3291 TREE_OPERAND (arg1, 1), flags)
3292 && operand_equal_p (TREE_OPERAND (arg0, 1),
3293 TREE_OPERAND (arg1, 0), flags));
3295 case COND_EXPR:
3296 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3298 default:
3299 return 0;
3302 case tcc_vl_exp:
3303 switch (TREE_CODE (arg0))
3305 case CALL_EXPR:
3306 /* If the CALL_EXPRs call different functions, then they
3307 clearly cannot be equal. */
3308 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3309 flags))
3310 return 0;
3313 unsigned int cef = call_expr_flags (arg0);
3314 if (flags & OEP_PURE_SAME)
3315 cef &= ECF_CONST | ECF_PURE;
3316 else
3317 cef &= ECF_CONST;
3318 if (!cef)
3319 return 0;
3322 /* Now see if all the arguments are the same. */
3324 const_call_expr_arg_iterator iter0, iter1;
3325 const_tree a0, a1;
3326 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3327 a1 = first_const_call_expr_arg (arg1, &iter1);
3328 a0 && a1;
3329 a0 = next_const_call_expr_arg (&iter0),
3330 a1 = next_const_call_expr_arg (&iter1))
3331 if (! operand_equal_p (a0, a1, flags))
3332 return 0;
3334 /* If we get here and both argument lists are exhausted
3335 then the CALL_EXPRs are equal. */
3336 return ! (a0 || a1);
3338 default:
3339 return 0;
3342 case tcc_declaration:
3343 /* Consider __builtin_sqrt equal to sqrt. */
3344 return (TREE_CODE (arg0) == FUNCTION_DECL
3345 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3346 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3347 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3349 default:
3350 return 0;
3353 #undef OP_SAME
3354 #undef OP_SAME_WITH_NULL
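/* Editorial sketch (not in the original source): operand_equal_p treats
   commutative operands symmetrically, so "a + b" and "b + a" compare
   equal, and the OEP flags tighten or relax the notion of equality:

     operand_equal_p (t1, t2, OEP_ONLY_CONST)   (constants only)
     operand_equal_p (t1, t2, OEP_PURE_SAME)    (pure calls may match)

   Note the REAL_CST case above: -0.0 and 0.0 are considered equal only
   when the mode does not honor signed zeros, keeping IEEE semantics
   intact.  */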
3357 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3358 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3360 When in doubt, return 0. */
3362 static int
3363 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3365 int unsignedp1, unsignedpo;
3366 tree primarg0, primarg1, primother;
3367 unsigned int correct_width;
3369 if (operand_equal_p (arg0, arg1, 0))
3370 return 1;
3372 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3373 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3374 return 0;
3376 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3377 and see if the inner values are the same. This removes any
3378 signedness comparison, which doesn't matter here. */
3379 primarg0 = arg0, primarg1 = arg1;
3380 STRIP_NOPS (primarg0);
3381 STRIP_NOPS (primarg1);
3382 if (operand_equal_p (primarg0, primarg1, 0))
3383 return 1;
3385 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3386 actual comparison operand, ARG0.
3388 First throw away any conversions to wider types
3389 already present in the operands. */
3391 primarg1 = get_narrower (arg1, &unsignedp1);
3392 primother = get_narrower (other, &unsignedpo);
3394 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3395 if (unsignedp1 == unsignedpo
3396 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3397 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3399 tree type = TREE_TYPE (arg0);
3401 /* Make sure shorter operand is extended the right way
3402 to match the longer operand. */
3403 primarg1 = fold_convert (signed_or_unsigned_type_for
3404 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3406 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3407 return 1;
3410 return 0;
3413 /* See if ARG is an expression that is either a comparison or is performing
3414 arithmetic on comparisons. The comparisons must only be comparing
3415 two different values, which will be stored in *CVAL1 and *CVAL2; if
3416 they are nonzero it means that some operands have already been found.
3417 No variables may be used anywhere else in the expression except in the
3418 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3419 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3421 If this is true, return 1. Otherwise, return zero. */
3423 static int
3424 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3426 enum tree_code code = TREE_CODE (arg);
3427 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3429 /* We can handle some of the tcc_expression cases here. */
3430 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3431 tclass = tcc_unary;
3432 else if (tclass == tcc_expression
3433 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3434 || code == COMPOUND_EXPR))
3435 tclass = tcc_binary;
3437 else if (tclass == tcc_expression && code == SAVE_EXPR
3438 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3440 /* If we've already found a CVAL1 or CVAL2, this expression is
3441 too complex to handle. */
3442 if (*cval1 || *cval2)
3443 return 0;
3445 tclass = tcc_unary;
3446 *save_p = 1;
3449 switch (tclass)
3451 case tcc_unary:
3452 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3454 case tcc_binary:
3455 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3456 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3457 cval1, cval2, save_p));
3459 case tcc_constant:
3460 return 1;
3462 case tcc_expression:
3463 if (code == COND_EXPR)
3464 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3465 cval1, cval2, save_p)
3466 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3467 cval1, cval2, save_p)
3468 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3469 cval1, cval2, save_p));
3470 return 0;
3472 case tcc_comparison:
3473 /* First see if we can handle the first operand, then the second. For
3474 the second operand, we know *CVAL1 can't be zero. It must be that
3475 one side of the comparison is each of the values; test for the
3476 case where this isn't true by failing if the two operands
3477 are the same. */
3479 if (operand_equal_p (TREE_OPERAND (arg, 0),
3480 TREE_OPERAND (arg, 1), 0))
3481 return 0;
3483 if (*cval1 == 0)
3484 *cval1 = TREE_OPERAND (arg, 0);
3485 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3487 else if (*cval2 == 0)
3488 *cval2 = TREE_OPERAND (arg, 0);
3489 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3491 else
3492 return 0;
3494 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3496 else if (*cval2 == 0)
3497 *cval2 = TREE_OPERAND (arg, 1);
3498 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3500 else
3501 return 0;
3503 return 1;
3505 default:
3506 return 0;
3510 /* ARG is a tree that is known to contain just arithmetic operations and
3511 comparisons. Evaluate the operations in the tree substituting NEW0 for
3512 any occurrence of OLD0 as an operand of a comparison and likewise for
3513 NEW1 and OLD1. */
3515 static tree
3516 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3518 tree type = TREE_TYPE (arg);
3519 enum tree_code code = TREE_CODE (arg);
3520 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3522 /* We can handle some of the tcc_expression cases here. */
3523 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3524 tclass = tcc_unary;
3525 else if (tclass == tcc_expression
3526 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3527 tclass = tcc_binary;
3529 switch (tclass)
3531 case tcc_unary:
3532 return fold_build1 (code, type,
3533 eval_subst (TREE_OPERAND (arg, 0),
3534 old0, new0, old1, new1));
3536 case tcc_binary:
3537 return fold_build2 (code, type,
3538 eval_subst (TREE_OPERAND (arg, 0),
3539 old0, new0, old1, new1),
3540 eval_subst (TREE_OPERAND (arg, 1),
3541 old0, new0, old1, new1));
3543 case tcc_expression:
3544 switch (code)
3546 case SAVE_EXPR:
3547 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3549 case COMPOUND_EXPR:
3550 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3552 case COND_EXPR:
3553 return fold_build3 (code, type,
3554 eval_subst (TREE_OPERAND (arg, 0),
3555 old0, new0, old1, new1),
3556 eval_subst (TREE_OPERAND (arg, 1),
3557 old0, new0, old1, new1),
3558 eval_subst (TREE_OPERAND (arg, 2),
3559 old0, new0, old1, new1));
3560 default:
3561 break;
3563 /* Fall through - ??? */
3565 case tcc_comparison:
3567 tree arg0 = TREE_OPERAND (arg, 0);
3568 tree arg1 = TREE_OPERAND (arg, 1);
3570 /* We need to check both for exact equality and tree equality. The
3571 former will be true if the operand has a side-effect. In that
3572 case, we know the operand occurred exactly once. */
3574 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3575 arg0 = new0;
3576 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3577 arg0 = new1;
3579 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3580 arg1 = new0;
3581 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3582 arg1 = new1;
3584 return fold_build2 (code, type, arg0, arg1);
3587 default:
3588 return arg;
3592 /* Return a tree for the case when the result of an expression is RESULT
3593 converted to TYPE and OMITTED was previously an operand of the expression
3594 but is now not needed (e.g., we folded OMITTED * 0).
3596 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3597 the conversion of RESULT to TYPE. */
3599 tree
3600 omit_one_operand (tree type, tree result, tree omitted)
3602 tree t = fold_convert (type, result);
3604 /* If the resulting operand is an empty statement, just return the omitted
3605 statement cast to void. */
3606 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3607 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3609 if (TREE_SIDE_EFFECTS (omitted))
3610 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3612 return non_lvalue (t);
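/* Editorial sketch (not part of the original source): omit_one_operand is
   how folds like "f () * 0" keep their side effects.  Folding the
   multiplication to 0 while preserving the call, i.e.

     omit_one_operand (type, integer_zero_node, call_to_f)

   produces COMPOUND_EXPR <f (), 0>; when the omitted operand has no side
   effects, the result is just the converted constant wrapped as a
   non-lvalue.  */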
3615 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3617 static tree
3618 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3620 tree t = fold_convert (type, result);
3622 /* If the resulting operand is an empty statement, just return the omitted
3623 statement cast to void. */
3624 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3625 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3627 if (TREE_SIDE_EFFECTS (omitted))
3628 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3630 return pedantic_non_lvalue (t);
3633 /* Return a tree for the case when the result of an expression is RESULT
3634 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3635 of the expression but are now not needed.
3637 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3638 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3639 evaluated before OMITTED2. Otherwise, if neither has side effects,
3640 just do the conversion of RESULT to TYPE. */
3642 tree
3643 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3645 tree t = fold_convert (type, result);
3647 if (TREE_SIDE_EFFECTS (omitted2))
3648 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3649 if (TREE_SIDE_EFFECTS (omitted1))
3650 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3652 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3656 /* Return a simplified tree node for the truth-negation of ARG. This
3657 never alters ARG itself. We assume that ARG is an operation that
3658 returns a truth value (0 or 1).
3660 FIXME: one would think we would fold the result, but it causes
3661 problems with the dominator optimizer. */
3663 tree
3664 fold_truth_not_expr (tree arg)
3666 tree t, type = TREE_TYPE (arg);
3667 enum tree_code code = TREE_CODE (arg);
3669 /* If this is a comparison, we can simply invert it, except for
3670 floating-point non-equality comparisons, in which case we just
3671 enclose a TRUTH_NOT_EXPR around what we have. */
3673 if (TREE_CODE_CLASS (code) == tcc_comparison)
3675 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3676 if (FLOAT_TYPE_P (op_type)
3677 && flag_trapping_math
3678 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3679 && code != NE_EXPR && code != EQ_EXPR)
3680 return NULL_TREE;
3682 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3683 if (code == ERROR_MARK)
3684 return NULL_TREE;
3686 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3687 if (EXPR_HAS_LOCATION (arg))
3688 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3689 return t;
3692 switch (code)
3694 case INTEGER_CST:
3695 return constant_boolean_node (integer_zerop (arg), type);
3697 case TRUTH_AND_EXPR:
3698 t = build2 (TRUTH_OR_EXPR, type,
3699 invert_truthvalue (TREE_OPERAND (arg, 0)),
3700 invert_truthvalue (TREE_OPERAND (arg, 1)));
3701 break;
3703 case TRUTH_OR_EXPR:
3704 t = build2 (TRUTH_AND_EXPR, type,
3705 invert_truthvalue (TREE_OPERAND (arg, 0)),
3706 invert_truthvalue (TREE_OPERAND (arg, 1)));
3707 break;
3709 case TRUTH_XOR_EXPR:
3710 /* Here we can invert either operand. We invert the first operand
3711 unless the second operand is a TRUTH_NOT_EXPR in which case our
3712 result is the XOR of the first operand with the inside of the
3713 negation of the second operand. */
3715 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3716 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3717 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3718 else
3719 t = build2 (TRUTH_XOR_EXPR, type,
3720 invert_truthvalue (TREE_OPERAND (arg, 0)),
3721 TREE_OPERAND (arg, 1));
3722 break;
3724 case TRUTH_ANDIF_EXPR:
3725 t = build2 (TRUTH_ORIF_EXPR, type,
3726 invert_truthvalue (TREE_OPERAND (arg, 0)),
3727 invert_truthvalue (TREE_OPERAND (arg, 1)));
3728 break;
3730 case TRUTH_ORIF_EXPR:
3731 t = build2 (TRUTH_ANDIF_EXPR, type,
3732 invert_truthvalue (TREE_OPERAND (arg, 0)),
3733 invert_truthvalue (TREE_OPERAND (arg, 1)));
3734 break;
3736 case TRUTH_NOT_EXPR:
3737 return TREE_OPERAND (arg, 0);
3739 case COND_EXPR:
3741 tree arg1 = TREE_OPERAND (arg, 1);
3742 tree arg2 = TREE_OPERAND (arg, 2);
3743 /* A COND_EXPR may have a throw as one operand, which
3744 then has void type. Just leave void operands
3745 as they are. */
3746 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3747 VOID_TYPE_P (TREE_TYPE (arg1))
3748 ? arg1 : invert_truthvalue (arg1),
3749 VOID_TYPE_P (TREE_TYPE (arg2))
3750 ? arg2 : invert_truthvalue (arg2));
3751 break;
3754 case COMPOUND_EXPR:
3755 t = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3756 invert_truthvalue (TREE_OPERAND (arg, 1)));
3757 break;
3759 case NON_LVALUE_EXPR:
3760 return invert_truthvalue (TREE_OPERAND (arg, 0));
3762 CASE_CONVERT:
3763 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3765 t = build1 (TRUTH_NOT_EXPR, type, arg);
3766 break;
3769 /* ... fall through ... */
3771 case FLOAT_EXPR:
3772 t = build1 (TREE_CODE (arg), type,
3773 invert_truthvalue (TREE_OPERAND (arg, 0)));
3774 break;
3776 case BIT_AND_EXPR:
3777 if (!integer_onep (TREE_OPERAND (arg, 1)))
3778 return NULL_TREE;
3779 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3780 break;
3782 case SAVE_EXPR:
3783 t = build1 (TRUTH_NOT_EXPR, type, arg);
3784 break;
3786 case CLEANUP_POINT_EXPR:
3787 t = build1 (CLEANUP_POINT_EXPR, type,
3788 invert_truthvalue (TREE_OPERAND (arg, 0)));
3789 break;
3791 default:
3792 t = NULL_TREE;
3793 break;
3796 if (t && EXPR_HAS_LOCATION (arg))
3797 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3799 return t;
3802 /* Return a simplified tree node for the truth-negation of ARG. This
3803 never alters ARG itself. We assume that ARG is an operation that
3804 returns a truth value (0 or 1).
3806 FIXME: one would think we would fold the result, but it causes
3807 problems with the dominator optimizer. */
3809 tree
3810 invert_truthvalue (tree arg)
3812 tree tem;
3814 if (TREE_CODE (arg) == ERROR_MARK)
3815 return arg;
3817 tem = fold_truth_not_expr (arg);
3818 if (!tem)
3819 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3821 return tem;
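/* Editorial sketch (not in the original source): invert_truthvalue pushes
   the negation inward using De Morgan's laws, e.g.

     invert_truthvalue (a && b)  becomes  !a || !b
     invert_truthvalue (a < b)   becomes  a >= b  (when safe for the mode)

   and only falls back to wrapping a TRUTH_NOT_EXPR when
   fold_truth_not_expr cannot simplify, e.g. for a trapping
   floating-point inequality.  */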
3824 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3825 operands are another bit-wise operation with a common input. If so,
3826 distribute the bit operations to save an operation and possibly two if
3827 constants are involved. For example, convert
3828 (A | B) & (A | C) into A | (B & C)
3829 Further simplification will occur if B and C are constants.
3831 If this optimization cannot be done, 0 will be returned. */
3833 static tree
3834 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3836 tree common;
3837 tree left, right;
3839 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3840 || TREE_CODE (arg0) == code
3841 || (TREE_CODE (arg0) != BIT_AND_EXPR
3842 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3843 return 0;
3845 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3847 common = TREE_OPERAND (arg0, 0);
3848 left = TREE_OPERAND (arg0, 1);
3849 right = TREE_OPERAND (arg1, 1);
3851 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3853 common = TREE_OPERAND (arg0, 0);
3854 left = TREE_OPERAND (arg0, 1);
3855 right = TREE_OPERAND (arg1, 0);
3857 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3859 common = TREE_OPERAND (arg0, 1);
3860 left = TREE_OPERAND (arg0, 0);
3861 right = TREE_OPERAND (arg1, 1);
3863 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3865 common = TREE_OPERAND (arg0, 1);
3866 left = TREE_OPERAND (arg0, 0);
3867 right = TREE_OPERAND (arg1, 0);
3869 else
3870 return 0;
3872 common = fold_convert (type, common);
3873 left = fold_convert (type, left);
3874 right = fold_convert (type, right);
3875 return fold_build2 (TREE_CODE (arg0), type, common,
3876 fold_build2 (code, type, left, right));
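/* Editorial sketch (not part of the original source): with a constant in
   each operand, the distribution above collapses two masks into one, e.g.

     (x | 0x3) & (x | 0x5)  becomes  x | (0x3 & 0x5)  ==  x | 0x1

   so a single OR remains once the constant AND folds away.  */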
3879 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3880 with code CODE. This optimization is unsafe. */
3881 static tree
3882 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3884 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3885 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3887 /* (A / C) +- (B / C) -> (A +- B) / C. */
3888 if (mul0 == mul1
3889 && operand_equal_p (TREE_OPERAND (arg0, 1),
3890 TREE_OPERAND (arg1, 1), 0))
3891 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3892 fold_build2 (code, type,
3893 TREE_OPERAND (arg0, 0),
3894 TREE_OPERAND (arg1, 0)),
3895 TREE_OPERAND (arg0, 1));
3897 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3898 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3899 TREE_OPERAND (arg1, 0), 0)
3900 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3901 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3903 REAL_VALUE_TYPE r0, r1;
3904 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3905 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3906 if (!mul0)
3907 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3908 if (!mul1)
3909 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3910 real_arithmetic (&r0, code, &r0, &r1);
3911 return fold_build2 (MULT_EXPR, type,
3912 TREE_OPERAND (arg0, 0),
3913 build_real (type, r0));
3916 return NULL_TREE;
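/* Illustrative sketch (not part of the original source): the effect
   of distribute_real_division on (A / C) + (B / C).  As noted above,
   the transformation is unsafe under strict IEEE semantics (rounding
   can differ), so it only applies under relaxed floating-point rules.
   The function name is hypothetical.  */

static double
example_distribute_division (double a, double b, double c)
{
  /* (a / c) + (b / c) becomes (a + b) / c, saving one division.  */
  return (a + b) / c;
}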
3919 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3920 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3922 static tree
3923 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3924 HOST_WIDE_INT bitpos, int unsignedp)
3926 tree result, bftype;
3928 if (bitpos == 0)
3930 tree size = TYPE_SIZE (TREE_TYPE (inner));
3931 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3932 || POINTER_TYPE_P (TREE_TYPE (inner)))
3933 && host_integerp (size, 0)
3934 && tree_low_cst (size, 0) == bitsize)
3935 return fold_convert (type, inner);
3938 bftype = type;
3939 if (TYPE_PRECISION (bftype) != bitsize
3940 || TYPE_UNSIGNED (bftype) == !unsignedp)
3941 bftype = build_nonstandard_integer_type (bitsize, 0);
3943 result = build3 (BIT_FIELD_REF, bftype, inner,
3944 size_int (bitsize), bitsize_int (bitpos));
3946 if (bftype != type)
3947 result = fold_convert (type, result);
3949 return result;
3952 /* Optimize a bit-field compare.
3954 There are two cases: First is a compare against a constant and the
3955 second is a comparison of two items where the fields are at the same
3956 bit position relative to the start of a chunk (byte, halfword, word)
3957 large enough to contain it. In these cases we can avoid the shift
3958 implicit in bitfield extractions.
3960 For constants, we emit a compare of the shifted constant with the
3961 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3962 compared. For two fields at the same position, we do the ANDs with the
3963 similar mask and compare the result of the ANDs.
3965 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3966 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3967 are the left and right operands of the comparison, respectively.
3969 If the optimization described above can be done, we return the resulting
3970 tree. Otherwise we return zero. */
3972 static tree
3973 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3974 tree lhs, tree rhs)
3976 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3977 tree type = TREE_TYPE (lhs);
3978 tree signed_type, unsigned_type;
3979 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3980 enum machine_mode lmode, rmode, nmode;
3981 int lunsignedp, runsignedp;
3982 int lvolatilep = 0, rvolatilep = 0;
3983 tree linner, rinner = NULL_TREE;
3984 tree mask;
3985 tree offset;
3987 /* Get all the information about the extractions being done. If the bit size
3988 is the same as the size of the underlying object, we aren't doing an
3989 extraction at all and so can do nothing. We also don't want to
3990 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3991 then will no longer be able to replace it. */
3992 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3993 &lunsignedp, &lvolatilep, false);
3994 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3995 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3996 return 0;
3998 if (!const_p)
4000 /* If this is not a constant, we can only do something if bit positions,
4001 sizes, and signedness are the same. */
4002 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4003 &runsignedp, &rvolatilep, false);
4005 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4006 || lunsignedp != runsignedp || offset != 0
4007 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4008 return 0;
4011 /* See if we can find a mode to refer to this field. We should be able to,
4012 but fail if we can't. */
4013 nmode = get_best_mode (lbitsize, lbitpos,
4014 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4015 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4016 TYPE_ALIGN (TREE_TYPE (rinner))),
4017 word_mode, lvolatilep || rvolatilep);
4018 if (nmode == VOIDmode)
4019 return 0;
4021 /* Set signed and unsigned types of the precision of this mode for the
4022 shifts below. */
4023 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4024 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4026 /* Compute the bit position and size for the new reference and our offset
4027 within it. If the new reference is the same size as the original, we
4028 won't optimize anything, so return zero. */
4029 nbitsize = GET_MODE_BITSIZE (nmode);
4030 nbitpos = lbitpos & ~ (nbitsize - 1);
4031 lbitpos -= nbitpos;
4032 if (nbitsize == lbitsize)
4033 return 0;
4035 if (BYTES_BIG_ENDIAN)
4036 lbitpos = nbitsize - lbitsize - lbitpos;
4038 /* Make the mask to be used against the extracted field. */
4039 mask = build_int_cst_type (unsigned_type, -1);
4040 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4041 mask = const_binop (RSHIFT_EXPR, mask,
4042 size_int (nbitsize - lbitsize - lbitpos), 0);
4044 if (! const_p)
4045 /* If not comparing with constant, just rework the comparison
4046 and return. */
4047 return fold_build2 (code, compare_type,
4048 fold_build2 (BIT_AND_EXPR, unsigned_type,
4049 make_bit_field_ref (linner,
4050 unsigned_type,
4051 nbitsize, nbitpos,
4052 1),
4053 mask),
4054 fold_build2 (BIT_AND_EXPR, unsigned_type,
4055 make_bit_field_ref (rinner,
4056 unsigned_type,
4057 nbitsize, nbitpos,
4058 1),
4059 mask));
4061 /* Otherwise, we are handling the constant case. See if the constant is too
4062 big for the field. Warn and return a tree for 0 (false) if so. We do
4063 this not only for its own sake, but to avoid having to test for this
4064 error case below. If we didn't, we might generate wrong code.
4066 For unsigned fields, the constant shifted right by the field length should
4067 be all zero. For signed fields, the high-order bits should agree with
4068 the sign bit. */
4070 if (lunsignedp)
4072 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4073 fold_convert (unsigned_type, rhs),
4074 size_int (lbitsize), 0)))
4076 warning (0, "comparison is always %d due to width of bit-field",
4077 code == NE_EXPR);
4078 return constant_boolean_node (code == NE_EXPR, compare_type);
4081 else
4083 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4084 size_int (lbitsize - 1), 0);
4085 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4087 warning (0, "comparison is always %d due to width of bit-field",
4088 code == NE_EXPR);
4089 return constant_boolean_node (code == NE_EXPR, compare_type);
4093 /* Single-bit compares should always be against zero. */
4094 if (lbitsize == 1 && ! integer_zerop (rhs))
4096 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4097 rhs = build_int_cst (type, 0);
4100 /* Make a new bitfield reference, shift the constant over the
4101 appropriate number of bits and mask it with the computed mask
4102 (in case this was a signed field). If we changed it, make a new one. */
4103 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4104 if (lvolatilep)
4106 TREE_SIDE_EFFECTS (lhs) = 1;
4107 TREE_THIS_VOLATILE (lhs) = 1;
4110 rhs = const_binop (BIT_AND_EXPR,
4111 const_binop (LSHIFT_EXPR,
4112 fold_convert (unsigned_type, rhs),
4113 size_int (lbitpos), 0),
4114 mask, 0);
4116 return build2 (code, compare_type,
4117 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4118 rhs);
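/* Illustrative sketch (not part of the original source): the kind of
   comparison optimize_bit_field_compare improves.  Testing a
   bit-field against a constant can be done by masking the containing
   chunk and comparing, avoiding the extract-and-shift; the actual
   mask and shift are target- and layout-dependent.  All names are
   hypothetical.  */

struct example_flags
{
  unsigned tag : 3;
  unsigned rest : 29;
};

static int
example_bit_field_compare (const struct example_flags *p)
{
  /* Folded, roughly: (containing_word & mask) == (5 << shift).  */
  return p->tag == 5;
}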
4121 /* Subroutine for fold_truthop: decode a field reference.
4123 If EXP is a comparison reference, we return the innermost reference.
4125 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4126 set to the starting bit number.
4128 If the innermost field can be completely contained in a mode-sized
4129 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4131 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4132 otherwise it is not changed.
4134 *PUNSIGNEDP is set to the signedness of the field.
4136 *PMASK is set to the mask used. This is either contained in a
4137 BIT_AND_EXPR or derived from the width of the field.
4139 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4141 Return 0 if this is not a component reference or is one that we can't
4142 do anything with. */
4144 static tree
4145 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4146 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4147 int *punsignedp, int *pvolatilep,
4148 tree *pmask, tree *pand_mask)
4150 tree outer_type = 0;
4151 tree and_mask = 0;
4152 tree mask, inner, offset;
4153 tree unsigned_type;
4154 unsigned int precision;
4156 /* All the optimizations using this function assume integer fields.
4157 There are problems with FP fields since the type_for_size call
4158 below can fail for, e.g., XFmode. */
4159 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4160 return 0;
4162 /* We are interested in the bare arrangement of bits, so strip everything
4163 that doesn't affect the machine mode. However, record the type of the
4164 outermost expression if it may matter below. */
4165 if (CONVERT_EXPR_P (exp)
4166 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4167 outer_type = TREE_TYPE (exp);
4168 STRIP_NOPS (exp);
4170 if (TREE_CODE (exp) == BIT_AND_EXPR)
4172 and_mask = TREE_OPERAND (exp, 1);
4173 exp = TREE_OPERAND (exp, 0);
4174 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4175 if (TREE_CODE (and_mask) != INTEGER_CST)
4176 return 0;
4179 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4180 punsignedp, pvolatilep, false);
4181 if ((inner == exp && and_mask == 0)
4182 || *pbitsize < 0 || offset != 0
4183 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4184 return 0;
4186 /* If the number of bits in the reference is the same as the bitsize of
4187 the outer type, then the outer type gives the signedness. Otherwise
4188 (in case of a small bitfield) the signedness is unchanged. */
4189 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4190 *punsignedp = TYPE_UNSIGNED (outer_type);
4192 /* Compute the mask to access the bitfield. */
4193 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4194 precision = TYPE_PRECISION (unsigned_type);
4196 mask = build_int_cst_type (unsigned_type, -1);
4198 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4199 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4201 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4202 if (and_mask != 0)
4203 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4204 fold_convert (unsigned_type, and_mask), mask);
4206 *pmask = mask;
4207 *pand_mask = and_mask;
4208 return inner;
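/* Illustrative sketch (not part of the original source): the shape of
   operand decode_field_reference takes apart.  For a comparison
   operand such as a (possibly converted and masked) bit-field, it
   returns the underlying object and reports the field's mask through
   *PMASK, which lets fold_truthop merge adjacent field tests like the
   pair below into one masked compare.  All names are hypothetical.  */

struct example_rec
{
  unsigned a : 4;
  unsigned b : 4;
};

static int
example_mergeable_field_tests (const struct example_rec *q)
{
  /* Candidates for merging into a single compare of the byte that
     contains both fields.  */
  return q->a == 2 && q->b == 4;
}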
4211 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4212 bit positions. */
4214 static int
4215 all_ones_mask_p (const_tree mask, int size)
4217 tree type = TREE_TYPE (mask);
4218 unsigned int precision = TYPE_PRECISION (type);
4219 tree tmask;
4221 tmask = build_int_cst_type (signed_type_for (type), -1);
4223 return
4224 tree_int_cst_equal (mask,
4225 const_binop (RSHIFT_EXPR,
4226 const_binop (LSHIFT_EXPR, tmask,
4227 size_int (precision - size),
4228 0),
4229 size_int (precision - size), 0));
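/* Illustrative sketch (not part of the original source): what
   all_ones_mask_p computes, in plain C.  A mask of SIZE low-order
   ones equals (1 << SIZE) - 1; this sketch assumes 0 < size < 64 so
   the shift is well defined.  The helper name is hypothetical.  */

static int
example_low_ones_p (unsigned long long mask, int size)
{
  return mask == ((1ULL << size) - 1);
}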
4232 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4233 represents the sign bit of EXP's type. If EXP represents a sign
4234 or zero extension, also test VAL against the unextended type.
4235 The return value is the (sub)expression whose sign bit is VAL,
4236 or NULL_TREE otherwise. */
4238 static tree
4239 sign_bit_p (tree exp, const_tree val)
4241 unsigned HOST_WIDE_INT mask_lo, lo;
4242 HOST_WIDE_INT mask_hi, hi;
4243 int width;
4244 tree t;
4246 /* Tree EXP must have an integral type. */
4247 t = TREE_TYPE (exp);
4248 if (! INTEGRAL_TYPE_P (t))
4249 return NULL_TREE;
4251 /* Tree VAL must be an integer constant. */
4252 if (TREE_CODE (val) != INTEGER_CST
4253 || TREE_OVERFLOW (val))
4254 return NULL_TREE;
4256 width = TYPE_PRECISION (t);
4257 if (width > HOST_BITS_PER_WIDE_INT)
4259 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4260 lo = 0;
4262 mask_hi = ((unsigned HOST_WIDE_INT) -1
4263 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4264 mask_lo = -1;
4266 else
4268 hi = 0;
4269 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4271 mask_hi = 0;
4272 mask_lo = ((unsigned HOST_WIDE_INT) -1
4273 >> (HOST_BITS_PER_WIDE_INT - width));
4276 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4277 treat VAL as if it were unsigned. */
4278 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4279 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4280 return exp;
4282 /* Handle extension from a narrower type. */
4283 if (TREE_CODE (exp) == NOP_EXPR
4284 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4285 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4287 return NULL_TREE;
4290 /* Subroutine for fold_truthop: determine if an operand is simple enough
4291 to be evaluated unconditionally. */
4293 static int
4294 simple_operand_p (const_tree exp)
4296 /* Strip any conversions that don't change the machine mode. */
4297 STRIP_NOPS (exp);
4299 return (CONSTANT_CLASS_P (exp)
4300 || TREE_CODE (exp) == SSA_NAME
4301 || (DECL_P (exp)
4302 && ! TREE_ADDRESSABLE (exp)
4303 && ! TREE_THIS_VOLATILE (exp)
4304 && ! DECL_NONLOCAL (exp)
4305 /* Don't regard global variables as simple. They may be
4306 allocated in ways unknown to the compiler (shared memory,
4307 #pragma weak, etc). */
4308 && ! TREE_PUBLIC (exp)
4309 && ! DECL_EXTERNAL (exp)
4310 /* Loading a static variable is unduly expensive, but global
4311 registers aren't expensive. */
4312 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4315 /* The following functions are subroutines to fold_range_test and allow it to
4316 try to change a logical combination of comparisons into a range test.
4318 For example, both
4319 X == 2 || X == 3 || X == 4 || X == 5
4320 and
4321 X >= 2 && X <= 5
4322 are converted to
4323 (unsigned) (X - 2) <= 3
4325 We describe each set of comparisons as being either inside or outside
4326 a range, using a variable named like IN_P, and then describe the
4327 range with a lower and upper bound. If one of the bounds is omitted,
4328 it represents either the highest or lowest value of the type.
4330 In the comments below, we represent a range by two numbers in brackets
4331 preceded by a "+" to designate being inside that range, or a "-" to
4332 designate being outside that range, so the condition can be inverted by
4333 flipping the prefix. An omitted bound is represented by a "-". For
4334 example, "- [-, 10]" means being outside the range starting at the lowest
4335 possible value and ending at 10, in other words, being greater than 10.
4336 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4337 always false.
4339 We set up things so that the missing bounds are handled in a consistent
4340 manner so neither a missing bound nor "true" and "false" need to be
4341 handled using a special case. */
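/* Illustrative sketch (not part of the original source): the range
   test described above, written out.  The subtraction is done in the
   unsigned type, mirroring the "(unsigned) (X - 2) <= 3" form while
   avoiding signed overflow at the extremes.  The function name is
   hypothetical.  */

static int
example_in_range_2_to_5 (int x)
{
  /* Folded form of x == 2 || x == 3 || x == 4 || x == 5,
     and equally of x >= 2 && x <= 5.  */
  return (unsigned) x - 2 <= 3;
}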
4343 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4344 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4345 and UPPER1_P are nonzero if the respective argument is an upper bound
4346 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4347 must be specified for a comparison. ARG1 will be converted to ARG0's
4348 type if both are specified. */
4350 static tree
4351 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4352 tree arg1, int upper1_p)
4354 tree tem;
4355 int result;
4356 int sgn0, sgn1;
4358 /* If neither arg represents infinity, do the normal operation.
4359 Else, if not a comparison, return infinity. Else handle the special
4360 comparison rules. Note that most of the cases below won't occur, but
4361 are handled for consistency. */
4363 if (arg0 != 0 && arg1 != 0)
4365 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4366 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4367 STRIP_NOPS (tem);
4368 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4371 if (TREE_CODE_CLASS (code) != tcc_comparison)
4372 return 0;
4374 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4375 for neither. In real maths, we cannot assume open ended ranges are
4376 the same. But, this is computer arithmetic, where numbers are finite.
4377 We can therefore make the transformation of any unbounded range with
4378 the value Z, Z being greater than any representable number. This permits
4379 us to treat unbounded ranges as equal. */
4380 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4381 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4382 switch (code)
4384 case EQ_EXPR:
4385 result = sgn0 == sgn1;
4386 break;
4387 case NE_EXPR:
4388 result = sgn0 != sgn1;
4389 break;
4390 case LT_EXPR:
4391 result = sgn0 < sgn1;
4392 break;
4393 case LE_EXPR:
4394 result = sgn0 <= sgn1;
4395 break;
4396 case GT_EXPR:
4397 result = sgn0 > sgn1;
4398 break;
4399 case GE_EXPR:
4400 result = sgn0 >= sgn1;
4401 break;
4402 default:
4403 gcc_unreachable ();
4406 return constant_boolean_node (result, type);
4409 /* Given EXP, a logical expression, set the range it is testing into
4410 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4411 actually being tested. *PLOW and *PHIGH will be made of the same
4412 type as the returned expression. If EXP is not a comparison, we
4413 will most likely not be returning a useful value and range. Set
4414 *STRICT_OVERFLOW_P to true if the return value is only valid
4415 because signed overflow is undefined; otherwise, do not change
4416 *STRICT_OVERFLOW_P. */
4418 tree
4419 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4420 bool *strict_overflow_p)
4422 enum tree_code code;
4423 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4424 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4425 int in_p, n_in_p;
4426 tree low, high, n_low, n_high;
4428 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4429 and see if we can refine the range. Some of the cases below may not
4430 happen, but it doesn't seem worth worrying about this. We "continue"
4431 the outer loop when we've changed something; otherwise we "break"
4432 the switch, which will "break" the while. */
4434 in_p = 0;
4435 low = high = build_int_cst (TREE_TYPE (exp), 0);
4437 while (1)
4439 code = TREE_CODE (exp);
4440 exp_type = TREE_TYPE (exp);
4442 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4444 if (TREE_OPERAND_LENGTH (exp) > 0)
4445 arg0 = TREE_OPERAND (exp, 0);
4446 if (TREE_CODE_CLASS (code) == tcc_comparison
4447 || TREE_CODE_CLASS (code) == tcc_unary
4448 || TREE_CODE_CLASS (code) == tcc_binary)
4449 arg0_type = TREE_TYPE (arg0);
4450 if (TREE_CODE_CLASS (code) == tcc_binary
4451 || TREE_CODE_CLASS (code) == tcc_comparison
4452 || (TREE_CODE_CLASS (code) == tcc_expression
4453 && TREE_OPERAND_LENGTH (exp) > 1))
4454 arg1 = TREE_OPERAND (exp, 1);
4457 switch (code)
4459 case TRUTH_NOT_EXPR:
4460 in_p = ! in_p, exp = arg0;
4461 continue;
4463 case EQ_EXPR: case NE_EXPR:
4464 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4465 /* We can only do something if the range is testing for zero
4466 and if the second operand is an integer constant. Note that
4467 saying something is "in" the range we make is done by
4468 complementing IN_P since it will be set in the initial case of
4469 being not equal to zero; "out" is leaving it alone. */
4470 if (low == 0 || high == 0
4471 || ! integer_zerop (low) || ! integer_zerop (high)
4472 || TREE_CODE (arg1) != INTEGER_CST)
4473 break;
4475 switch (code)
4477 case NE_EXPR: /* - [c, c] */
4478 low = high = arg1;
4479 break;
4480 case EQ_EXPR: /* + [c, c] */
4481 in_p = ! in_p, low = high = arg1;
4482 break;
4483 case GT_EXPR: /* - [-, c] */
4484 low = 0, high = arg1;
4485 break;
4486 case GE_EXPR: /* + [c, -] */
4487 in_p = ! in_p, low = arg1, high = 0;
4488 break;
4489 case LT_EXPR: /* - [c, -] */
4490 low = arg1, high = 0;
4491 break;
4492 case LE_EXPR: /* + [-, c] */
4493 in_p = ! in_p, low = 0, high = arg1;
4494 break;
4495 default:
4496 gcc_unreachable ();
4499 /* If this is an unsigned comparison, we also know that EXP is
4500 greater than or equal to zero. We base the range tests we make
4501 on that fact, so we record it here so we can parse existing
4502 range tests. We test arg0_type since often the return type
4503 of, e.g. EQ_EXPR, is boolean. */
4504 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4506 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4507 in_p, low, high, 1,
4508 build_int_cst (arg0_type, 0),
4509 NULL_TREE))
4510 break;
4512 in_p = n_in_p, low = n_low, high = n_high;
4514 /* If the high bound is missing, but we have a nonzero low
4515 bound, reverse the range so it goes from zero to the low bound
4516 minus 1. */
4517 if (high == 0 && low && ! integer_zerop (low))
4519 in_p = ! in_p;
4520 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4521 integer_one_node, 0);
4522 low = build_int_cst (arg0_type, 0);
4526 exp = arg0;
4527 continue;
4529 case NEGATE_EXPR:
4530 /* (-x) IN [a,b] -> x in [-b, -a] */
4531 n_low = range_binop (MINUS_EXPR, exp_type,
4532 build_int_cst (exp_type, 0),
4533 0, high, 1);
4534 n_high = range_binop (MINUS_EXPR, exp_type,
4535 build_int_cst (exp_type, 0),
4536 0, low, 0);
4537 low = n_low, high = n_high;
4538 exp = arg0;
4539 continue;
4541 case BIT_NOT_EXPR:
4542 /* ~ X -> -X - 1 */
4543 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4544 build_int_cst (exp_type, 1));
4545 continue;
4547 case PLUS_EXPR: case MINUS_EXPR:
4548 if (TREE_CODE (arg1) != INTEGER_CST)
4549 break;
4551 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4552 move a constant to the other side. */
4553 if (!TYPE_UNSIGNED (arg0_type)
4554 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4555 break;
4557 /* If EXP is signed, any overflow in the computation is undefined,
4558 so we don't worry about it so long as our computations on
4559 the bounds don't overflow. For unsigned, overflow is defined
4560 and this is exactly the right thing. */
4561 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4562 arg0_type, low, 0, arg1, 0);
4563 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4564 arg0_type, high, 1, arg1, 0);
4565 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4566 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4567 break;
4569 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4570 *strict_overflow_p = true;
4572 /* Check for an unsigned range which has wrapped around the maximum
4573 value thus making n_high < n_low, and normalize it. */
4574 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4576 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4577 integer_one_node, 0);
4578 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4579 integer_one_node, 0);
4581 /* If the range is of the form +/- [ x+1, x ], we won't
4582 be able to normalize it. But then, it represents the
4583 whole range or the empty set, so make it
4584 +/- [ -, - ]. */
4585 if (tree_int_cst_equal (n_low, low)
4586 && tree_int_cst_equal (n_high, high))
4587 low = high = 0;
4588 else
4589 in_p = ! in_p;
4591 else
4592 low = n_low, high = n_high;
4594 exp = arg0;
4595 continue;
4597 CASE_CONVERT: case NON_LVALUE_EXPR:
4598 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4599 break;
4601 if (! INTEGRAL_TYPE_P (arg0_type)
4602 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4603 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4604 break;
4606 n_low = low, n_high = high;
4608 if (n_low != 0)
4609 n_low = fold_convert (arg0_type, n_low);
4611 if (n_high != 0)
4612 n_high = fold_convert (arg0_type, n_high);
4615 /* If we're converting arg0 from an unsigned type, to exp,
4616 a signed type, we will be doing the comparison as unsigned.
4617 The tests above have already verified that LOW and HIGH
4618 are both positive.
4620 So we have to ensure that we will handle large unsigned
4621 values the same way that the current signed bounds treat
4622 negative values. */
4624 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4626 tree high_positive;
4627 tree equiv_type;
4628 /* For fixed-point modes, we need to pass the saturating flag
4629 as the 2nd parameter. */
4630 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4631 equiv_type = lang_hooks.types.type_for_mode
4632 (TYPE_MODE (arg0_type),
4633 TYPE_SATURATING (arg0_type));
4634 else
4635 equiv_type = lang_hooks.types.type_for_mode
4636 (TYPE_MODE (arg0_type), 1);
4638 /* A range without an upper bound is, naturally, unbounded.
4639 Since convert would have cropped a very large value, use
4640 the max value for the destination type. */
4641 high_positive
4642 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4643 : TYPE_MAX_VALUE (arg0_type);
4645 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4646 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4647 fold_convert (arg0_type,
4648 high_positive),
4649 build_int_cst (arg0_type, 1));
4651 /* If the low bound is specified, "and" the range with the
4652 range for which the original unsigned value will be
4653 positive. */
4654 if (low != 0)
4656 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4657 1, n_low, n_high, 1,
4658 fold_convert (arg0_type,
4659 integer_zero_node),
4660 high_positive))
4661 break;
4663 in_p = (n_in_p == in_p);
4665 else
4667 /* Otherwise, "or" the range with the range of the input
4668 that will be interpreted as negative. */
4669 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4670 0, n_low, n_high, 1,
4671 fold_convert (arg0_type,
4672 integer_zero_node),
4673 high_positive))
4674 break;
4676 in_p = (in_p != n_in_p);
4680 exp = arg0;
4681 low = n_low, high = n_high;
4682 continue;
4684 default:
4685 break;
4688 break;
4691 /* If EXP is a constant, we can evaluate whether this is true or false. */
4692 if (TREE_CODE (exp) == INTEGER_CST)
4694 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4695 exp, 0, low, 0))
4696 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4697 exp, 1, high, 1)));
4698 low = high = 0;
4699 exp = 0;
4702 *pin_p = in_p, *plow = low, *phigh = high;
4703 return exp;
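/* Illustrative sketch (not part of the original source): the
   NEGATE_EXPR rewrite in make_range.  "-x in [2, 5]" denotes the same
   set as "x in [-5, -2]", so the bounds are negated and swapped and
   the walk continues on x.  The function name is hypothetical.  */

static int
example_negated_range (int x)
{
  /* Equivalent to: -x >= 2 && -x <= 5.  */
  return x >= -5 && x <= -2;
}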
4706 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4707 type, TYPE, return an expression to test if EXP is in (or out of, depending
4708 on IN_P) the range. Return 0 if the test couldn't be created. */
4710 tree
4711 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4713 tree etype = TREE_TYPE (exp), value;
4715 #ifdef HAVE_canonicalize_funcptr_for_compare
4716 /* Disable this optimization for function pointer expressions
4717 on targets that require function pointer canonicalization. */
4718 if (HAVE_canonicalize_funcptr_for_compare
4719 && TREE_CODE (etype) == POINTER_TYPE
4720 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4721 return NULL_TREE;
4722 #endif
4724 if (! in_p)
4726 value = build_range_check (type, exp, 1, low, high);
4727 if (value != 0)
4728 return invert_truthvalue (value);
4730 return 0;
4733 if (low == 0 && high == 0)
4734 return build_int_cst (type, 1);
4736 if (low == 0)
4737 return fold_build2 (LE_EXPR, type, exp,
4738 fold_convert (etype, high));
4740 if (high == 0)
4741 return fold_build2 (GE_EXPR, type, exp,
4742 fold_convert (etype, low));
4744 if (operand_equal_p (low, high, 0))
4745 return fold_build2 (EQ_EXPR, type, exp,
4746 fold_convert (etype, low));
4748 if (integer_zerop (low))
4750 if (! TYPE_UNSIGNED (etype))
4752 etype = unsigned_type_for (etype);
4753 high = fold_convert (etype, high);
4754 exp = fold_convert (etype, exp);
4756 return build_range_check (type, exp, 1, 0, high);
4759 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4760 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4762 unsigned HOST_WIDE_INT lo;
4763 HOST_WIDE_INT hi;
4764 int prec;
4766 prec = TYPE_PRECISION (etype);
4767 if (prec <= HOST_BITS_PER_WIDE_INT)
4769 hi = 0;
4770 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4772 else
4774 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4775 lo = (unsigned HOST_WIDE_INT) -1;
4778 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4780 if (TYPE_UNSIGNED (etype))
4782 tree signed_etype = signed_type_for (etype);
4783 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4784 etype
4785 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4786 else
4787 etype = signed_etype;
4788 exp = fold_convert (etype, exp);
4790 return fold_build2 (GT_EXPR, type, exp,
4791 build_int_cst (etype, 0));
4795 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4796 This requires wrap-around arithmetic for the type of the expression.
4797 First make sure that arithmetic in this type is valid, then make sure
4798 that it wraps around. */
4799 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4800 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4801 TYPE_UNSIGNED (etype));
4803 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4805 tree utype, minv, maxv;
4807 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4808 for the type in question, as we rely on this here. */
4809 utype = unsigned_type_for (etype);
4810 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4811 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4812 integer_one_node, 1);
4813 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4815 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4816 minv, 1, maxv, 1)))
4817 etype = utype;
4818 else
4819 return 0;
4822 high = fold_convert (etype, high);
4823 low = fold_convert (etype, low);
4824 exp = fold_convert (etype, exp);
4826 value = const_binop (MINUS_EXPR, high, low, 0);
4829 if (POINTER_TYPE_P (etype))
4831 if (value != 0 && !TREE_OVERFLOW (value))
4833 low = fold_convert (sizetype, low);
4834 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4835 return build_range_check (type,
4836 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4837 1, build_int_cst (etype, 0), value);
4839 return 0;
4842 if (value != 0 && !TREE_OVERFLOW (value))
4843 return build_range_check (type,
4844 fold_build2 (MINUS_EXPR, etype, exp, low),
4845 1, build_int_cst (etype, 0), value);
4847 return 0;
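/* Illustrative sketch (not part of the original source): the
   subtract-and-compare form build_range_check produces for a range
   with a nonzero low bound, as in the classic digit test.  The
   function name is hypothetical.  */

static int
example_is_digit (unsigned char c)
{
  /* c >= '0' && c <= '9' becomes one unsigned comparison.  */
  return (unsigned) (c - '0') <= 9;
}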
4850 /* Return the predecessor of VAL in its type, handling the infinite case. */
4852 static tree
4853 range_predecessor (tree val)
4855 tree type = TREE_TYPE (val);
4857 if (INTEGRAL_TYPE_P (type)
4858 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4859 return 0;
4860 else
4861 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4864 /* Return the successor of VAL in its type, handling the infinite case. */
4866 static tree
4867 range_successor (tree val)
4869 tree type = TREE_TYPE (val);
4871 if (INTEGRAL_TYPE_P (type)
4872 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4873 return 0;
4874 else
4875 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4878 /* Given two ranges, see if we can merge them into one. Return 1 if we
4879 can, 0 if we can't. Set the output range into the specified parameters. */
4881 bool
4882 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4883 tree high0, int in1_p, tree low1, tree high1)
4885 int no_overlap;
4886 int subset;
4887 int temp;
4888 tree tem;
4889 int in_p;
4890 tree low, high;
4891 int lowequal = ((low0 == 0 && low1 == 0)
4892 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4893 low0, 0, low1, 0)));
4894 int highequal = ((high0 == 0 && high1 == 0)
4895 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4896 high0, 1, high1, 1)));
4898 /* Make range 0 be the range that starts first, or ends last if they
4899 start at the same value. Swap them if it isn't. */
4900 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4901 low0, 0, low1, 0))
4902 || (lowequal
4903 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4904 high1, 1, high0, 1))))
4906 temp = in0_p, in0_p = in1_p, in1_p = temp;
4907 tem = low0, low0 = low1, low1 = tem;
4908 tem = high0, high0 = high1, high1 = tem;
4911 /* Now flag two cases, whether the ranges are disjoint or whether the
4912 second range is totally subsumed in the first. Note that the tests
4913 below are simplified by the ones above. */
4914 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4915 high0, 1, low1, 0));
4916 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4917 high1, 1, high0, 1));
4919 /* We now have four cases, depending on whether we are including or
4920 excluding the two ranges. */
4921 if (in0_p && in1_p)
4923 /* If they don't overlap, the result is false. If the second range
4924 is a subset it is the result. Otherwise, the range is from the start
4925 of the second to the end of the first. */
4926 if (no_overlap)
4927 in_p = 0, low = high = 0;
4928 else if (subset)
4929 in_p = 1, low = low1, high = high1;
4930 else
4931 in_p = 1, low = low1, high = high0;
4934 else if (in0_p && ! in1_p)
4936 /* If they don't overlap, the result is the first range. If they are
4937 equal, the result is false. If the second range is a subset of the
4938 first, and the ranges begin at the same place, we go from just after
4939 the end of the second range to the end of the first. If the second
4940 range is not a subset of the first, or if it is a subset and both
4941 ranges end at the same place, the range starts at the start of the
4942 first range and ends just before the second range.
4943 Otherwise, we can't describe this as a single range. */
4944 if (no_overlap)
4945 in_p = 1, low = low0, high = high0;
4946 else if (lowequal && highequal)
4947 in_p = 0, low = high = 0;
4948 else if (subset && lowequal)
4950 low = range_successor (high1);
4951 high = high0;
4952 in_p = 1;
4953 if (low == 0)
4955 /* We are in the weird situation where high0 > high1 but
4956 high1 has no successor. Punt. */
4957 return 0;
4960 else if (! subset || highequal)
4962 low = low0;
4963 high = range_predecessor (low1);
4964 in_p = 1;
4965 if (high == 0)
4967 /* low0 < low1 but low1 has no predecessor. Punt. */
4968 return 0;
4971 else
4972 return 0;
4975 else if (! in0_p && in1_p)
4977 /* If they don't overlap, the result is the second range. If the second
4978 is a subset of the first, the result is false. Otherwise,
4979 the range starts just after the first range and ends at the
4980 end of the second. */
4981 if (no_overlap)
4982 in_p = 1, low = low1, high = high1;
4983 else if (subset || highequal)
4984 in_p = 0, low = high = 0;
4985 else
4987 low = range_successor (high0);
4988 high = high1;
4989 in_p = 1;
4990 if (low == 0)
4992 /* high1 > high0 but high0 has no successor. Punt. */
4993 return 0;
4998 else
5000 /* The case where we are excluding both ranges. Here the complex case
5001 is if they don't overlap. In that case, the only time we have a
5002 range is if they are adjacent. If the second is a subset of the
5003 first, the result is the first. Otherwise, the range to exclude
5004 starts at the beginning of the first range and ends at the end of the
5005 second. */
5006 if (no_overlap)
5008 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5009 range_successor (high0),
5010 1, low1, 0)))
5011 in_p = 0, low = low0, high = high1;
5012 else
5014 /* Canonicalize - [min, x] into - [-, x]. */
5015 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5016 switch (TREE_CODE (TREE_TYPE (low0)))
5018 case ENUMERAL_TYPE:
5019 if (TYPE_PRECISION (TREE_TYPE (low0))
5020 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5021 break;
5022 /* FALLTHROUGH */
5023 case INTEGER_TYPE:
5024 if (tree_int_cst_equal (low0,
5025 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5026 low0 = 0;
5027 break;
5028 case POINTER_TYPE:
5029 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5030 && integer_zerop (low0))
5031 low0 = 0;
5032 break;
5033 default:
5034 break;
5037 /* Canonicalize - [x, max] into - [x, -]. */
5038 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5039 switch (TREE_CODE (TREE_TYPE (high1)))
5041 case ENUMERAL_TYPE:
5042 if (TYPE_PRECISION (TREE_TYPE (high1))
5043 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5044 break;
5045 /* FALLTHROUGH */
5046 case INTEGER_TYPE:
5047 if (tree_int_cst_equal (high1,
5048 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5049 high1 = 0;
5050 break;
5051 case POINTER_TYPE:
5052 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5053 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5054 high1, 1,
5055 integer_one_node, 1)))
5056 high1 = 0;
5057 break;
5058 default:
5059 break;
5062 /* The ranges might be also adjacent between the maximum and
5063 minimum values of the given type. For
5064 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5065 return + [x + 1, y - 1]. */
5066 if (low0 == 0 && high1 == 0)
5068 low = range_successor (high0);
5069 high = range_predecessor (low1);
5070 if (low == 0 || high == 0)
5071 return 0;
5073 in_p = 1;
5075 else
5076 return 0;
5079 else if (subset)
5080 in_p = 0, low = low0, high = high0;
5081 else
5082 in_p = 0, low = low0, high = high1;
5085 *pin_p = in_p, *plow = low, *phigh = high;
5086 return 1;
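/* Illustrative sketch (not part of the original source): the
   in0_p && in1_p case of merge_ranges.  Two overlapping inclusive
   ranges combine into the span from the later start to the earlier
   end.  The function name is hypothetical.  */

static int
example_merged_and_ranges (int x)
{
  /* (x >= 2 && x <= 9) && (x >= 4 && x <= 12) merges to
     x in [4, 9].  */
  return x >= 4 && x <= 9;
}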
5090 /* Subroutine of fold, looking inside expressions of the form
5091 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5092 of the COND_EXPR. This function is also used to optimize
5093 A op B ? C : A, by reversing the comparison first.
5095 Return a folded expression whose code is not a COND_EXPR
5096 anymore, or NULL_TREE if no folding opportunity is found. */
5098 static tree
5099 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5101 enum tree_code comp_code = TREE_CODE (arg0);
5102 tree arg00 = TREE_OPERAND (arg0, 0);
5103 tree arg01 = TREE_OPERAND (arg0, 1);
5104 tree arg1_type = TREE_TYPE (arg1);
5105 tree tem;
5107 STRIP_NOPS (arg1);
5108 STRIP_NOPS (arg2);
5110 /* If we have A op 0 ? A : -A, consider applying the following
5111 transformations:
5113 A == 0? A : -A same as -A
5114 A != 0? A : -A same as A
5115 A >= 0? A : -A same as abs (A)
5116 A > 0? A : -A same as abs (A)
5117 A <= 0? A : -A same as -abs (A)
5118 A < 0? A : -A same as -abs (A)
5120 None of these transformations work for modes with signed
5121 zeros. If A is +/-0, the first two transformations will
5122 change the sign of the result (from +0 to -0, or vice
5123 versa). The last four will fix the sign of the result,
5124 even though the original expressions could be positive or
5125 negative, depending on the sign of A.
5127 Note that all these transformations are correct if A is
5128 NaN, since the two alternatives (A and -A) are also NaNs. */
5129 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5130 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5131 ? real_zerop (arg01)
5132 : integer_zerop (arg01))
5133 && ((TREE_CODE (arg2) == NEGATE_EXPR
5134 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5135 /* In the case that A is of the form X-Y, '-A' (arg2) may
5136 have already been folded to Y-X, check for that. */
5137 || (TREE_CODE (arg1) == MINUS_EXPR
5138 && TREE_CODE (arg2) == MINUS_EXPR
5139 && operand_equal_p (TREE_OPERAND (arg1, 0),
5140 TREE_OPERAND (arg2, 1), 0)
5141 && operand_equal_p (TREE_OPERAND (arg1, 1),
5142 TREE_OPERAND (arg2, 0), 0))))
5143 switch (comp_code)
5145 case EQ_EXPR:
5146 case UNEQ_EXPR:
5147 tem = fold_convert (arg1_type, arg1);
5148 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5149 case NE_EXPR:
5150 case LTGT_EXPR:
5151 return pedantic_non_lvalue (fold_convert (type, arg1));
5152 case UNGE_EXPR:
5153 case UNGT_EXPR:
5154 if (flag_trapping_math)
5155 break;
5156 /* Fall through. */
5157 case GE_EXPR:
5158 case GT_EXPR:
5159 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5160 arg1 = fold_convert (signed_type_for
5161 (TREE_TYPE (arg1)), arg1);
5162 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5163 return pedantic_non_lvalue (fold_convert (type, tem));
5164 case UNLE_EXPR:
5165 case UNLT_EXPR:
5166 if (flag_trapping_math)
5167 break;
5168 case LE_EXPR:
5169 case LT_EXPR:
5170 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5171 arg1 = fold_convert (signed_type_for
5172 (TREE_TYPE (arg1)), arg1);
5173 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5174 return negate_expr (fold_convert (type, tem));
5175 default:
5176 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5177 break;
5180 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5181 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5182 both transformations are correct when A is NaN: A != 0
5183 is then true, and A == 0 is false. */
5185 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5186 && integer_zerop (arg01) && integer_zerop (arg2))
5188 if (comp_code == NE_EXPR)
5189 return pedantic_non_lvalue (fold_convert (type, arg1));
5190 else if (comp_code == EQ_EXPR)
5191 return build_int_cst (type, 0);
5194 /* Try some transformations of A op B ? A : B.
5196 A == B? A : B same as B
5197 A != B? A : B same as A
5198 A >= B? A : B same as max (A, B)
5199 A > B? A : B same as max (B, A)
5200 A <= B? A : B same as min (A, B)
5201 A < B? A : B same as min (B, A)
5203 As above, these transformations don't work in the presence
5204 of signed zeros. For example, if A and B are zeros of
5205 opposite sign, the first two transformations will change
5206 the sign of the result. In the last four, the original
5207 expressions give different results for (A=+0, B=-0) and
5208 (A=-0, B=+0), but the transformed expressions do not.
5210 The first two transformations are correct if either A or B
5211 is a NaN. In the first transformation, the condition will
5212 be false, and B will indeed be chosen. In the case of the
5213 second transformation, the condition A != B will be true,
5214 and A will be chosen.
5216 The conversions to max() and min() are not correct if B is
5217 a number and A is not. The conditions in the original
5218 expressions will be false, so all four give B. The min()
5219 and max() versions would give a NaN instead. */
5220 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5221 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5222 /* Avoid these transformations if the COND_EXPR may be used
5223 as an lvalue in the C++ front-end. PR c++/19199. */
5224 && (in_gimple_form
5225 || (strcmp (lang_hooks.name, "GNU C++") != 0
5226 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5227 || ! maybe_lvalue_p (arg1)
5228 || ! maybe_lvalue_p (arg2)))
5230 tree comp_op0 = arg00;
5231 tree comp_op1 = arg01;
5232 tree comp_type = TREE_TYPE (comp_op0);
5234 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5235 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5237 comp_type = type;
5238 comp_op0 = arg1;
5239 comp_op1 = arg2;
5242 switch (comp_code)
5244 case EQ_EXPR:
5245 return pedantic_non_lvalue (fold_convert (type, arg2));
5246 case NE_EXPR:
5247 return pedantic_non_lvalue (fold_convert (type, arg1));
5248 case LE_EXPR:
5249 case LT_EXPR:
5250 case UNLE_EXPR:
5251 case UNLT_EXPR:
5252 /* In C++ a ?: expression can be an lvalue, so put the
5253 operand which will be used if they are equal first
5254 so that we can convert this back to the
5255 corresponding COND_EXPR. */
5256 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5258 comp_op0 = fold_convert (comp_type, comp_op0);
5259 comp_op1 = fold_convert (comp_type, comp_op1);
5260 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5261 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5262 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5263 return pedantic_non_lvalue (fold_convert (type, tem));
5265 break;
5266 case GE_EXPR:
5267 case GT_EXPR:
5268 case UNGE_EXPR:
5269 case UNGT_EXPR:
5270 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5272 comp_op0 = fold_convert (comp_type, comp_op0);
5273 comp_op1 = fold_convert (comp_type, comp_op1);
5274 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5275 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5276 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5277 return pedantic_non_lvalue (fold_convert (type, tem));
5279 break;
5280 case UNEQ_EXPR:
5281 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5282 return pedantic_non_lvalue (fold_convert (type, arg2));
5283 break;
5284 case LTGT_EXPR:
5285 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5286 return pedantic_non_lvalue (fold_convert (type, arg1));
5287 break;
5288 default:
5289 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5290 break;
5294 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5295 we might still be able to simplify this. For example,
5296 if C1 is one less or one more than C2, this might have started
5297 out as a MIN or MAX and been transformed by this function.
5298 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5300 if (INTEGRAL_TYPE_P (type)
5301 && TREE_CODE (arg01) == INTEGER_CST
5302 && TREE_CODE (arg2) == INTEGER_CST)
5303 switch (comp_code)
5305 case EQ_EXPR:
5306 /* We can replace A with C1 in this case. */
5307 arg1 = fold_convert (type, arg01);
5308 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5310 case LT_EXPR:
5311 /* If C1 is C2 + 1, this is min(A, C2). */
5312 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5313 OEP_ONLY_CONST)
5314 && operand_equal_p (arg01,
5315 const_binop (PLUS_EXPR, arg2,
5316 build_int_cst (type, 1), 0),
5317 OEP_ONLY_CONST))
5318 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5319 type,
5320 fold_convert (type, arg1),
5321 arg2));
5322 break;
5324 case LE_EXPR:
5325 /* If C1 is C2 - 1, this is min(A, C2). */
5326 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5327 OEP_ONLY_CONST)
5328 && operand_equal_p (arg01,
5329 const_binop (MINUS_EXPR, arg2,
5330 build_int_cst (type, 1), 0),
5331 OEP_ONLY_CONST))
5332 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5333 type,
5334 fold_convert (type, arg1),
5335 arg2));
5336 break;
5338 case GT_EXPR:
5339 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5340 MAX_EXPR, to preserve the signedness of the comparison. */
5341 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5342 OEP_ONLY_CONST)
5343 && operand_equal_p (arg01,
5344 const_binop (MINUS_EXPR, arg2,
5345 build_int_cst (type, 1), 0),
5346 OEP_ONLY_CONST))
5347 return pedantic_non_lvalue (fold_convert (type,
5348 fold_build2 (MAX_EXPR, TREE_TYPE (arg00),
5349 arg00,
5350 fold_convert (TREE_TYPE (arg00),
5351 arg2))));
5352 break;
5354 case GE_EXPR:
5355 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5356 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5357 OEP_ONLY_CONST)
5358 && operand_equal_p (arg01,
5359 const_binop (PLUS_EXPR, arg2,
5360 build_int_cst (type, 1), 0),
5361 OEP_ONLY_CONST))
5362 return pedantic_non_lvalue (fold_convert (type,
5363 fold_build2 (MAX_EXPR, TREE_TYPE (arg00),
5364 arg00,
5365 fold_convert (TREE_TYPE (arg00),
5366 arg2))));
5367 break;
5368 case NE_EXPR:
5369 break;
5370 default:
5371 gcc_unreachable ();
5374 return NULL_TREE;
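/* Illustrative sketches (not part of the original source): two shapes
   fold_cond_expr_with_comparison recognizes, subject to the
   signed-zero and NaN caveats documented above.  The function names
   are hypothetical.  */

static int
example_cond_to_abs (int a)
{
  /* Recognized as ABS_EXPR: abs (a).  */
  return a >= 0 ? a : -a;
}

static int
example_cond_to_min (int a, int b)
{
  /* Recognized as MIN_EXPR: min (a, b).  */
  return a < b ? a : b;
}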
5379 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5380 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5381 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5382 false) >= 2)
5383 #endif
5385 /* EXP is some logical combination of boolean tests. See if we can
5386 merge it into some range test. Return the new tree if so. */
5388 static tree
5389 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5391 int or_op = (code == TRUTH_ORIF_EXPR
5392 || code == TRUTH_OR_EXPR);
5393 int in0_p, in1_p, in_p;
5394 tree low0, low1, low, high0, high1, high;
5395 bool strict_overflow_p = false;
5396 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5397 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5398 tree tem;
5399 const char * const warnmsg = G_("assuming signed overflow does not occur "
5400 "when simplifying range test");
5402 /* If this is an OR operation, invert both sides; we will invert
5403 again at the end. */
5404 if (or_op)
5405 in0_p = ! in0_p, in1_p = ! in1_p;
5407 /* If both expressions are the same, if we can merge the ranges, and we
5408 can build the range test, return it or it inverted. If one of the
5409 ranges is always true or always false, consider it to be the same
5410 expression as the other. */
5411 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5412 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5413 in1_p, low1, high1)
5414 && 0 != (tem = (build_range_check (type,
5415 lhs != 0 ? lhs
5416 : rhs != 0 ? rhs : integer_zero_node,
5417 in_p, low, high))))
5419 if (strict_overflow_p)
5420 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5421 return or_op ? invert_truthvalue (tem) : tem;
5424 /* On machines where the branch cost is expensive, if this is a
5425 short-circuited branch and the underlying object on both sides
5426 is the same, make a non-short-circuit operation. */
5427 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5428 && lhs != 0 && rhs != 0
5429 && (code == TRUTH_ANDIF_EXPR
5430 || code == TRUTH_ORIF_EXPR)
5431 && operand_equal_p (lhs, rhs, 0))
5433 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5434 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5435 which cases we can't do this. */
5436 if (simple_operand_p (lhs))
5437 return build2 (code == TRUTH_ANDIF_EXPR
5438 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5439 type, op0, op1);
5441 else if (lang_hooks.decls.global_bindings_p () == 0
5442 && ! CONTAINS_PLACEHOLDER_P (lhs))
5444 tree common = save_expr (lhs);
5446 if (0 != (lhs = build_range_check (type, common,
5447 or_op ? ! in0_p : in0_p,
5448 low0, high0))
5449 && (0 != (rhs = build_range_check (type, common,
5450 or_op ? ! in1_p : in1_p,
5451 low1, high1))))
5453 if (strict_overflow_p)
5454 fold_overflow_warning (warnmsg,
5455 WARN_STRICT_OVERFLOW_COMPARISON);
5456 return build2 (code == TRUTH_ANDIF_EXPR
5457 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5458 type, lhs, rhs);
5463 return 0;
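/* Illustrative sketch (not part of the original source): the merge
   fold_range_test performs on a short-circuit AND of two comparisons
   of the same object; on targets with expensive branches it may
   instead build a non-short-circuit TRUTH_AND_EXPR of the two range
   checks.  The function name is hypothetical.  */

static int
example_merged_range_test (int a)
{
  /* a > 0 && a < 10, i.e. a in [1, 9], folds to one check; the
     subtraction is done unsigned to sidestep signed overflow.  */
  return (unsigned) a - 1 <= 8;
}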
5466 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5467 bit value. Arrange things so the extra bits will be set to zero if and
5468 only if C is sign-extended to its full width. If MASK is nonzero,
5469 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5471 static tree
5472 unextend (tree c, int p, int unsignedp, tree mask)
5474 tree type = TREE_TYPE (c);
5475 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5476 tree temp;
5478 if (p == modesize || unsignedp)
5479 return c;
5481 /* We work by getting just the sign bit into the low-order bit, then
5482 into the high-order bit, then sign-extend. We then XOR that value
5483 with C. */
5484 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5485 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5487 /* We must use a signed type in order to get an arithmetic right shift.
5488 However, we must also avoid introducing accidental overflows, so that
5489 a subsequent call to integer_zerop will work. Hence we must
5490 do the type conversion here. At this point, the constant is either
5491 zero or one, and the conversion to a signed type can never overflow.
5492 We could get an overflow if this conversion is done anywhere else. */
5493 if (TYPE_UNSIGNED (type))
5494 temp = fold_convert (signed_type_for (type), temp);
5496 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5497 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5498 if (mask != 0)
5499 temp = const_binop (BIT_AND_EXPR, temp,
5500 fold_convert (TREE_TYPE (c), mask), 0);
5501 /* If necessary, convert the type back to match the type of C. */
5502 if (TYPE_UNSIGNED (type))
5503 temp = fold_convert (type, temp);
5505 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
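/* Illustrative sketch (not part of the original source): the
   shift-based sign-extension idiom that unextend assembles with
   const_binop, in plain C.  Assumes 0 < p < 64 and the usual
   two's-complement arithmetic right shift.  The helper name is
   hypothetical.  */

static long long
example_sign_extend (unsigned long long c, int p)
{
  /* Move bit P-1 up to the sign position, then arithmetic-shift it
     back down, replicating it across the upper bits.  */
  return (long long) (c << (64 - p)) >> (64 - p);
}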
5508 /* Find ways of folding logical expressions of LHS and RHS:
5509 Try to merge two comparisons to the same innermost item.
5510 Look for range tests like "ch >= '0' && ch <= '9'".
5511 Look for combinations of simple terms on machines with expensive branches
5512 and evaluate the RHS unconditionally.
5514 For example, if we have p->a == 2 && p->b == 4 and we can make an
5515 object large enough to span both A and B, we can do this with a comparison
5516 against the object ANDed with a mask.
5518 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5519 operations to do this with one comparison.
5521 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5522 function and the one above.
5524 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5525 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5527 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5528 two operands.
5530 We return the simplified tree or 0 if no optimization is possible. */
5532 static tree
5533 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5535 /* If this is the "or" of two comparisons, we can do something if
5536 the comparisons are NE_EXPR. If this is the "and", we can do something
5537 if the comparisons are EQ_EXPR. I.e.,
5538 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5540 WANTED_CODE is this operation code. For single bit fields, we can
5541 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5542 comparison for one-bit fields. */
5544 enum tree_code wanted_code;
5545 enum tree_code lcode, rcode;
5546 tree ll_arg, lr_arg, rl_arg, rr_arg;
5547 tree ll_inner, lr_inner, rl_inner, rr_inner;
5548 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5549 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5550 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5551 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5552 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5553 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5554 enum machine_mode lnmode, rnmode;
5555 tree ll_mask, lr_mask, rl_mask, rr_mask;
5556 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5557 tree l_const, r_const;
5558 tree lntype, rntype, result;
5559 HOST_WIDE_INT first_bit, end_bit;
5560 int volatilep;
5561 tree orig_lhs = lhs, orig_rhs = rhs;
5562 enum tree_code orig_code = code;
5564 /* Start by getting the comparison codes. Fail if anything is volatile.
5565 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5566 it were surrounded with a NE_EXPR. */
5568 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5569 return 0;
5571 lcode = TREE_CODE (lhs);
5572 rcode = TREE_CODE (rhs);
5574 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5576 lhs = build2 (NE_EXPR, truth_type, lhs,
5577 build_int_cst (TREE_TYPE (lhs), 0));
5578 lcode = NE_EXPR;
5581 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5583 rhs = build2 (NE_EXPR, truth_type, rhs,
5584 build_int_cst (TREE_TYPE (rhs), 0));
5585 rcode = NE_EXPR;
5588 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5589 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5590 return 0;
5592 ll_arg = TREE_OPERAND (lhs, 0);
5593 lr_arg = TREE_OPERAND (lhs, 1);
5594 rl_arg = TREE_OPERAND (rhs, 0);
5595 rr_arg = TREE_OPERAND (rhs, 1);
5597 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5598 if (simple_operand_p (ll_arg)
5599 && simple_operand_p (lr_arg))
5601 tree result;
5602 if (operand_equal_p (ll_arg, rl_arg, 0)
5603 && operand_equal_p (lr_arg, rr_arg, 0))
5605 result = combine_comparisons (code, lcode, rcode,
5606 truth_type, ll_arg, lr_arg);
5607 if (result)
5608 return result;
5610 else if (operand_equal_p (ll_arg, rr_arg, 0)
5611 && operand_equal_p (lr_arg, rl_arg, 0))
5613 result = combine_comparisons (code, lcode,
5614 swap_tree_comparison (rcode),
5615 truth_type, ll_arg, lr_arg);
5616 if (result)
5617 return result;
5621 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5622 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5624 /* If the RHS can be evaluated unconditionally and its operands are
5625 simple, it wins to evaluate the RHS unconditionally on machines
5626 with expensive branches. In this case, this isn't a comparison
5627 that can be merged. Avoid doing this if the RHS is a floating-point
5628 comparison since those can trap. */
5630 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5631 false) >= 2
5632 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5633 && simple_operand_p (rl_arg)
5634 && simple_operand_p (rr_arg))
5636 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5637 if (code == TRUTH_OR_EXPR
5638 && lcode == NE_EXPR && integer_zerop (lr_arg)
5639 && rcode == NE_EXPR && integer_zerop (rr_arg)
5640 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5641 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5642 return build2 (NE_EXPR, truth_type,
5643 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5644 ll_arg, rl_arg),
5645 build_int_cst (TREE_TYPE (ll_arg), 0));
5647 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5648 if (code == TRUTH_AND_EXPR
5649 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5650 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5651 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5652 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5653 return build2 (EQ_EXPR, truth_type,
5654 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5655 ll_arg, rl_arg),
5656 build_int_cst (TREE_TYPE (ll_arg), 0));
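/* For illustration, a minimal source-level sketch of the two merges
   above (a hedged example, not part of the folder itself; assumes
   ordinary unsigned ints):  */
#if 0
static int
or_of_nonzero (unsigned a, unsigned b)
{
  /* (a != 0) || (b != 0) folds to one bitwise OR and a single test:
     the OR is nonzero exactly when at least one operand is.  */
  return (a | b) != 0;
}

static int
and_of_zero (unsigned a, unsigned b)
{
  /* (a == 0) && (b == 0) folds the same way: the OR is zero exactly
     when both operands are zero.  */
  return (a | b) == 0;
}
#endif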
5658 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5660 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5661 return build2 (code, truth_type, lhs, rhs);
5662 return NULL_TREE;
5666 /* See if the comparisons can be merged. Then get all the parameters for
5667 each side. */
5669 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5670 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5671 return 0;
5673 volatilep = 0;
5674 ll_inner = decode_field_reference (ll_arg,
5675 &ll_bitsize, &ll_bitpos, &ll_mode,
5676 &ll_unsignedp, &volatilep, &ll_mask,
5677 &ll_and_mask);
5678 lr_inner = decode_field_reference (lr_arg,
5679 &lr_bitsize, &lr_bitpos, &lr_mode,
5680 &lr_unsignedp, &volatilep, &lr_mask,
5681 &lr_and_mask);
5682 rl_inner = decode_field_reference (rl_arg,
5683 &rl_bitsize, &rl_bitpos, &rl_mode,
5684 &rl_unsignedp, &volatilep, &rl_mask,
5685 &rl_and_mask);
5686 rr_inner = decode_field_reference (rr_arg,
5687 &rr_bitsize, &rr_bitpos, &rr_mode,
5688 &rr_unsignedp, &volatilep, &rr_mask,
5689 &rr_and_mask);
5691 /* The inner operation on the lhs of each comparison must be
5692 the same if we are to be able to do anything.
5693 Then see if we have constants. If not, the same must be true for
5694 the rhs's. */
5695 if (volatilep || ll_inner == 0 || rl_inner == 0
5696 || ! operand_equal_p (ll_inner, rl_inner, 0))
5697 return 0;
5699 if (TREE_CODE (lr_arg) == INTEGER_CST
5700 && TREE_CODE (rr_arg) == INTEGER_CST)
5701 l_const = lr_arg, r_const = rr_arg;
5702 else if (lr_inner == 0 || rr_inner == 0
5703 || ! operand_equal_p (lr_inner, rr_inner, 0))
5704 return 0;
5705 else
5706 l_const = r_const = 0;
5708 /* If either comparison code is not correct for our logical operation,
5709 fail. However, we can convert a one-bit comparison against zero into
5710 the opposite comparison against that bit being set in the field. */
5712 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5713 if (lcode != wanted_code)
5715 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5717 /* Make the left operand unsigned, since we are only interested
5718 in the value of one bit. Otherwise we are doing the wrong
5719 thing below. */
5720 ll_unsignedp = 1;
5721 l_const = ll_mask;
5723 else
5724 return 0;
5727 /* This is analogous to the code for l_const above. */
5728 if (rcode != wanted_code)
5730 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5732 rl_unsignedp = 1;
5733 r_const = rl_mask;
5735 else
5736 return 0;
5739 /* See if we can find a mode that contains both fields being compared on
5740 the left. If we can't, fail. Otherwise, update all constants and masks
5741 to be relative to a field of that size. */
5742 first_bit = MIN (ll_bitpos, rl_bitpos);
5743 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5744 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5745 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5746 volatilep);
5747 if (lnmode == VOIDmode)
5748 return 0;
5750 lnbitsize = GET_MODE_BITSIZE (lnmode);
5751 lnbitpos = first_bit & ~ (lnbitsize - 1);
5752 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5753 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5755 if (BYTES_BIG_ENDIAN)
5757 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5758 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5761 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5762 size_int (xll_bitpos), 0);
5763 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5764 size_int (xrl_bitpos), 0);
5766 if (l_const)
5768 l_const = fold_convert (lntype, l_const);
5769 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5770 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5771 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5772 fold_build1 (BIT_NOT_EXPR,
5773 lntype, ll_mask),
5774 0)))
5776 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5778 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5781 if (r_const)
5783 r_const = fold_convert (lntype, r_const);
5784 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5785 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5786 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5787 fold_build1 (BIT_NOT_EXPR,
5788 lntype, rl_mask),
5789 0)))
5791 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5793 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5797 /* If the right sides are not constant, do the same for them. Also,
5798 disallow this optimization if a size or signedness mismatch occurs
5799 between the left and right sides. */
5800 if (l_const == 0)
5802 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5803 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5804 /* Make sure the two fields on the right
5805 correspond to the left without being swapped. */
5806 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5807 return 0;
5809 first_bit = MIN (lr_bitpos, rr_bitpos);
5810 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5811 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5812 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5813 volatilep);
5814 if (rnmode == VOIDmode)
5815 return 0;
5817 rnbitsize = GET_MODE_BITSIZE (rnmode);
5818 rnbitpos = first_bit & ~ (rnbitsize - 1);
5819 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5820 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5822 if (BYTES_BIG_ENDIAN)
5824 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5825 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5828 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5829 size_int (xlr_bitpos), 0);
5830 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5831 size_int (xrr_bitpos), 0);
5833 /* Make a mask that corresponds to both fields being compared.
5834 Do this for both items being compared. If the operands are the
5835 same size and the bits being compared are in the same position
5836 then we can do this by masking both and comparing the masked
5837 results. */
5838 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5839 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5840 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5842 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5843 ll_unsignedp || rl_unsignedp);
5844 if (! all_ones_mask_p (ll_mask, lnbitsize))
5845 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5847 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5848 lr_unsignedp || rr_unsignedp);
5849 if (! all_ones_mask_p (lr_mask, rnbitsize))
5850 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5852 return build2 (wanted_code, truth_type, lhs, rhs);
5855 /* There is still another way we can do something: If both pairs of
5856 fields being compared are adjacent, we may be able to make a wider
5857 field containing them both.
5859 Note that we still must mask the lhs/rhs expressions. Furthermore,
5860 the mask must be shifted to account for the shift done by
5861 make_bit_field_ref. */
5862 if ((ll_bitsize + ll_bitpos == rl_bitpos
5863 && lr_bitsize + lr_bitpos == rr_bitpos)
5864 || (ll_bitpos == rl_bitpos + rl_bitsize
5865 && lr_bitpos == rr_bitpos + rr_bitsize))
5867 tree type;
5869 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5870 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5871 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5872 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5874 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5875 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5876 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5877 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5879 /* Convert to the smaller type before masking out unwanted bits. */
5880 type = lntype;
5881 if (lntype != rntype)
5883 if (lnbitsize > rnbitsize)
5885 lhs = fold_convert (rntype, lhs);
5886 ll_mask = fold_convert (rntype, ll_mask);
5887 type = rntype;
5889 else if (lnbitsize < rnbitsize)
5891 rhs = fold_convert (lntype, rhs);
5892 lr_mask = fold_convert (lntype, lr_mask);
5893 type = lntype;
5897 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5898 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5900 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5901 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5903 return build2 (wanted_code, truth_type, lhs, rhs);
5906 return 0;
5909 /* Handle the case of comparisons with constants. If there is something in
5910 common between the masks, those bits of the constants must be the same.
5911 If not, the condition is always false. Test for this to avoid generating
5912 incorrect code below. */
5913 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5914 if (! integer_zerop (result)
5915 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5916 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5918 if (wanted_code == NE_EXPR)
5920 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5921 return constant_boolean_node (true, truth_type);
5923 else
5925 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5926 return constant_boolean_node (false, truth_type);
5930 /* Construct the expression we will return. First get the component
5931 reference we will make. Unless the mask is all ones the width of
5932 that field, perform the mask operation. Then compare with the
5933 merged constant. */
5934 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5935 ll_unsignedp || rl_unsignedp);
5937 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5938 if (! all_ones_mask_p (ll_mask, lnbitsize))
5939 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5941 return build2 (wanted_code, truth_type, result,
5942 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
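/* For illustration, a hedged sketch of the merged comparison this
   function builds; the struct, mask and constant below are made up,
   and the exact bit-field layout is target-dependent:  */
#if 0
struct two_fields { unsigned b : 4; unsigned c : 4; };

static int
merged_test (const struct two_fields *a)
{
  /* (a->b == 2 && a->c == 4) becomes one load of the unit containing
     both fields, one mask and one compare against the merged
     constant, roughly (word & 0xff) == 0x42 for a little-endian
     layout with b in the low nibble.  */
  return a->b == 2 && a->c == 4;
}
#endif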
5945 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5946 constant. */
5948 static tree
5949 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5951 tree arg0 = op0;
5952 enum tree_code op_code;
5953 tree comp_const;
5954 tree minmax_const;
5955 int consts_equal, consts_lt;
5956 tree inner;
5958 STRIP_SIGN_NOPS (arg0);
5960 op_code = TREE_CODE (arg0);
5961 minmax_const = TREE_OPERAND (arg0, 1);
5962 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5963 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5964 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5965 inner = TREE_OPERAND (arg0, 0);
5967 /* If something does not permit us to optimize, fail by returning NULL_TREE. */
5968 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5969 || TREE_CODE (comp_const) != INTEGER_CST
5970 || TREE_OVERFLOW (comp_const)
5971 || TREE_CODE (minmax_const) != INTEGER_CST
5972 || TREE_OVERFLOW (minmax_const))
5973 return NULL_TREE;
5975 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5976 and GT_EXPR, doing the rest with recursive calls using logical
5977 simplifications. */
5978 switch (code)
5980 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5982 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5983 type, op0, op1);
5984 if (tem)
5985 return invert_truthvalue (tem);
5986 return NULL_TREE;
5989 case GE_EXPR:
5990 return
5991 fold_build2 (TRUTH_ORIF_EXPR, type,
5992 optimize_minmax_comparison
5993 (EQ_EXPR, type, arg0, comp_const),
5994 optimize_minmax_comparison
5995 (GT_EXPR, type, arg0, comp_const));
5997 case EQ_EXPR:
5998 if (op_code == MAX_EXPR && consts_equal)
5999 /* MAX (X, 0) == 0 -> X <= 0 */
6000 return fold_build2 (LE_EXPR, type, inner, comp_const);
6002 else if (op_code == MAX_EXPR && consts_lt)
6003 /* MAX (X, 0) == 5 -> X == 5 */
6004 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6006 else if (op_code == MAX_EXPR)
6007 /* MAX (X, 0) == -1 -> false */
6008 return omit_one_operand (type, integer_zero_node, inner);
6010 else if (consts_equal)
6011 /* MIN (X, 0) == 0 -> X >= 0 */
6012 return fold_build2 (GE_EXPR, type, inner, comp_const);
6014 else if (consts_lt)
6015 /* MIN (X, 0) == 5 -> false */
6016 return omit_one_operand (type, integer_zero_node, inner);
6018 else
6019 /* MIN (X, 0) == -1 -> X == -1 */
6020 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6022 case GT_EXPR:
6023 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6024 /* MAX (X, 0) > 0 -> X > 0
6025 MAX (X, 0) > 5 -> X > 5 */
6026 return fold_build2 (GT_EXPR, type, inner, comp_const);
6028 else if (op_code == MAX_EXPR)
6029 /* MAX (X, 0) > -1 -> true */
6030 return omit_one_operand (type, integer_one_node, inner);
6032 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6033 /* MIN (X, 0) > 0 -> false
6034 MIN (X, 0) > 5 -> false */
6035 return omit_one_operand (type, integer_zero_node, inner);
6037 else
6038 /* MIN (X, 0) > -1 -> X > -1 */
6039 return fold_build2 (GT_EXPR, type, inner, comp_const);
6041 default:
6042 return NULL_TREE;
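/* For illustration, the folds above spelled out on ints (a hedged
   summary, not an exhaustive table; MAX/MIN denote the already
   folded MAX_EXPR/MIN_EXPR):

     MAX (X, 0) == 0   ->  X <= 0       MIN (X, 0) == 0   ->  X >= 0
     MAX (X, 0) == 5   ->  X == 5       MIN (X, 0) == 5   ->  false
     MAX (X, 0) == -1  ->  false        MIN (X, 0) == -1  ->  X == -1
     MAX (X, 0) > -1   ->  true         MIN (X, 0) > 5    ->  false  */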
6046 /* T is an integer expression that is being multiplied by, divided by,
6047 or taken modulo a constant C (CODE says which operation, and what
6048 kind of division or modulus). See if we can eliminate that operation by folding it with
6049 other operations already in T. WIDE_TYPE, if non-null, is a type that
6050 should be used for the computation if wider than our type.
6052 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6053 (X * 2) + (Y * 4). We must, however, be assured that either the original
6054 expression would not overflow or that overflow is undefined for the type
6055 in the language in question.
6057 If we return a non-null expression, it is an equivalent form of the
6058 original computation, but need not be in the original type.
6060 We set *STRICT_OVERFLOW_P to true if the return value depends on
6061 signed overflow being undefined. Otherwise we do not change
6062 *STRICT_OVERFLOW_P. */
6064 static tree
6065 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6066 bool *strict_overflow_p)
6068 /* To avoid exponential search depth, refuse to allow recursion past
6069 three levels. Beyond that (1) it's highly unlikely that we'll find
6070 something interesting and (2) we've probably processed it before
6071 when we built the inner expression. */
6073 static int depth;
6074 tree ret;
6076 if (depth > 3)
6077 return NULL;
6079 depth++;
6080 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6081 depth--;
6083 return ret;
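/* For illustration, the headline transformation worked on ints (a
   hedged sketch; it relies on signed overflow being undefined):  */
#if 0
static int
muldiv_example (int x, int y)
{
  /* Both products are multiples of the divisor, so the division is
     distributed and folded into the constants:
       (x * 8 + y * 16) / 4   ->   x * 2 + y * 4.
     With -fwrapv the original may wrap where the folded form does
     not, so the fold is rejected there.  */
  return (x * 8 + y * 16) / 4;
}
#endif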
6086 static tree
6087 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6088 bool *strict_overflow_p)
6090 tree type = TREE_TYPE (t);
6091 enum tree_code tcode = TREE_CODE (t);
6092 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6093 > GET_MODE_SIZE (TYPE_MODE (type)))
6094 ? wide_type : type);
6095 tree t1, t2;
6096 int same_p = tcode == code;
6097 tree op0 = NULL_TREE, op1 = NULL_TREE;
6098 bool sub_strict_overflow_p;
6100 /* Don't deal with constants of zero here; they confuse the code below. */
6101 if (integer_zerop (c))
6102 return NULL_TREE;
6104 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6105 op0 = TREE_OPERAND (t, 0);
6107 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6108 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6110 /* Note that we need not handle conditional operations here since fold
6111 already handles those cases. So just do arithmetic here. */
6112 switch (tcode)
6114 case INTEGER_CST:
6115 /* For a constant, we can always simplify if we are a multiply
6116 or (for divide and modulus) if it is a multiple of our constant. */
6117 if (code == MULT_EXPR
6118 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6119 return const_binop (code, fold_convert (ctype, t),
6120 fold_convert (ctype, c), 0);
6121 break;
6123 CASE_CONVERT: case NON_LVALUE_EXPR:
6124 /* If op0 is an expression ... */
6125 if ((COMPARISON_CLASS_P (op0)
6126 || UNARY_CLASS_P (op0)
6127 || BINARY_CLASS_P (op0)
6128 || VL_EXP_CLASS_P (op0)
6129 || EXPRESSION_CLASS_P (op0))
6130 /* ... and has wrapping overflow, and its type is smaller
6131 than ctype, then we cannot pass through as widening. */
6132 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6133 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6134 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6135 && (TYPE_PRECISION (ctype)
6136 > TYPE_PRECISION (TREE_TYPE (op0))))
6137 /* ... or this is a truncation (t is narrower than op0),
6138 then we cannot pass through this narrowing. */
6139 || (TYPE_PRECISION (type)
6140 < TYPE_PRECISION (TREE_TYPE (op0)))
6141 /* ... or signedness changes for division or modulus,
6142 then we cannot pass through this conversion. */
6143 || (code != MULT_EXPR
6144 && (TYPE_UNSIGNED (ctype)
6145 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6146 /* ... or has undefined overflow while the destination type
6147 has not, we cannot do the operation in the inner type
6148 as that would introduce undefined overflow. */
6149 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6150 && !TYPE_OVERFLOW_UNDEFINED (type))))
6151 break;
6153 /* Pass the constant down and see if we can make a simplification. If
6154 we can, replace this expression with the inner simplification for
6155 possible later conversion to our or some other type. */
6156 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6157 && TREE_CODE (t2) == INTEGER_CST
6158 && !TREE_OVERFLOW (t2)
6159 && (0 != (t1 = extract_muldiv (op0, t2, code,
6160 code == MULT_EXPR
6161 ? ctype : NULL_TREE,
6162 strict_overflow_p))))
6163 return t1;
6164 break;
6166 case ABS_EXPR:
6167 /* If widening the type changes it from signed to unsigned, then we
6168 must avoid building ABS_EXPR itself as unsigned. */
6169 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6171 tree cstype = (*signed_type_for) (ctype);
6172 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6173 != 0)
6175 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6176 return fold_convert (ctype, t1);
6178 break;
6180 /* If the constant is negative, we cannot simplify this. */
6181 if (tree_int_cst_sgn (c) == -1)
6182 break;
6183 /* FALLTHROUGH */
6184 case NEGATE_EXPR:
6185 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6186 != 0)
6187 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6188 break;
6190 case MIN_EXPR: case MAX_EXPR:
6191 /* If widening the type changes the signedness, then we can't perform
6192 this optimization as that changes the result. */
6193 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6194 break;
6196 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6197 sub_strict_overflow_p = false;
6198 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6199 &sub_strict_overflow_p)) != 0
6200 && (t2 = extract_muldiv (op1, c, code, wide_type,
6201 &sub_strict_overflow_p)) != 0)
6203 if (tree_int_cst_sgn (c) < 0)
6204 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6205 if (sub_strict_overflow_p)
6206 *strict_overflow_p = true;
6207 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6208 fold_convert (ctype, t2));
6210 break;
6212 case LSHIFT_EXPR: case RSHIFT_EXPR:
6213 /* If the second operand is constant, this is a multiplication
6214 or floor division by a power of two, so we can treat it that
6215 way unless the multiplier or divisor overflows. Signed
6216 left-shift overflow is implementation-defined rather than
6217 undefined in C90, so do not convert signed left shift into
6218 multiplication. */
6219 if (TREE_CODE (op1) == INTEGER_CST
6220 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6221 /* const_binop may not detect overflow correctly,
6222 so check for it explicitly here. */
6223 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6224 && TREE_INT_CST_HIGH (op1) == 0
6225 && 0 != (t1 = fold_convert (ctype,
6226 const_binop (LSHIFT_EXPR,
6227 size_one_node,
6228 op1, 0)))
6229 && !TREE_OVERFLOW (t1))
6230 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6231 ? MULT_EXPR : FLOOR_DIV_EXPR,
6232 ctype, fold_convert (ctype, op0), t1),
6233 c, code, wide_type, strict_overflow_p);
6234 break;
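/* For illustration (hedged): with a constant shift count the case
   above analyzes the shift as the equivalent multiplication or
   floor division, e.g. for unsigned x

     x << 3   is treated as   x * 8
     x >> 2   is treated as   x / 4   (floor division)

   while signed left shifts are left alone, since C90 makes their
   overflow implementation-defined rather than undefined.  */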
6236 case PLUS_EXPR: case MINUS_EXPR:
6237 /* See if we can eliminate the operation on both sides. If we can, we
6238 can return a new PLUS or MINUS. If we can't, the only remaining
6239 cases where we can do anything are if the second operand is a
6240 constant. */
6241 sub_strict_overflow_p = false;
6242 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6243 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6244 if (t1 != 0 && t2 != 0
6245 && (code == MULT_EXPR
6246 /* If not multiplication, we can only do this if both operands
6247 are divisible by c. */
6248 || (multiple_of_p (ctype, op0, c)
6249 && multiple_of_p (ctype, op1, c))))
6251 if (sub_strict_overflow_p)
6252 *strict_overflow_p = true;
6253 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6254 fold_convert (ctype, t2));
6257 /* If this was a subtraction, negate OP1 and set it to be an addition.
6258 This simplifies the logic below. */
6259 if (tcode == MINUS_EXPR)
6260 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6262 if (TREE_CODE (op1) != INTEGER_CST)
6263 break;
6265 /* If either OP1 or C are negative, this optimization is not safe for
6266 some of the division and remainder types while for others we need
6267 to change the code. */
6268 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6270 if (code == CEIL_DIV_EXPR)
6271 code = FLOOR_DIV_EXPR;
6272 else if (code == FLOOR_DIV_EXPR)
6273 code = CEIL_DIV_EXPR;
6274 else if (code != MULT_EXPR
6275 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6276 break;
6279 /* If it's a multiply or a division/modulus operation of a multiple
6280 of our constant, do the operation and verify it doesn't overflow. */
6281 if (code == MULT_EXPR
6282 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6284 op1 = const_binop (code, fold_convert (ctype, op1),
6285 fold_convert (ctype, c), 0);
6286 /* We allow the constant to overflow with wrapping semantics. */
6287 if (op1 == 0
6288 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6289 break;
6291 else
6292 break;
6294 /* If we have an unsigned type that is not a sizetype, we cannot widen
6295 the operation since it will change the result if the original
6296 computation overflowed. */
6297 if (TYPE_UNSIGNED (ctype)
6298 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6299 && ctype != type)
6300 break;
6302 /* If we were able to eliminate our operation from the first side,
6303 apply our operation to the second side and reform the PLUS. */
6304 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6305 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6307 /* The last case is if we are a multiply. In that case, we can
6308 apply the distributive law to commute the multiply and addition
6309 if the multiplication of the constants doesn't overflow. */
6310 if (code == MULT_EXPR)
6311 return fold_build2 (tcode, ctype,
6312 fold_build2 (code, ctype,
6313 fold_convert (ctype, op0),
6314 fold_convert (ctype, c)),
6315 op1);
6317 break;
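/* For illustration, a worked instance of the distributive case just
   above (hedged; assumes signed overflow is undefined):  */
#if 0
static int
distribute_example (int x)
{
  /* The multiply commutes with the addition because the constant
     product 4 * 2 does not overflow:  (x + 4) * 2  ->  x * 2 + 8.  */
  return (x + 4) * 2;
}
#endif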
6319 case MULT_EXPR:
6320 /* We have a special case here if we are doing something like
6321 (C * 8) % 4 since we know that's zero. */
6322 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6323 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6324 /* If the multiplication can overflow we cannot optimize this.
6325 ??? Until we can properly mark individual operations as
6326 not overflowing we need to treat sizetype specially here as
6327 stor-layout relies on this optimization to make
6328 DECL_FIELD_BIT_OFFSET always a constant. */
6329 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6330 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6331 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6332 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6333 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6335 *strict_overflow_p = true;
6336 return omit_one_operand (type, integer_zero_node, op0);
6339 /* ... fall through ... */
6341 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6342 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6343 /* If we can extract our operation from the LHS, do so and return a
6344 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6345 do something only if the second operand is a constant. */
6346 if (same_p
6347 && (t1 = extract_muldiv (op0, c, code, wide_type,
6348 strict_overflow_p)) != 0)
6349 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6350 fold_convert (ctype, op1));
6351 else if (tcode == MULT_EXPR && code == MULT_EXPR
6352 && (t1 = extract_muldiv (op1, c, code, wide_type,
6353 strict_overflow_p)) != 0)
6354 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6355 fold_convert (ctype, t1));
6356 else if (TREE_CODE (op1) != INTEGER_CST)
6357 return 0;
6359 /* If these are the same operation types, we can associate them
6360 assuming no overflow. */
6361 if (tcode == code
6362 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6363 fold_convert (ctype, c), 1))
6364 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6365 TREE_INT_CST_HIGH (t1),
6366 (TYPE_UNSIGNED (ctype)
6367 && tcode != MULT_EXPR) ? -1 : 1,
6368 TREE_OVERFLOW (t1)))
6369 && !TREE_OVERFLOW (t1))
6370 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6372 /* If these operations "cancel" each other, we have the main
6373 optimizations of this pass, which occur when either constant is a
6374 multiple of the other, in which case we replace this with either an
6375 operation of CODE or TCODE.
6377 If we have an unsigned type that is not a sizetype, we cannot do
6378 this since it will change the result if the original computation
6379 overflowed. */
6380 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6381 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6382 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6383 || (tcode == MULT_EXPR
6384 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6385 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6386 && code != MULT_EXPR)))
6388 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6390 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6391 *strict_overflow_p = true;
6392 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6393 fold_convert (ctype,
6394 const_binop (TRUNC_DIV_EXPR,
6395 op1, c, 0)));
6397 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6399 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6400 *strict_overflow_p = true;
6401 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6402 fold_convert (ctype,
6403 const_binop (TRUNC_DIV_EXPR,
6404 c, op1, 0)));
6407 break;
6409 default:
6410 break;
6413 return 0;
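/* For illustration, worked instances of the cancellation cases above
   (hedged; both rely on undefined signed overflow, or on sizetype):

     (x * 6) / 3   ->   x * 2     op1 is a multiple of c, keep TCODE
     (x * 3) / 6   ->   x / 2     c is a multiple of op1, keep CODE  */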
6416 /* Return a node which has the indicated constant VALUE (either 0 or
6417 1), and is of the indicated TYPE. */
6419 tree
6420 constant_boolean_node (int value, tree type)
6422 if (type == integer_type_node)
6423 return value ? integer_one_node : integer_zero_node;
6424 else if (type == boolean_type_node)
6425 return value ? boolean_true_node : boolean_false_node;
6426 else
6427 return build_int_cst (type, value);
6431 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6432 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6433 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6434 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6435 COND is the first argument to CODE; otherwise (as in the example
6436 given here), it is the second argument. TYPE is the type of the
6437 original expression. Return NULL_TREE if no simplification is
6438 possible. */
6440 static tree
6441 fold_binary_op_with_conditional_arg (enum tree_code code,
6442 tree type, tree op0, tree op1,
6443 tree cond, tree arg, int cond_first_p)
6445 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6446 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6447 tree test, true_value, false_value;
6448 tree lhs = NULL_TREE;
6449 tree rhs = NULL_TREE;
6451 /* This transformation is only worthwhile if we don't have to wrap
6452 arg in a SAVE_EXPR, and the operation can be simplified on at least
6453 one of the branches once it is pushed inside the COND_EXPR. */
6454 if (!TREE_CONSTANT (arg))
6455 return NULL_TREE;
6457 if (TREE_CODE (cond) == COND_EXPR)
6459 test = TREE_OPERAND (cond, 0);
6460 true_value = TREE_OPERAND (cond, 1);
6461 false_value = TREE_OPERAND (cond, 2);
6462 /* If this operand is a throw expression (and thus has void type),
6463 it does not make sense to try to perform a logical or
6464 arithmetic operation involving it. */
6465 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6466 lhs = true_value;
6467 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6468 rhs = false_value;
6470 else
6472 tree testtype = TREE_TYPE (cond);
6473 test = cond;
6474 true_value = constant_boolean_node (true, testtype);
6475 false_value = constant_boolean_node (false, testtype);
6478 arg = fold_convert (arg_type, arg);
6479 if (lhs == 0)
6481 true_value = fold_convert (cond_type, true_value);
6482 if (cond_first_p)
6483 lhs = fold_build2 (code, type, true_value, arg);
6484 else
6485 lhs = fold_build2 (code, type, arg, true_value);
6487 if (rhs == 0)
6489 false_value = fold_convert (cond_type, false_value);
6490 if (cond_first_p)
6491 rhs = fold_build2 (code, type, false_value, arg);
6492 else
6493 rhs = fold_build2 (code, type, arg, false_value);
6496 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6497 return fold_convert (type, test);
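/* For illustration (hedged; ARG must be TREE_CONSTANT for the
   transformation to fire):

     1 + (b ? x : y)   ->   b ? (1 + x) : (1 + y)
     5 + (x < y)       ->   (x < y) ? 6 : 5

   pushing the constant into both arms lets each arm fold further.  */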
6501 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6503 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6504 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6505 ADDEND is the same as X.
6507 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6508 and finite. The problematic cases are when X is zero, and its mode
6509 has signed zeros. In the case of rounding towards -infinity,
6510 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6511 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6513 bool
6514 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6516 if (!real_zerop (addend))
6517 return false;
6519 /* Don't allow the fold with -fsignaling-nans. */
6520 if (HONOR_SNANS (TYPE_MODE (type)))
6521 return false;
6523 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6524 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6525 return true;
6527 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6528 if (TREE_CODE (addend) == REAL_CST
6529 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6530 negate = !negate;
6532 /* The mode has signed zeros, and we have to honor their sign.
6533 In this situation, there is only one case we can return true for.
6534 X - 0 is the same as X unless rounding towards -infinity is
6535 supported. */
6536 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
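/* For illustration, the concrete IEEE cases behind the signed-zero
   test above (round-to-nearest unless stated otherwise):

     (-0.0) + (+0.0)  ==  +0.0     so  x + 0.0  may not equal x
     (-0.0) - (+0.0)  ==  -0.0     so  x - 0.0  is safe here

   and only when rounding towards -infinity does (+0.0) - (+0.0)
   yield -0.0, which is why the fold is also rejected under
   sign-dependent rounding.  */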
6539 /* Subroutine of fold() that checks comparisons of built-in math
6540 functions against real constants.
6542 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6543 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6544 is the type of the result and ARG0 and ARG1 are the operands of the
6545 comparison. ARG1 must be a TREE_REAL_CST.
6547 The function returns the constant folded tree if a simplification
6548 can be made, and NULL_TREE otherwise. */
6550 static tree
6551 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6552 tree type, tree arg0, tree arg1)
6554 REAL_VALUE_TYPE c;
6556 if (BUILTIN_SQRT_P (fcode))
6558 tree arg = CALL_EXPR_ARG (arg0, 0);
6559 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6561 c = TREE_REAL_CST (arg1);
6562 if (REAL_VALUE_NEGATIVE (c))
6564 /* sqrt(x) == y, < y or <= y is always false, if y is negative. */
6565 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6566 return omit_one_operand (type, integer_zero_node, arg);
6568 /* sqrt(x) > y is always true, if y is negative and we
6569 don't care about NaNs, i.e. negative values of x. */
6570 if (code == NE_EXPR || !HONOR_NANS (mode))
6571 return omit_one_operand (type, integer_one_node, arg);
6573 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6574 return fold_build2 (GE_EXPR, type, arg,
6575 build_real (TREE_TYPE (arg), dconst0));
6577 else if (code == GT_EXPR || code == GE_EXPR)
6579 REAL_VALUE_TYPE c2;
6581 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6582 real_convert (&c2, mode, &c2);
6584 if (REAL_VALUE_ISINF (c2))
6586 /* sqrt(x) > y is x == +Inf, when y is very large. */
6587 if (HONOR_INFINITIES (mode))
6588 return fold_build2 (EQ_EXPR, type, arg,
6589 build_real (TREE_TYPE (arg), c2));
6591 /* sqrt(x) > y is always false, when y is very large
6592 and we don't care about infinities. */
6593 return omit_one_operand (type, integer_zero_node, arg);
6596 /* sqrt(x) > c is the same as x > c*c. */
6597 return fold_build2 (code, type, arg,
6598 build_real (TREE_TYPE (arg), c2));
6600 else if (code == LT_EXPR || code == LE_EXPR)
6602 REAL_VALUE_TYPE c2;
6604 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6605 real_convert (&c2, mode, &c2);
6607 if (REAL_VALUE_ISINF (c2))
6609 /* sqrt(x) < y is always true, when y is a very large
6610 value and we don't care about NaNs or Infinities. */
6611 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6612 return omit_one_operand (type, integer_one_node, arg);
6614 /* sqrt(x) < y is x != +Inf when y is very large and we
6615 don't care about NaNs. */
6616 if (! HONOR_NANS (mode))
6617 return fold_build2 (NE_EXPR, type, arg,
6618 build_real (TREE_TYPE (arg), c2));
6620 /* sqrt(x) < y is x >= 0 when y is very large and we
6621 don't care about Infinities. */
6622 if (! HONOR_INFINITIES (mode))
6623 return fold_build2 (GE_EXPR, type, arg,
6624 build_real (TREE_TYPE (arg), dconst0));
6626 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6627 if (lang_hooks.decls.global_bindings_p () != 0
6628 || CONTAINS_PLACEHOLDER_P (arg))
6629 return NULL_TREE;
6631 arg = save_expr (arg);
6632 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6633 fold_build2 (GE_EXPR, type, arg,
6634 build_real (TREE_TYPE (arg),
6635 dconst0)),
6636 fold_build2 (NE_EXPR, type, arg,
6637 build_real (TREE_TYPE (arg),
6638 c2)));
6641 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6642 if (! HONOR_NANS (mode))
6643 return fold_build2 (code, type, arg,
6644 build_real (TREE_TYPE (arg), c2));
6646 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6647 if (lang_hooks.decls.global_bindings_p () == 0
6648 && ! CONTAINS_PLACEHOLDER_P (arg))
6650 arg = save_expr (arg);
6651 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6652 fold_build2 (GE_EXPR, type, arg,
6653 build_real (TREE_TYPE (arg),
6654 dconst0)),
6655 fold_build2 (code, type, arg,
6656 build_real (TREE_TYPE (arg),
6657 c2)));
6662 return NULL_TREE;
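/* For illustration, worked sqrt comparisons (hedged; c2 stands for
   c*c rounded to the argument's mode):

     sqrt (x) > 2.0    ->   x > 4.0
     sqrt (x) < -1.0   ->   false         sqrt is never negative
     sqrt (x) > -1.0   ->   x >= 0.0      when NaNs are honored  */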
6665 /* Subroutine of fold() that optimizes comparisons against Infinities,
6666 either +Inf or -Inf.
6668 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6669 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6670 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6672 The function returns the constant folded tree if a simplification
6673 can be made, and NULL_TREE otherwise. */
6675 static tree
6676 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6678 enum machine_mode mode;
6679 REAL_VALUE_TYPE max;
6680 tree temp;
6681 bool neg;
6683 mode = TYPE_MODE (TREE_TYPE (arg0));
6685 /* For negative infinity swap the sense of the comparison. */
6686 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6687 if (neg)
6688 code = swap_tree_comparison (code);
6690 switch (code)
6692 case GT_EXPR:
6693 /* x > +Inf is always false, if we ignore sNaNs. */
6694 if (HONOR_SNANS (mode))
6695 return NULL_TREE;
6696 return omit_one_operand (type, integer_zero_node, arg0);
6698 case LE_EXPR:
6699 /* x <= +Inf is always true, if we don't care about NaNs. */
6700 if (! HONOR_NANS (mode))
6701 return omit_one_operand (type, integer_one_node, arg0);
6703 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6704 if (lang_hooks.decls.global_bindings_p () == 0
6705 && ! CONTAINS_PLACEHOLDER_P (arg0))
6707 arg0 = save_expr (arg0);
6708 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6710 break;
6712 case EQ_EXPR:
6713 case GE_EXPR:
6714 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6715 real_maxval (&max, neg, mode);
6716 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6717 arg0, build_real (TREE_TYPE (arg0), max));
6719 case LT_EXPR:
6720 /* x < +Inf is always equal to x <= DBL_MAX. */
6721 real_maxval (&max, neg, mode);
6722 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6723 arg0, build_real (TREE_TYPE (arg0), max));
6725 case NE_EXPR:
6726 /* x != +Inf is always equal to !(x > DBL_MAX). */
6727 real_maxval (&max, neg, mode);
6728 if (! HONOR_NANS (mode))
6729 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6730 arg0, build_real (TREE_TYPE (arg0), max));
6732 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6733 arg0, build_real (TREE_TYPE (arg0), max));
6734 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6736 default:
6737 break;
6740 return NULL_TREE;
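/* For illustration, worked instances of the Inf folds above, shown
   for double (hedged):

     x >  +Inf   ->   false               (no signaling NaNs)
     x <= +Inf   ->   x == x              i.e. !isnan (x)
     x <  +Inf   ->   x <= DBL_MAX
     x != +Inf   ->   !(x > DBL_MAX)      when NaNs are honored  */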
6743 /* Subroutine of fold() that optimizes comparisons of a division by
6744 a nonzero integer constant against an integer constant, i.e.
6745 X/C1 op C2.
6747 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6748 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6749 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6751 The function returns the constant folded tree if a simplification
6752 can be made, and NULL_TREE otherwise. */
6754 static tree
6755 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6757 tree prod, tmp, hi, lo;
6758 tree arg00 = TREE_OPERAND (arg0, 0);
6759 tree arg01 = TREE_OPERAND (arg0, 1);
6760 unsigned HOST_WIDE_INT lpart;
6761 HOST_WIDE_INT hpart;
6762 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6763 bool neg_overflow;
6764 int overflow;
6766 /* We have to do this the hard way to detect unsigned overflow.
6767 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6768 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6769 TREE_INT_CST_HIGH (arg01),
6770 TREE_INT_CST_LOW (arg1),
6771 TREE_INT_CST_HIGH (arg1),
6772 &lpart, &hpart, unsigned_p);
6773 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6774 -1, overflow);
6775 neg_overflow = false;
6777 if (unsigned_p)
6779 tmp = int_const_binop (MINUS_EXPR, arg01,
6780 build_int_cst (TREE_TYPE (arg01), 1), 0);
6781 lo = prod;
6783 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6784 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6785 TREE_INT_CST_HIGH (prod),
6786 TREE_INT_CST_LOW (tmp),
6787 TREE_INT_CST_HIGH (tmp),
6788 &lpart, &hpart, unsigned_p);
6789 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6790 -1, overflow | TREE_OVERFLOW (prod));
6792 else if (tree_int_cst_sgn (arg01) >= 0)
6794 tmp = int_const_binop (MINUS_EXPR, arg01,
6795 build_int_cst (TREE_TYPE (arg01), 1), 0);
6796 switch (tree_int_cst_sgn (arg1))
6798 case -1:
6799 neg_overflow = true;
6800 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6801 hi = prod;
6802 break;
6804 case 0:
6805 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6806 hi = tmp;
6807 break;
6809 case 1:
6810 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6811 lo = prod;
6812 break;
6814 default:
6815 gcc_unreachable ();
6818 else
6820 /* A negative divisor reverses the relational operators. */
6821 code = swap_tree_comparison (code);
6823 tmp = int_const_binop (PLUS_EXPR, arg01,
6824 build_int_cst (TREE_TYPE (arg01), 1), 0);
6825 switch (tree_int_cst_sgn (arg1))
6827 case -1:
6828 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6829 lo = prod;
6830 break;
6832 case 0:
6833 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6834 lo = tmp;
6835 break;
6837 case 1:
6838 neg_overflow = true;
6839 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6840 hi = prod;
6841 break;
6843 default:
6844 gcc_unreachable ();
6848 switch (code)
6850 case EQ_EXPR:
6851 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6852 return omit_one_operand (type, integer_zero_node, arg00);
6853 if (TREE_OVERFLOW (hi))
6854 return fold_build2 (GE_EXPR, type, arg00, lo);
6855 if (TREE_OVERFLOW (lo))
6856 return fold_build2 (LE_EXPR, type, arg00, hi);
6857 return build_range_check (type, arg00, 1, lo, hi);
6859 case NE_EXPR:
6860 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6861 return omit_one_operand (type, integer_one_node, arg00);
6862 if (TREE_OVERFLOW (hi))
6863 return fold_build2 (LT_EXPR, type, arg00, lo);
6864 if (TREE_OVERFLOW (lo))
6865 return fold_build2 (GT_EXPR, type, arg00, hi);
6866 return build_range_check (type, arg00, 0, lo, hi);
6868 case LT_EXPR:
6869 if (TREE_OVERFLOW (lo))
6871 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6872 return omit_one_operand (type, tmp, arg00);
6874 return fold_build2 (LT_EXPR, type, arg00, lo);
6876 case LE_EXPR:
6877 if (TREE_OVERFLOW (hi))
6879 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6880 return omit_one_operand (type, tmp, arg00);
6882 return fold_build2 (LE_EXPR, type, arg00, hi);
6884 case GT_EXPR:
6885 if (TREE_OVERFLOW (hi))
6887 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6888 return omit_one_operand (type, tmp, arg00);
6890 return fold_build2 (GT_EXPR, type, arg00, hi);
6892 case GE_EXPR:
6893 if (TREE_OVERFLOW (lo))
6895 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6896 return omit_one_operand (type, tmp, arg00);
6898 return fold_build2 (GE_EXPR, type, arg00, lo);
6900 default:
6901 break;
6904 return NULL_TREE;
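/* For illustration, a worked instance of the range check built above
   (hedged; signed x, truncating division):

     x / 3 == 2    <->    6 <= x && x <= 8

   so the division disappears in favor of a single range test, and
   the overflowed-bound cases degrade to one-sided comparisons such
   as x >= lo.  */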
6908 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6909 equality/inequality test, then return a simplified form of the test
6910 using a sign test. Otherwise return NULL. TYPE is the desired
6911 result type. */
6913 static tree
6914 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6915 tree result_type)
6917 /* If this is testing a single bit, we can optimize the test. */
6918 if ((code == NE_EXPR || code == EQ_EXPR)
6919 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6920 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6922 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6923 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6924 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6926 if (arg00 != NULL_TREE
6927 /* This is only a win if casting to a signed type is cheap,
6928 i.e. when arg00's type is not a partial mode. */
6929 && TYPE_PRECISION (TREE_TYPE (arg00))
6930 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6932 tree stype = signed_type_for (TREE_TYPE (arg00));
6933 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6934 result_type, fold_convert (stype, arg00),
6935 build_int_cst (stype, 0));
6939 return NULL_TREE;
6942 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6943 equality/inequality test, then return a simplified form of
6944 the test using shifts and logical operations. Otherwise return
6945 NULL. TYPE is the desired result type. */
6947 tree
6948 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6949 tree result_type)
6951 /* If this is testing a single bit, we can optimize the test. */
6952 if ((code == NE_EXPR || code == EQ_EXPR)
6953 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6954 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6956 tree inner = TREE_OPERAND (arg0, 0);
6957 tree type = TREE_TYPE (arg0);
6958 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6959 enum machine_mode operand_mode = TYPE_MODE (type);
6960 int ops_unsigned;
6961 tree signed_type, unsigned_type, intermediate_type;
6962 tree tem, one;
6964 /* First, see if we can fold the single bit test into a sign-bit
6965 test. */
6966 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6967 result_type);
6968 if (tem)
6969 return tem;
6971 /* Otherwise we have (A & C) != 0 where C is a single bit,
6972 convert that into ((A >> C2) & 1), where C2 = log2(C).
6973 Similarly for (A & C) == 0. */
6975 /* If INNER is a right shift of a constant and it plus BITNUM does
6976 not overflow, adjust BITNUM and INNER. */
6977 if (TREE_CODE (inner) == RSHIFT_EXPR
6978 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6979 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6980 && bitnum < TYPE_PRECISION (type)
6981 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6982 bitnum - TYPE_PRECISION (type)))
6984 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6985 inner = TREE_OPERAND (inner, 0);
6988 /* If we are going to be able to omit the AND below, we must do our
6989 operations as unsigned. If we must use the AND, we have a choice.
6990 Normally unsigned is faster, but for some machines signed is. */
6991 #ifdef LOAD_EXTEND_OP
6992 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6993 && !flag_syntax_only) ? 0 : 1;
6994 #else
6995 ops_unsigned = 1;
6996 #endif
6998 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6999 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7000 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7001 inner = fold_convert (intermediate_type, inner);
7003 if (bitnum != 0)
7004 inner = build2 (RSHIFT_EXPR, intermediate_type,
7005 inner, size_int (bitnum));
7007 one = build_int_cst (intermediate_type, 1);
7009 if (code == EQ_EXPR)
7010 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
7012 /* Put the AND last so it can combine with more things. */
7013 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7015 /* Make sure to return the proper type. */
7016 inner = fold_convert (result_type, inner);
7018 return inner;
7020 return NULL_TREE;
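/* For illustration, hedged sketches of the two single-bit folds
   above, assuming a 32-bit int:  */
#if 0
static int
bit_test (unsigned x)
{
  /* (x & 8) != 0 becomes (x >> 3) & 1, with 3 == log2 (8).  */
  return (x >> 3) & 1;
}

static int
sign_test (int x)
{
  /* (x & 0x80000000) != 0 tests the sign bit, so the sign-test
     variant folds it to x < 0 instead.  */
  return x < 0;
}
#endif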
7023 /* Check whether we are allowed to reorder operands arg0 and arg1,
7024 such that the evaluation of arg1 occurs before arg0. */
7026 static bool
7027 reorder_operands_p (const_tree arg0, const_tree arg1)
7029 if (! flag_evaluation_order)
7030 return true;
7031 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7032 return true;
7033 return ! TREE_SIDE_EFFECTS (arg0)
7034 && ! TREE_SIDE_EFFECTS (arg1);
7037 /* Test whether it is preferable to swap two operands, ARG0 and
7038 ARG1, for example because ARG0 is an integer constant and ARG1
7039 isn't. If REORDER is true, only recommend swapping if we can
7040 evaluate the operands in reverse order. */
7042 bool
7043 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7045 STRIP_SIGN_NOPS (arg0);
7046 STRIP_SIGN_NOPS (arg1);
7048 if (TREE_CODE (arg1) == INTEGER_CST)
7049 return 0;
7050 if (TREE_CODE (arg0) == INTEGER_CST)
7051 return 1;
7053 if (TREE_CODE (arg1) == REAL_CST)
7054 return 0;
7055 if (TREE_CODE (arg0) == REAL_CST)
7056 return 1;
7058 if (TREE_CODE (arg1) == FIXED_CST)
7059 return 0;
7060 if (TREE_CODE (arg0) == FIXED_CST)
7061 return 1;
7063 if (TREE_CODE (arg1) == COMPLEX_CST)
7064 return 0;
7065 if (TREE_CODE (arg0) == COMPLEX_CST)
7066 return 1;
7068 if (TREE_CONSTANT (arg1))
7069 return 0;
7070 if (TREE_CONSTANT (arg0))
7071 return 1;
7073 if (optimize_function_for_size_p (cfun))
7074 return 0;
7076 if (reorder && flag_evaluation_order
7077 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7078 return 0;
7080 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7081 for commutative and comparison operators. Ensuring a canonical
7082 form allows the optimizers to find additional redundancies without
7083 having to explicitly check for both orderings. */
7084 if (TREE_CODE (arg0) == SSA_NAME
7085 && TREE_CODE (arg1) == SSA_NAME
7086 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7087 return 1;
7089 /* Put SSA_NAMEs last. */
7090 if (TREE_CODE (arg1) == SSA_NAME)
7091 return 0;
7092 if (TREE_CODE (arg0) == SSA_NAME)
7093 return 1;
7095 /* Put variables last. */
7096 if (DECL_P (arg1))
7097 return 0;
7098 if (DECL_P (arg0))
7099 return 1;
7101 return 0;
7104 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7105 ARG0 is extended to a wider type. */
7107 static tree
7108 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7110 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7111 tree arg1_unw;
7112 tree shorter_type, outer_type;
7113 tree min, max;
7114 bool above, below;
7116 if (arg0_unw == arg0)
7117 return NULL_TREE;
7118 shorter_type = TREE_TYPE (arg0_unw);
7120 #ifdef HAVE_canonicalize_funcptr_for_compare
7121 /* Disable this optimization if we're casting a function pointer
7122 type on targets that require function pointer canonicalization. */
7123 if (HAVE_canonicalize_funcptr_for_compare
7124 && TREE_CODE (shorter_type) == POINTER_TYPE
7125 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7126 return NULL_TREE;
7127 #endif
7129 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7130 return NULL_TREE;
7132 arg1_unw = get_unwidened (arg1, NULL_TREE);
7134 /* If possible, express the comparison in the shorter mode. */
7135 if ((code == EQ_EXPR || code == NE_EXPR
7136 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7137 && (TREE_TYPE (arg1_unw) == shorter_type
7138 || ((TYPE_PRECISION (shorter_type)
7139 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7140 && (TYPE_UNSIGNED (shorter_type)
7141 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7142 || (TREE_CODE (arg1_unw) == INTEGER_CST
7143 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7144 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7145 && int_fits_type_p (arg1_unw, shorter_type))))
7146 return fold_build2 (code, type, arg0_unw,
7147 fold_convert (shorter_type, arg1_unw));
7149 if (TREE_CODE (arg1_unw) != INTEGER_CST
7150 || TREE_CODE (shorter_type) != INTEGER_TYPE
7151 || !int_fits_type_p (arg1_unw, shorter_type))
7152 return NULL_TREE;
7154 /* If we are comparing with an integer that does not fit into the range
7155 of the shorter type, the result is known. */
7156 outer_type = TREE_TYPE (arg1_unw);
7157 min = lower_bound_in_type (outer_type, shorter_type);
7158 max = upper_bound_in_type (outer_type, shorter_type);
7160 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7161 max, arg1_unw));
7162 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7163 arg1_unw, min));
7165 switch (code)
7167 case EQ_EXPR:
7168 if (above || below)
7169 return omit_one_operand (type, integer_zero_node, arg0);
7170 break;
7172 case NE_EXPR:
7173 if (above || below)
7174 return omit_one_operand (type, integer_one_node, arg0);
7175 break;
7177 case LT_EXPR:
7178 case LE_EXPR:
7179 if (above)
7180 return omit_one_operand (type, integer_one_node, arg0);
7181 else if (below)
7182 return omit_one_operand (type, integer_zero_node, arg0);
7184 case GT_EXPR:
7185 case GE_EXPR:
7186 if (above)
7187 return omit_one_operand (type, integer_zero_node, arg0);
7188 else if (below)
7189 return omit_one_operand (type, integer_one_node, arg0);
7191 default:
7192 break;
7195 return NULL_TREE;
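/* For illustration, worked instances of the widened comparison
   (hedged; assumes 16-bit short and 32-bit int): for short s,

     (int) s == 70000   ->   false       70000 exceeds SHRT_MAX
     (int) s <  70000   ->   true
     (int) s == 123     ->   s == 123    the constant fits, so the
                                         comparison is narrowed  */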
7198 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7199 ARG0 just the signedness is changed. */
7201 static tree
7202 fold_sign_changed_comparison (enum tree_code code, tree type,
7203 tree arg0, tree arg1)
7205 tree arg0_inner;
7206 tree inner_type, outer_type;
7208 if (!CONVERT_EXPR_P (arg0))
7209 return NULL_TREE;
7211 outer_type = TREE_TYPE (arg0);
7212 arg0_inner = TREE_OPERAND (arg0, 0);
7213 inner_type = TREE_TYPE (arg0_inner);
7215 #ifdef HAVE_canonicalize_funcptr_for_compare
7216 /* Disable this optimization if we're casting a function pointer
7217 type on targets that require function pointer canonicalization. */
7218 if (HAVE_canonicalize_funcptr_for_compare
7219 && TREE_CODE (inner_type) == POINTER_TYPE
7220 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7221 return NULL_TREE;
7222 #endif
7224 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7225 return NULL_TREE;
7227 if (TREE_CODE (arg1) != INTEGER_CST
7228 && !(CONVERT_EXPR_P (arg1)
7229 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7230 return NULL_TREE;
7232 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7233 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7234 && code != NE_EXPR
7235 && code != EQ_EXPR)
7236 return NULL_TREE;
7238 if (TREE_CODE (arg1) == INTEGER_CST)
7239 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7240 TREE_INT_CST_HIGH (arg1), 0,
7241 TREE_OVERFLOW (arg1));
7242 else
7243 arg1 = fold_convert (inner_type, arg1);
7245 return fold_build2 (code, type, arg0_inner, arg1);
7248 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7249 the step of the array. Reconstructs s and delta in the case of s * delta
7250 being an integer constant (and thus already folded).
7251 ADDR is the address. OP1 is the multiplicative expression.
7252 If the function succeeds, the new address expression is returned. Otherwise
7253 NULL_TREE is returned. */
7255 static tree
7256 try_move_mult_to_index (tree addr, tree op1)
7258 tree s, delta, step;
7259 tree ref = TREE_OPERAND (addr, 0), pref;
7260 tree ret, pos;
7261 tree itype;
7262 bool mdim = false;
7264 /* Strip the nops that might be added when converting op1 to sizetype. */
7265 STRIP_NOPS (op1);
7267 /* Canonicalize op1 into a possibly non-constant delta
7268 and an INTEGER_CST s. */
7269 if (TREE_CODE (op1) == MULT_EXPR)
7271 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7273 STRIP_NOPS (arg0);
7274 STRIP_NOPS (arg1);
7276 if (TREE_CODE (arg0) == INTEGER_CST)
7278 s = arg0;
7279 delta = arg1;
7281 else if (TREE_CODE (arg1) == INTEGER_CST)
7283 s = arg1;
7284 delta = arg0;
7286 else
7287 return NULL_TREE;
7289 else if (TREE_CODE (op1) == INTEGER_CST)
7291 delta = op1;
7292 s = NULL_TREE;
7294 else
7296 /* Treat op1 as delta * 1. */
7297 delta = op1;
7298 s = integer_one_node;
7301 for (;; ref = TREE_OPERAND (ref, 0))
7303 if (TREE_CODE (ref) == ARRAY_REF)
7305 /* Remember if this was a multi-dimensional array. */
7306 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7307 mdim = true;
7309 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7310 if (! itype)
7311 continue;
7313 step = array_ref_element_size (ref);
7314 if (TREE_CODE (step) != INTEGER_CST)
7315 continue;
7317 if (s)
7319 if (! tree_int_cst_equal (step, s))
7320 continue;
7322 else
7324 /* Check whether delta is a multiple of step. */
7325 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7326 if (! tmp)
7327 continue;
7328 delta = tmp;
7331 /* Only fold here if we can verify we do not overflow one
7332 dimension of a multi-dimensional array. */
7333 if (mdim)
7335 tree tmp;
7337 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7338 || !INTEGRAL_TYPE_P (itype)
7339 || !TYPE_MAX_VALUE (itype)
7340 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7341 continue;
7343 tmp = fold_binary (PLUS_EXPR, itype,
7344 fold_convert (itype,
7345 TREE_OPERAND (ref, 1)),
7346 fold_convert (itype, delta));
7347 if (!tmp
7348 || TREE_CODE (tmp) != INTEGER_CST
7349 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7350 continue;
7353 break;
7355 else
7356 mdim = false;
7358 if (!handled_component_p (ref))
7359 return NULL_TREE;
7362 /* We found a suitable array reference. So copy everything up to it,
7363 and replace the index. */
7365 pref = TREE_OPERAND (addr, 0);
7366 ret = copy_node (pref);
7367 pos = ret;
7369 while (pref != ref)
7371 pref = TREE_OPERAND (pref, 0);
7372 TREE_OPERAND (pos, 0) = copy_node (pref);
7373 pos = TREE_OPERAND (pos, 0);
7376 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7377 fold_convert (itype,
7378 TREE_OPERAND (pos, 1)),
7379 fold_convert (itype, delta));
7381 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7385 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7386 means A >= Y && A != MAX, but in this case we know that
7387 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
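/* For example, A < 10 && A + 1 > 5 becomes A < 10 && A >= 5: the
   bound A < 10 rules out A == TYPE_MAX, which is the only case in
   which A + 1 > 5 and A >= 5 could differ.  */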
7389 static tree
7390 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7392 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7394 if (TREE_CODE (bound) == LT_EXPR)
7395 a = TREE_OPERAND (bound, 0);
7396 else if (TREE_CODE (bound) == GT_EXPR)
7397 a = TREE_OPERAND (bound, 1);
7398 else
7399 return NULL_TREE;
7401 typea = TREE_TYPE (a);
7402 if (!INTEGRAL_TYPE_P (typea)
7403 && !POINTER_TYPE_P (typea))
7404 return NULL_TREE;
7406 if (TREE_CODE (ineq) == LT_EXPR)
7408 a1 = TREE_OPERAND (ineq, 1);
7409 y = TREE_OPERAND (ineq, 0);
7411 else if (TREE_CODE (ineq) == GT_EXPR)
7413 a1 = TREE_OPERAND (ineq, 0);
7414 y = TREE_OPERAND (ineq, 1);
7416 else
7417 return NULL_TREE;
7419 if (TREE_TYPE (a1) != typea)
7420 return NULL_TREE;
7422 if (POINTER_TYPE_P (typea))
7424 /* Convert the pointers to integers before taking the difference. */
7425 tree ta = fold_convert (ssizetype, a);
7426 tree ta1 = fold_convert (ssizetype, a1);
7427 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7429 else
7430 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7432 if (!diff || !integer_onep (diff))
7433 return NULL_TREE;
7435 return fold_build2 (GE_EXPR, type, a, y);
7438 /* Fold a sum or difference of at least one multiplication.
7439 Returns the folded tree or NULL_TREE if no simplification could be made. */
7441 static tree
7442 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7444 tree arg00, arg01, arg10, arg11;
7445 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7447 /* (A * C) +- (B * C) -> (A+-B) * C.
7448 (A * C) +- A -> A * (C+-1).
7449 We are most concerned about the case where C is a constant,
7450 but other combinations show up during loop reduction. Since
7451 it is not difficult, try all four possibilities. */
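/* A few concrete instances of the patterns handled here:
     x * 3 + y * 3  ->  (x + y) * 3
     x * 3 - x      ->  x * (3 - 1)  ==  x * 2
     x + x * 3      ->  x * (1 + 3)  ==  x * 4
   INTEGER_CST operands are treated as 1 * CST below so that they fit
   the same (A * C) +- (B * C) shape.  */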
7453 if (TREE_CODE (arg0) == MULT_EXPR)
7455 arg00 = TREE_OPERAND (arg0, 0);
7456 arg01 = TREE_OPERAND (arg0, 1);
7458 else if (TREE_CODE (arg0) == INTEGER_CST)
7460 arg00 = build_one_cst (type);
7461 arg01 = arg0;
7463 else
7465 /* We cannot generate constant 1 for fract. */
7466 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7467 return NULL_TREE;
7468 arg00 = arg0;
7469 arg01 = build_one_cst (type);
7471 if (TREE_CODE (arg1) == MULT_EXPR)
7473 arg10 = TREE_OPERAND (arg1, 0);
7474 arg11 = TREE_OPERAND (arg1, 1);
7476 else if (TREE_CODE (arg1) == INTEGER_CST)
7478 arg10 = build_one_cst (type);
7479 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7480 the purpose of this canonicalization. */
7481 if (TREE_INT_CST_HIGH (arg1) == -1
7482 && negate_expr_p (arg1)
7483 && code == PLUS_EXPR)
7485 arg11 = negate_expr (arg1);
7486 code = MINUS_EXPR;
7488 else
7489 arg11 = arg1;
7491 else
7493 /* We cannot generate constant 1 for fract. */
7494 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7495 return NULL_TREE;
7496 arg10 = arg1;
7497 arg11 = build_one_cst (type);
7499 same = NULL_TREE;
7501 if (operand_equal_p (arg01, arg11, 0))
7502 same = arg01, alt0 = arg00, alt1 = arg10;
7503 else if (operand_equal_p (arg00, arg10, 0))
7504 same = arg00, alt0 = arg01, alt1 = arg11;
7505 else if (operand_equal_p (arg00, arg11, 0))
7506 same = arg00, alt0 = arg01, alt1 = arg10;
7507 else if (operand_equal_p (arg01, arg10, 0))
7508 same = arg01, alt0 = arg00, alt1 = arg11;
7510 /* No identical multiplicands; see if we can find a common
7511 power-of-two factor in non-power-of-two multiplies. This
7512 can help in multi-dimensional array access. */
7513 else if (host_integerp (arg01, 0)
7514 && host_integerp (arg11, 0))
7516 HOST_WIDE_INT int01, int11, tmp;
7517 bool swap = false;
7518 tree maybe_same;
7519 int01 = TREE_INT_CST_LOW (arg01);
7520 int11 = TREE_INT_CST_LOW (arg11);
7522 /* Move min of absolute values to int11. */
7523 if ((int01 >= 0 ? int01 : -int01)
7524 < (int11 >= 0 ? int11 : -int11))
7526 tmp = int01, int01 = int11, int11 = tmp;
7527 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7528 maybe_same = arg01;
7529 swap = true;
7531 else
7532 maybe_same = arg11;
7534 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7535 /* The remainder should not be a constant, otherwise we
7536 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7537 increase the number of multiplications necessary. */
7538 && TREE_CODE (arg10) != INTEGER_CST)
7540 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7541 build_int_cst (TREE_TYPE (arg00),
7542 int01 / int11));
7543 alt1 = arg10;
7544 same = maybe_same;
7545 if (swap)
7546 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7550 if (same)
7551 return fold_build2 (MULT_EXPR, type,
7552 fold_build2 (code, type,
7553 fold_convert (type, alt0),
7554 fold_convert (type, alt1)),
7555 fold_convert (type, same));
7557 return NULL_TREE;
7560 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7561 specified by EXPR into the buffer PTR of length LEN bytes.
7562 Return the number of bytes placed in the buffer, or zero
7563 upon failure. */
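/* For instance (assuming 8-bit bytes), the 32-bit INTEGER_CST
   0x11223344 is encoded as the bytes
     44 33 22 11   on a little-endian target, or
     11 22 33 44   on a big-endian target;
   the word shuffling below covers targets whose word order and byte
   order differ.  */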
7565 static int
7566 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7568 tree type = TREE_TYPE (expr);
7569 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7570 int byte, offset, word, words;
7571 unsigned char value;
7573 if (total_bytes > len)
7574 return 0;
7575 words = total_bytes / UNITS_PER_WORD;
7577 for (byte = 0; byte < total_bytes; byte++)
7579 int bitpos = byte * BITS_PER_UNIT;
7580 if (bitpos < HOST_BITS_PER_WIDE_INT)
7581 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7582 else
7583 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7584 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7586 if (total_bytes > UNITS_PER_WORD)
7588 word = byte / UNITS_PER_WORD;
7589 if (WORDS_BIG_ENDIAN)
7590 word = (words - 1) - word;
7591 offset = word * UNITS_PER_WORD;
7592 if (BYTES_BIG_ENDIAN)
7593 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7594 else
7595 offset += byte % UNITS_PER_WORD;
7597 else
7598 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7599 ptr[offset] = value;
7601 return total_bytes;
7605 /* Subroutine of native_encode_expr. Encode the REAL_CST
7606 specified by EXPR into the buffer PTR of length LEN bytes.
7607 Return the number of bytes placed in the buffer, or zero
7608 upon failure. */
7610 static int
7611 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7613 tree type = TREE_TYPE (expr);
7614 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7615 int byte, offset, word, words, bitpos;
7616 unsigned char value;
7618 /* There are always 32 bits in each long, no matter the size of
7619 the host's long. We handle floating point representations with
7620 up to 192 bits. */
7621 long tmp[6];
7623 if (total_bytes > len)
7624 return 0;
7625 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7627 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7629 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7630 bitpos += BITS_PER_UNIT)
7632 byte = (bitpos / BITS_PER_UNIT) & 3;
7633 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7635 if (UNITS_PER_WORD < 4)
7637 word = byte / UNITS_PER_WORD;
7638 if (WORDS_BIG_ENDIAN)
7639 word = (words - 1) - word;
7640 offset = word * UNITS_PER_WORD;
7641 if (BYTES_BIG_ENDIAN)
7642 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7643 else
7644 offset += byte % UNITS_PER_WORD;
7646 else
7647 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7648 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7650 return total_bytes;
7653 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7654 specified by EXPR into the buffer PTR of length LEN bytes.
7655 Return the number of bytes placed in the buffer, or zero
7656 upon failure. */
7658 static int
7659 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7661 int rsize, isize;
7662 tree part;
7664 part = TREE_REALPART (expr);
7665 rsize = native_encode_expr (part, ptr, len);
7666 if (rsize == 0)
7667 return 0;
7668 part = TREE_IMAGPART (expr);
7669 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7670 if (isize != rsize)
7671 return 0;
7672 return rsize + isize;
7676 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7677 specified by EXPR into the buffer PTR of length LEN bytes.
7678 Return the number of bytes placed in the buffer, or zero
7679 upon failure. */
7681 static int
7682 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7684 int i, size, offset, count;
7685 tree itype, elem, elements;
7687 offset = 0;
7688 elements = TREE_VECTOR_CST_ELTS (expr);
7689 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7690 itype = TREE_TYPE (TREE_TYPE (expr));
7691 size = GET_MODE_SIZE (TYPE_MODE (itype));
7692 for (i = 0; i < count; i++)
7694 if (elements)
7696 elem = TREE_VALUE (elements);
7697 elements = TREE_CHAIN (elements);
7699 else
7700 elem = NULL_TREE;
7702 if (elem)
7704 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7705 return 0;
7707 else
7709 if (offset + size > len)
7710 return 0;
7711 memset (ptr+offset, 0, size);
7713 offset += size;
7715 return offset;
7719 /* Subroutine of native_encode_expr. Encode the STRING_CST
7720 specified by EXPR into the buffer PTR of length LEN bytes.
7721 Return the number of bytes placed in the buffer, or zero
7722 upon failure. */
7724 static int
7725 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7727 tree type = TREE_TYPE (expr);
7728 HOST_WIDE_INT total_bytes;
7730 if (TREE_CODE (type) != ARRAY_TYPE
7731 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7732 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7733 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7734 return 0;
7735 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7736 if (total_bytes > len)
7737 return 0;
7738 if (TREE_STRING_LENGTH (expr) < total_bytes)
7740 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7741 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7742 total_bytes - TREE_STRING_LENGTH (expr));
7744 else
7745 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7746 return total_bytes;
7750 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7751 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR
7752 into the buffer PTR of length LEN bytes. Return the number of bytes
7753 placed in the buffer, or zero upon failure. */
7755 int
7756 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7758 switch (TREE_CODE (expr))
7760 case INTEGER_CST:
7761 return native_encode_int (expr, ptr, len);
7763 case REAL_CST:
7764 return native_encode_real (expr, ptr, len);
7766 case COMPLEX_CST:
7767 return native_encode_complex (expr, ptr, len);
7769 case VECTOR_CST:
7770 return native_encode_vector (expr, ptr, len);
7772 case STRING_CST:
7773 return native_encode_string (expr, ptr, len);
7775 default:
7776 return 0;
7781 /* Subroutine of native_interpret_expr. Interpret the contents of
7782 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7783 If the buffer cannot be interpreted, return NULL_TREE. */
7785 static tree
7786 native_interpret_int (tree type, const unsigned char *ptr, int len)
7788 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7789 int byte, offset, word, words;
7790 unsigned char value;
7791 unsigned HOST_WIDE_INT lo = 0;
7792 HOST_WIDE_INT hi = 0;
7794 if (total_bytes > len)
7795 return NULL_TREE;
7796 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7797 return NULL_TREE;
7798 words = total_bytes / UNITS_PER_WORD;
7800 for (byte = 0; byte < total_bytes; byte++)
7802 int bitpos = byte * BITS_PER_UNIT;
7803 if (total_bytes > UNITS_PER_WORD)
7805 word = byte / UNITS_PER_WORD;
7806 if (WORDS_BIG_ENDIAN)
7807 word = (words - 1) - word;
7808 offset = word * UNITS_PER_WORD;
7809 if (BYTES_BIG_ENDIAN)
7810 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7811 else
7812 offset += byte % UNITS_PER_WORD;
7814 else
7815 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7816 value = ptr[offset];
7818 if (bitpos < HOST_BITS_PER_WIDE_INT)
7819 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7820 else
7821 hi |= (unsigned HOST_WIDE_INT) value
7822 << (bitpos - HOST_BITS_PER_WIDE_INT);
7825 return build_int_cst_wide_type (type, lo, hi);
7829 /* Subroutine of native_interpret_expr. Interpret the contents of
7830 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7831 If the buffer cannot be interpreted, return NULL_TREE. */
7833 static tree
7834 native_interpret_real (tree type, const unsigned char *ptr, int len)
7836 enum machine_mode mode = TYPE_MODE (type);
7837 int total_bytes = GET_MODE_SIZE (mode);
7838 int byte, offset, word, words, bitpos;
7839 unsigned char value;
7840 /* There are always 32 bits in each long, no matter the size of
7841 the host's long. We handle floating point representations with
7842 up to 192 bits. */
7843 REAL_VALUE_TYPE r;
7844 long tmp[6];
7846 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7847 if (total_bytes > len || total_bytes > 24)
7848 return NULL_TREE;
7849 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7851 memset (tmp, 0, sizeof (tmp));
7852 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7853 bitpos += BITS_PER_UNIT)
7855 byte = (bitpos / BITS_PER_UNIT) & 3;
7856 if (UNITS_PER_WORD < 4)
7858 word = byte / UNITS_PER_WORD;
7859 if (WORDS_BIG_ENDIAN)
7860 word = (words - 1) - word;
7861 offset = word * UNITS_PER_WORD;
7862 if (BYTES_BIG_ENDIAN)
7863 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7864 else
7865 offset += byte % UNITS_PER_WORD;
7867 else
7868 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7869 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7871 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7874 real_from_target (&r, tmp, mode);
7875 return build_real (type, r);
7879 /* Subroutine of native_interpret_expr. Interpret the contents of
7880 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7881 If the buffer cannot be interpreted, return NULL_TREE. */
7883 static tree
7884 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7886 tree etype, rpart, ipart;
7887 int size;
7889 etype = TREE_TYPE (type);
7890 size = GET_MODE_SIZE (TYPE_MODE (etype));
7891 if (size * 2 > len)
7892 return NULL_TREE;
7893 rpart = native_interpret_expr (etype, ptr, size);
7894 if (!rpart)
7895 return NULL_TREE;
7896 ipart = native_interpret_expr (etype, ptr+size, size);
7897 if (!ipart)
7898 return NULL_TREE;
7899 return build_complex (type, rpart, ipart);
7903 /* Subroutine of native_interpret_expr. Interpret the contents of
7904 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7905 If the buffer cannot be interpreted, return NULL_TREE. */
7907 static tree
7908 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7910 tree etype, elem, elements;
7911 int i, size, count;
7913 etype = TREE_TYPE (type);
7914 size = GET_MODE_SIZE (TYPE_MODE (etype));
7915 count = TYPE_VECTOR_SUBPARTS (type);
7916 if (size * count > len)
7917 return NULL_TREE;
7919 elements = NULL_TREE;
7920 for (i = count - 1; i >= 0; i--)
7922 elem = native_interpret_expr (etype, ptr+(i*size), size);
7923 if (!elem)
7924 return NULL_TREE;
7925 elements = tree_cons (NULL_TREE, elem, elements);
7927 return build_vector (type, elements);
7931 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7932 the buffer PTR of length LEN as a constant of type TYPE. For
7933 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7934 we return a REAL_CST, and so on. If the buffer cannot be interpreted,
7935 return NULL_TREE. */
7937 tree
7938 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7940 switch (TREE_CODE (type))
7942 case INTEGER_TYPE:
7943 case ENUMERAL_TYPE:
7944 case BOOLEAN_TYPE:
7945 return native_interpret_int (type, ptr, len);
7947 case REAL_TYPE:
7948 return native_interpret_real (type, ptr, len);
7950 case COMPLEX_TYPE:
7951 return native_interpret_complex (type, ptr, len);
7953 case VECTOR_TYPE:
7954 return native_interpret_vector (type, ptr, len);
7956 default:
7957 return NULL_TREE;
7962 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7963 TYPE at compile-time. If we're unable to perform the conversion,
7964 return NULL_TREE. */
7966 static tree
7967 fold_view_convert_expr (tree type, tree expr)
7969 /* We support up to 512-bit values (for V8DFmode). */
7970 unsigned char buffer[64];
7971 int len;
7973 /* Check that the host and target are sane. */
7974 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7975 return NULL_TREE;
7977 len = native_encode_expr (expr, buffer, sizeof (buffer));
7978 if (len == 0)
7979 return NULL_TREE;
7981 return native_interpret_expr (type, buffer, len);
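/* The net effect is a compile-time type pun through memory; a rough
   host-side analogue (assuming 8-bit bytes and 32-bit float and
   unsigned int, illustration only) is:

     unsigned char buf[4];
     float f = 1.0f;
     unsigned int u;
     memcpy (buf, &f, 4);   // the native_encode_expr step
     memcpy (&u, buf, 4);   // the native_interpret_expr step

   so VIEW_CONVERT_EXPR<unsigned int>(1.0f) folds to 0x3f800000 with
   IEEE single precision.  */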
7984 /* Build an expression for the address of T. Folds away INDIRECT_REF
7985 to avoid confusing the gimplify process. */
7987 tree
7988 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7990 /* The size of the object is not relevant when talking about its address. */
7991 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7992 t = TREE_OPERAND (t, 0);
7994 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7995 if (TREE_CODE (t) == INDIRECT_REF
7996 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7998 t = TREE_OPERAND (t, 0);
8000 if (TREE_TYPE (t) != ptrtype)
8001 t = build1 (NOP_EXPR, ptrtype, t);
8003 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8005 t = build_fold_addr_expr (TREE_OPERAND (t, 0));
8007 if (TREE_TYPE (t) != ptrtype)
8008 t = fold_convert (ptrtype, t);
8010 else
8011 t = build1 (ADDR_EXPR, ptrtype, t);
8013 return t;
8016 /* Build an expression for the address of T. */
8018 tree
8019 build_fold_addr_expr (tree t)
8021 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8023 return build_fold_addr_expr_with_type (t, ptrtype);
8026 /* Fold a unary expression of code CODE and type TYPE with operand
8027 OP0. Return the folded expression if folding is successful.
8028 Otherwise, return NULL_TREE. */
8030 tree
8031 fold_unary (enum tree_code code, tree type, tree op0)
8033 tree tem;
8034 tree arg0;
8035 enum tree_code_class kind = TREE_CODE_CLASS (code);
8037 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8038 && TREE_CODE_LENGTH (code) == 1);
8040 arg0 = op0;
8041 if (arg0)
8043 if (CONVERT_EXPR_CODE_P (code)
8044 || code == FLOAT_EXPR || code == ABS_EXPR)
8046 /* Don't use STRIP_NOPS, because the signedness of the argument
8047 type matters. */
8048 STRIP_SIGN_NOPS (arg0);
8050 else
8052 /* Strip any conversions that don't change the mode. This
8053 is safe for every expression, except for a comparison
8054 expression because its signedness is derived from its
8055 operands.
8057 Note that this is done as an internal manipulation within
8058 the constant folder, in order to find the simplest
8059 representation of the arguments so that their form can be
8060 studied. In any case, the appropriate type conversions
8061 should be put back in the tree that eventually comes out of
8062 the constant folder. */
8063 STRIP_NOPS (arg0);
8067 if (TREE_CODE_CLASS (code) == tcc_unary)
8069 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8070 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8071 fold_build1 (code, type,
8072 fold_convert (TREE_TYPE (op0),
8073 TREE_OPERAND (arg0, 1))));
8074 else if (TREE_CODE (arg0) == COND_EXPR)
8076 tree arg01 = TREE_OPERAND (arg0, 1);
8077 tree arg02 = TREE_OPERAND (arg0, 2);
8078 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8079 arg01 = fold_build1 (code, type,
8080 fold_convert (TREE_TYPE (op0), arg01));
8081 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8082 arg02 = fold_build1 (code, type,
8083 fold_convert (TREE_TYPE (op0), arg02));
8084 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8085 arg01, arg02);
8087 /* If this was a conversion, and all we did was to move it
8088 inside the COND_EXPR, bring it back out. But leave it if
8089 it is a conversion from integer to integer and the
8090 result precision is no wider than a word since such a
8091 conversion is cheap and may be optimized away by combine,
8092 while it couldn't if it were outside the COND_EXPR. Then return
8093 so we don't get into an infinite recursion loop taking the
8094 conversion out and then back in. */
8096 if ((CONVERT_EXPR_CODE_P (code)
8097 || code == NON_LVALUE_EXPR)
8098 && TREE_CODE (tem) == COND_EXPR
8099 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8100 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8101 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8102 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8103 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8104 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8105 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8106 && (INTEGRAL_TYPE_P
8107 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8108 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8109 || flag_syntax_only))
8110 tem = build1 (code, type,
8111 build3 (COND_EXPR,
8112 TREE_TYPE (TREE_OPERAND
8113 (TREE_OPERAND (tem, 1), 0)),
8114 TREE_OPERAND (tem, 0),
8115 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8116 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8117 return tem;
8119 else if (COMPARISON_CLASS_P (arg0))
8121 if (TREE_CODE (type) == BOOLEAN_TYPE)
8123 arg0 = copy_node (arg0);
8124 TREE_TYPE (arg0) = type;
8125 return arg0;
8127 else if (TREE_CODE (type) != INTEGER_TYPE)
8128 return fold_build3 (COND_EXPR, type, arg0,
8129 fold_build1 (code, type,
8130 integer_one_node),
8131 fold_build1 (code, type,
8132 integer_zero_node));
8136 switch (code)
8138 case PAREN_EXPR:
8139 /* Re-association barriers around constants and other re-association
8140 barriers can be removed. */
8141 if (CONSTANT_CLASS_P (op0)
8142 || TREE_CODE (op0) == PAREN_EXPR)
8143 return fold_convert (type, op0);
8144 return NULL_TREE;
8146 CASE_CONVERT:
8147 case FLOAT_EXPR:
8148 case FIX_TRUNC_EXPR:
8149 if (TREE_TYPE (op0) == type)
8150 return op0;
8152 /* If we have (type) (a CMP b) and type is an integral type, return a
8153 new expression involving the new type. */
8154 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8155 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8156 TREE_OPERAND (op0, 1));
8158 /* Handle cases of two conversions in a row. */
8159 if (CONVERT_EXPR_P (op0))
8161 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8162 tree inter_type = TREE_TYPE (op0);
8163 int inside_int = INTEGRAL_TYPE_P (inside_type);
8164 int inside_ptr = POINTER_TYPE_P (inside_type);
8165 int inside_float = FLOAT_TYPE_P (inside_type);
8166 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8167 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8168 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8169 int inter_int = INTEGRAL_TYPE_P (inter_type);
8170 int inter_ptr = POINTER_TYPE_P (inter_type);
8171 int inter_float = FLOAT_TYPE_P (inter_type);
8172 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8173 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8174 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8175 int final_int = INTEGRAL_TYPE_P (type);
8176 int final_ptr = POINTER_TYPE_P (type);
8177 int final_float = FLOAT_TYPE_P (type);
8178 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8179 unsigned int final_prec = TYPE_PRECISION (type);
8180 int final_unsignedp = TYPE_UNSIGNED (type);
8182 /* In addition to the cases of two conversions in a row
8183 handled below, if we are converting something to its own
8184 type via an object of identical or wider precision, neither
8185 conversion is needed. */
8186 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8187 && (((inter_int || inter_ptr) && final_int)
8188 || (inter_float && final_float))
8189 && inter_prec >= final_prec)
8190 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8192 /* Likewise, if the intermediate and initial types are either both
8193 float or both integer, we don't need the middle conversion if the
8194 former is wider than the latter and doesn't change the signedness
8195 (for integers). Avoid this if the final type is a pointer since
8196 then we sometimes need the middle conversion. Likewise if the
8197 final type has a precision not equal to the size of its mode. */
8198 if (((inter_int && inside_int)
8199 || (inter_float && inside_float)
8200 || (inter_vec && inside_vec))
8201 && inter_prec >= inside_prec
8202 && (inter_float || inter_vec
8203 || inter_unsignedp == inside_unsignedp)
8204 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8205 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8206 && ! final_ptr
8207 && (! final_vec || inter_prec == inside_prec))
8208 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8210 /* If we have a sign-extension of a zero-extended value, we can
8211 replace that by a single zero-extension. */
8212 if (inside_int && inter_int && final_int
8213 && inside_prec < inter_prec && inter_prec < final_prec
8214 && inside_unsignedp && !inter_unsignedp)
8215 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8217 /* Two conversions in a row are not needed unless:
8218 - some conversion is floating-point (overstrict for now), or
8219 - some conversion is a vector (overstrict for now), or
8220 - the intermediate type is narrower than both initial and
8221 final, or
8222 - the intermediate type and innermost type differ in signedness,
8223 and the outermost type is wider than the intermediate, or
8224 - the initial type is a pointer type and the precisions of the
8225 intermediate and final types differ, or
8226 - the final type is a pointer type and the precisions of the
8227 initial and intermediate types differ. */
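/* For example, with 32-bit int and long (ILP32), (int) (long) x for
   int x collapses to plain x: the intermediate type is at least as
   wide as both ends and has the same signedness. By contrast,
   (int) (char) x must keep the intermediate truncation.  */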
8228 if (! inside_float && ! inter_float && ! final_float
8229 && ! inside_vec && ! inter_vec && ! final_vec
8230 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8231 && ! (inside_int && inter_int
8232 && inter_unsignedp != inside_unsignedp
8233 && inter_prec < final_prec)
8234 && ((inter_unsignedp && inter_prec > inside_prec)
8235 == (final_unsignedp && final_prec > inter_prec))
8236 && ! (inside_ptr && inter_prec != final_prec)
8237 && ! (final_ptr && inside_prec != inter_prec)
8238 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8239 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8240 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8243 /* Handle (T *)&A.B.C for A being of type T and B and C
8244 living at offset zero. This occurs frequently in
8245 C++ upcasting and then accessing the base. */
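/* For example, given

     struct B { int i; };
     struct A { struct B b; } a;

   the expression (struct A *) &a.b.i folds to &a, because both b and
   i live at offset zero within a and the base type matches.  */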
8246 if (TREE_CODE (op0) == ADDR_EXPR
8247 && POINTER_TYPE_P (type)
8248 && handled_component_p (TREE_OPERAND (op0, 0)))
8250 HOST_WIDE_INT bitsize, bitpos;
8251 tree offset;
8252 enum machine_mode mode;
8253 int unsignedp, volatilep;
8254 tree base = TREE_OPERAND (op0, 0);
8255 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8256 &mode, &unsignedp, &volatilep, false);
8257 /* If the reference was to a (constant) zero offset, we can use
8258 the address of the base if it has the same base type
8259 as the result type. */
8260 if (! offset && bitpos == 0
8261 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8262 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8263 return fold_convert (type, build_fold_addr_expr (base));
8266 if (TREE_CODE (op0) == MODIFY_EXPR
8267 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8268 /* Detect assigning a bitfield. */
8269 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8270 && DECL_BIT_FIELD
8271 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8273 /* Don't leave an assignment inside a conversion
8274 unless assigning a bitfield. */
8275 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8276 /* First do the assignment, then return the converted constant. */
8277 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8278 TREE_NO_WARNING (tem) = 1;
8279 TREE_USED (tem) = 1;
8280 return tem;
8283 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8284 constant (if x has signed type, the sign bit cannot be set
8285 in c). This folds the extension into the BIT_AND_EXPR.
8286 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8287 very likely don't have maximal range for their precision and this
8288 transformation effectively doesn't preserve non-maximal ranges. */
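/* For instance, for unsigned char x,
     (unsigned int) (x & 0x7f)  ->  (unsigned int) x & 0x7f
   which lets the widening combine with other casts on x instead of
   being blocked by the mask.  */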
8289 if (TREE_CODE (type) == INTEGER_TYPE
8290 && TREE_CODE (op0) == BIT_AND_EXPR
8291 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8293 tree and_expr = op0;
8294 tree and0 = TREE_OPERAND (and_expr, 0);
8295 tree and1 = TREE_OPERAND (and_expr, 1);
8296 int change = 0;
8298 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8299 || (TYPE_PRECISION (type)
8300 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8301 change = 1;
8302 else if (TYPE_PRECISION (TREE_TYPE (and1))
8303 <= HOST_BITS_PER_WIDE_INT
8304 && host_integerp (and1, 1))
8306 unsigned HOST_WIDE_INT cst;
8308 cst = tree_low_cst (and1, 1);
8309 cst &= (HOST_WIDE_INT) -1
8310 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8311 change = (cst == 0);
8312 #ifdef LOAD_EXTEND_OP
8313 if (change
8314 && !flag_syntax_only
8315 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8316 == ZERO_EXTEND))
8318 tree uns = unsigned_type_for (TREE_TYPE (and0));
8319 and0 = fold_convert (uns, and0);
8320 and1 = fold_convert (uns, and1);
8322 #endif
8324 if (change)
8326 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8327 TREE_INT_CST_HIGH (and1), 0,
8328 TREE_OVERFLOW (and1));
8329 return fold_build2 (BIT_AND_EXPR, type,
8330 fold_convert (type, and0), tem);
8334 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8335 when one of the new casts will fold away. Conservatively we assume
8336 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8337 if (POINTER_TYPE_P (type)
8338 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8339 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8340 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8341 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8343 tree arg00 = TREE_OPERAND (arg0, 0);
8344 tree arg01 = TREE_OPERAND (arg0, 1);
8346 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8347 fold_convert (sizetype, arg01));
8350 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8351 of the same precision, and X is an integer type not narrower than
8352 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
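/* For example, for unsigned int x (with int of equal precision),
     (unsigned int) ~ (int) x  ->  ~x
   since the complement commutes with a precision-preserving,
   sign-changing cast.  */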
8353 if (INTEGRAL_TYPE_P (type)
8354 && TREE_CODE (op0) == BIT_NOT_EXPR
8355 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8356 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8357 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8359 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8360 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8361 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8362 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8365 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8366 type of X and Y (integer types only). */
8367 if (INTEGRAL_TYPE_P (type)
8368 && TREE_CODE (op0) == MULT_EXPR
8369 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8370 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8372 /* Be careful not to introduce new overflows. */
8373 tree mult_type;
8374 if (TYPE_OVERFLOW_WRAPS (type))
8375 mult_type = type;
8376 else
8377 mult_type = unsigned_type_for (type);
8379 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8381 tem = fold_build2 (MULT_EXPR, mult_type,
8382 fold_convert (mult_type,
8383 TREE_OPERAND (op0, 0)),
8384 fold_convert (mult_type,
8385 TREE_OPERAND (op0, 1)));
8386 return fold_convert (type, tem);
8390 tem = fold_convert_const (code, type, op0);
8391 return tem ? tem : NULL_TREE;
8393 case FIXED_CONVERT_EXPR:
8394 tem = fold_convert_const (code, type, arg0);
8395 return tem ? tem : NULL_TREE;
8397 case VIEW_CONVERT_EXPR:
8398 if (TREE_TYPE (op0) == type)
8399 return op0;
8400 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8401 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8403 /* For integral conversions with the same precision, or for pointer
8404 conversions, use a NOP_EXPR instead. */
8405 if ((INTEGRAL_TYPE_P (type)
8406 || POINTER_TYPE_P (type))
8407 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8408 || POINTER_TYPE_P (TREE_TYPE (op0)))
8409 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8410 return fold_convert (type, op0);
8412 /* Strip inner integral conversions that do not change the precision. */
8413 if (CONVERT_EXPR_P (op0)
8414 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8415 || POINTER_TYPE_P (TREE_TYPE (op0)))
8416 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8417 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8418 && (TYPE_PRECISION (TREE_TYPE (op0))
8419 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8420 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8422 return fold_view_convert_expr (type, op0);
8424 case NEGATE_EXPR:
8425 tem = fold_negate_expr (arg0);
8426 if (tem)
8427 return fold_convert (type, tem);
8428 return NULL_TREE;
8430 case ABS_EXPR:
8431 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8432 return fold_abs_const (arg0, type);
8433 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8434 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8435 /* Convert fabs((double)float) into (double)fabsf(float). */
8436 else if (TREE_CODE (arg0) == NOP_EXPR
8437 && TREE_CODE (type) == REAL_TYPE)
8439 tree targ0 = strip_float_extensions (arg0);
8440 if (targ0 != arg0)
8441 return fold_convert (type, fold_build1 (ABS_EXPR,
8442 TREE_TYPE (targ0),
8443 targ0));
8445 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8446 else if (TREE_CODE (arg0) == ABS_EXPR)
8447 return arg0;
8448 else if (tree_expr_nonnegative_p (arg0))
8449 return arg0;
8451 /* Strip sign ops from argument. */
8452 if (TREE_CODE (type) == REAL_TYPE)
8454 tem = fold_strip_sign_ops (arg0);
8455 if (tem)
8456 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8458 return NULL_TREE;
8460 case CONJ_EXPR:
8461 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8462 return fold_convert (type, arg0);
8463 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8465 tree itype = TREE_TYPE (type);
8466 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8467 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8468 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8470 if (TREE_CODE (arg0) == COMPLEX_CST)
8472 tree itype = TREE_TYPE (type);
8473 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8474 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8475 return build_complex (type, rpart, negate_expr (ipart));
8477 if (TREE_CODE (arg0) == CONJ_EXPR)
8478 return fold_convert (type, TREE_OPERAND (arg0, 0));
8479 return NULL_TREE;
8481 case BIT_NOT_EXPR:
8482 if (TREE_CODE (arg0) == INTEGER_CST)
8483 return fold_not_const (arg0, type);
8484 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8485 return fold_convert (type, TREE_OPERAND (arg0, 0));
8486 /* Convert ~ (-A) to A - 1. */
8487 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8488 return fold_build2 (MINUS_EXPR, type,
8489 fold_convert (type, TREE_OPERAND (arg0, 0)),
8490 build_int_cst (type, 1));
8491 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8492 else if (INTEGRAL_TYPE_P (type)
8493 && ((TREE_CODE (arg0) == MINUS_EXPR
8494 && integer_onep (TREE_OPERAND (arg0, 1)))
8495 || (TREE_CODE (arg0) == PLUS_EXPR
8496 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8497 return fold_build1 (NEGATE_EXPR, type,
8498 fold_convert (type, TREE_OPERAND (arg0, 0)));
8499 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8500 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8501 && (tem = fold_unary (BIT_NOT_EXPR, type,
8502 fold_convert (type,
8503 TREE_OPERAND (arg0, 0)))))
8504 return fold_build2 (BIT_XOR_EXPR, type, tem,
8505 fold_convert (type, TREE_OPERAND (arg0, 1)));
8506 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8507 && (tem = fold_unary (BIT_NOT_EXPR, type,
8508 fold_convert (type,
8509 TREE_OPERAND (arg0, 1)))))
8510 return fold_build2 (BIT_XOR_EXPR, type,
8511 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8512 /* Perform BIT_NOT_EXPR on each element individually. */
8513 else if (TREE_CODE (arg0) == VECTOR_CST)
8515 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8516 int count = TYPE_VECTOR_SUBPARTS (type), i;
8518 for (i = 0; i < count; i++)
8520 if (elements)
8522 elem = TREE_VALUE (elements);
8523 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8524 if (elem == NULL_TREE)
8525 break;
8526 elements = TREE_CHAIN (elements);
8528 else
8529 elem = build_int_cst (TREE_TYPE (type), -1);
8530 list = tree_cons (NULL_TREE, elem, list);
8532 if (i == count)
8533 return build_vector (type, nreverse (list));
8536 return NULL_TREE;
8538 case TRUTH_NOT_EXPR:
8539 /* The argument to invert_truthvalue must have Boolean type. */
8540 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8541 arg0 = fold_convert (boolean_type_node, arg0);
8543 /* Note that the operand of this must be an int
8544 and its value must be 0 or 1.
8545 ("true" is a fixed value, perhaps depending on the language,
8546 but we don't handle values other than 1 correctly yet.) */
8547 tem = fold_truth_not_expr (arg0);
8548 if (!tem)
8549 return NULL_TREE;
8550 return fold_convert (type, tem);
8552 case REALPART_EXPR:
8553 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8554 return fold_convert (type, arg0);
8555 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8556 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8557 TREE_OPERAND (arg0, 1));
8558 if (TREE_CODE (arg0) == COMPLEX_CST)
8559 return fold_convert (type, TREE_REALPART (arg0));
8560 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8562 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8563 tem = fold_build2 (TREE_CODE (arg0), itype,
8564 fold_build1 (REALPART_EXPR, itype,
8565 TREE_OPERAND (arg0, 0)),
8566 fold_build1 (REALPART_EXPR, itype,
8567 TREE_OPERAND (arg0, 1)));
8568 return fold_convert (type, tem);
8570 if (TREE_CODE (arg0) == CONJ_EXPR)
8572 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8573 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8574 return fold_convert (type, tem);
8576 if (TREE_CODE (arg0) == CALL_EXPR)
8578 tree fn = get_callee_fndecl (arg0);
8579 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8580 switch (DECL_FUNCTION_CODE (fn))
8582 CASE_FLT_FN (BUILT_IN_CEXPI):
8583 fn = mathfn_built_in (type, BUILT_IN_COS);
8584 if (fn)
8585 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8586 break;
8588 default:
8589 break;
8592 return NULL_TREE;
8594 case IMAGPART_EXPR:
8595 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8596 return fold_convert (type, integer_zero_node);
8597 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8598 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8599 TREE_OPERAND (arg0, 0));
8600 if (TREE_CODE (arg0) == COMPLEX_CST)
8601 return fold_convert (type, TREE_IMAGPART (arg0));
8602 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8604 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8605 tem = fold_build2 (TREE_CODE (arg0), itype,
8606 fold_build1 (IMAGPART_EXPR, itype,
8607 TREE_OPERAND (arg0, 0)),
8608 fold_build1 (IMAGPART_EXPR, itype,
8609 TREE_OPERAND (arg0, 1)));
8610 return fold_convert (type, tem);
8612 if (TREE_CODE (arg0) == CONJ_EXPR)
8614 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8615 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8616 return fold_convert (type, negate_expr (tem));
8618 if (TREE_CODE (arg0) == CALL_EXPR)
8620 tree fn = get_callee_fndecl (arg0);
8621 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8622 switch (DECL_FUNCTION_CODE (fn))
8624 CASE_FLT_FN (BUILT_IN_CEXPI):
8625 fn = mathfn_built_in (type, BUILT_IN_SIN);
8626 if (fn)
8627 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8628 break;
8630 default:
8631 break;
8634 return NULL_TREE;
8636 default:
8637 return NULL_TREE;
8638 } /* switch (code) */
8642 /* If the operation was a conversion, do _not_ mark a resulting constant
8643 with TREE_OVERFLOW if the original constant was not. These conversions
8644 have implementation-defined behavior and retaining the TREE_OVERFLOW
8645 flag here would confuse later passes such as VRP. */
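/* For example, folding (int) 2147483648u with 32-bit int yields
   INT_MIN and would normally be flagged as overflowed; here the
   result simply inherits the TREE_OVERFLOW setting of the original
   constant, since the conversion is only implementation-defined.  */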
8646 tree
8647 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8649 tree res = fold_unary (code, type, op0);
8650 if (res
8651 && TREE_CODE (res) == INTEGER_CST
8652 && TREE_CODE (op0) == INTEGER_CST
8653 && CONVERT_EXPR_CODE_P (code))
8654 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8656 return res;
8659 /* Fold a binary expression of code CODE and type TYPE with operands
8660 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8661 Return the folded expression if folding is successful. Otherwise,
8662 return NULL_TREE. */
8664 static tree
8665 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8667 enum tree_code compl_code;
8669 if (code == MIN_EXPR)
8670 compl_code = MAX_EXPR;
8671 else if (code == MAX_EXPR)
8672 compl_code = MIN_EXPR;
8673 else
8674 gcc_unreachable ();
8676 /* MIN (MAX (a, b), b) == b. */
8677 if (TREE_CODE (op0) == compl_code
8678 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8679 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8681 /* MIN (MAX (b, a), b) == b. */
8682 if (TREE_CODE (op0) == compl_code
8683 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8684 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8685 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8687 /* MIN (a, MAX (a, b)) == a. */
8688 if (TREE_CODE (op1) == compl_code
8689 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8690 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8691 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8693 /* MIN (a, MAX (b, a)) == a. */
8694 if (TREE_CODE (op1) == compl_code
8695 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8696 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8697 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8699 return NULL_TREE;
8702 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8703 by changing CODE to reduce the magnitude of constants involved in
8704 ARG0 of the comparison.
8705 Returns a canonicalized comparison tree if a simplification was
8706 possible, otherwise returns NULL_TREE.
8707 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8708 valid if signed overflow is undefined. */
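/* Two sample reductions performed here:
     5 <= x     ->  4 < x      (then swapped to x > 4)
     x - 5 < y  ->  x - 4 <= y
   The second form is valid only when signed overflow is undefined,
   which is why *STRICT_OVERFLOW_P is set for it.  */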
8710 static tree
8711 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8712 tree arg0, tree arg1,
8713 bool *strict_overflow_p)
8715 enum tree_code code0 = TREE_CODE (arg0);
8716 tree t, cst0 = NULL_TREE;
8717 int sgn0;
8718 bool swap = false;
8720 /* Match A +- CST code arg1 and CST code arg1. We can change the
8721 first form only if overflow is undefined. */
8722 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8723 /* In principle pointers also have undefined overflow behavior,
8724 but that causes problems elsewhere. */
8725 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8726 && (code0 == MINUS_EXPR
8727 || code0 == PLUS_EXPR)
8728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8729 || code0 == INTEGER_CST))
8730 return NULL_TREE;
8732 /* Identify the constant in arg0 and its sign. */
8733 if (code0 == INTEGER_CST)
8734 cst0 = arg0;
8735 else
8736 cst0 = TREE_OPERAND (arg0, 1);
8737 sgn0 = tree_int_cst_sgn (cst0);
8739 /* Overflowed constants and zero will cause problems. */
8740 if (integer_zerop (cst0)
8741 || TREE_OVERFLOW (cst0))
8742 return NULL_TREE;
8744 /* See if we can reduce the magnitude of the constant in
8745 arg0 by changing the comparison code. */
8746 if (code0 == INTEGER_CST)
8748 /* CST <= arg1 -> CST-1 < arg1. */
8749 if (code == LE_EXPR && sgn0 == 1)
8750 code = LT_EXPR;
8751 /* -CST < arg1 -> -CST-1 <= arg1. */
8752 else if (code == LT_EXPR && sgn0 == -1)
8753 code = LE_EXPR;
8754 /* CST > arg1 -> CST-1 >= arg1. */
8755 else if (code == GT_EXPR && sgn0 == 1)
8756 code = GE_EXPR;
8757 /* -CST >= arg1 -> -CST-1 > arg1. */
8758 else if (code == GE_EXPR && sgn0 == -1)
8759 code = GT_EXPR;
8760 else
8761 return NULL_TREE;
8762 /* arg1 code' CST' might be more canonical. */
8763 swap = true;
8765 else
8767 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8768 if (code == LT_EXPR
8769 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8770 code = LE_EXPR;
8771 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8772 else if (code == GT_EXPR
8773 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8774 code = GE_EXPR;
8775 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8776 else if (code == LE_EXPR
8777 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8778 code = LT_EXPR;
8779 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8780 else if (code == GE_EXPR
8781 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8782 code = GT_EXPR;
8783 else
8784 return NULL_TREE;
8785 *strict_overflow_p = true;
8788 /* Now build the constant reduced in magnitude. But not if that
8789 would produce one outside of its type's range. */
8790 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8791 && ((sgn0 == 1
8792 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8793 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8794 || (sgn0 == -1
8795 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8796 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8797 /* We cannot swap the comparison here as that would cause us to
8798 endlessly recurse. */
8799 return NULL_TREE;
8801 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8802 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8803 if (code0 != INTEGER_CST)
8804 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8806 /* If swapping might yield a more canonical form, do so. */
8807 if (swap)
8808 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8809 else
8810 return fold_build2 (code, type, t, arg1);
8813 /* Canonicalize the comparison ARG0 CODE ARG1 of type TYPE with undefined
8814 overflow further. Try to decrease the magnitude of constants involved
8815 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa,
8816 and put sole constants at the second argument position.
8817 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8819 static tree
8820 maybe_canonicalize_comparison (enum tree_code code, tree type,
8821 tree arg0, tree arg1)
8823 tree t;
8824 bool strict_overflow_p;
8825 const char * const warnmsg = G_("assuming signed overflow does not occur "
8826 "when reducing constant in comparison");
8828 /* Try canonicalization by simplifying arg0. */
8829 strict_overflow_p = false;
8830 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8831 &strict_overflow_p);
8832 if (t)
8834 if (strict_overflow_p)
8835 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8836 return t;
8839 /* Try canonicalization by simplifying arg1 using the swapped
8840 comparison. */
8841 code = swap_tree_comparison (code);
8842 strict_overflow_p = false;
8843 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8844 &strict_overflow_p);
8845 if (t && strict_overflow_p)
8846 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8847 return t;
8850 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8851 space. This is used to avoid issuing overflow warnings for
8852 expressions like &p->x which cannot wrap. */
8854 static bool
8855 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8857 unsigned HOST_WIDE_INT offset_low, total_low;
8858 HOST_WIDE_INT size, offset_high, total_high;
8860 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8861 return true;
8863 if (bitpos < 0)
8864 return true;
8866 if (offset == NULL_TREE)
8868 offset_low = 0;
8869 offset_high = 0;
8871 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8872 return true;
8873 else
8875 offset_low = TREE_INT_CST_LOW (offset);
8876 offset_high = TREE_INT_CST_HIGH (offset);
8879 if (add_double_with_sign (offset_low, offset_high,
8880 bitpos / BITS_PER_UNIT, 0,
8881 &total_low, &total_high,
8882 true))
8883 return true;
8885 if (total_high != 0)
8886 return true;
8888 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8889 if (size <= 0)
8890 return true;
8892 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8893 array. */
8894 if (TREE_CODE (base) == ADDR_EXPR)
8896 HOST_WIDE_INT base_size;
8898 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8899 if (base_size > 0 && size < base_size)
8900 size = base_size;
8903 return total_low > (unsigned HOST_WIDE_INT) size;
8906 /* Subroutine of fold_binary. This routine performs all of the
8907 transformations that are common to the equality/inequality
8908 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8909 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8910 fold_binary itself should go through fold_binary. Fold a comparison with
8911 tree code CODE and type TYPE with operands OP0 and OP1. Return
8912 the folded comparison or NULL_TREE. */
8914 static tree
8915 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8917 tree arg0, arg1, tem;
8919 arg0 = op0;
8920 arg1 = op1;
8922 STRIP_SIGN_NOPS (arg0);
8923 STRIP_SIGN_NOPS (arg1);
8925 tem = fold_relational_const (code, type, arg0, arg1);
8926 if (tem != NULL_TREE)
8927 return tem;
8929 /* If one arg is a real or integer constant, put it last. */
8930 if (tree_swap_operands_p (arg0, arg1, true))
8931 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8933 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
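/* For example, for signed x this rewrites x + 2 < 5 into x < 3; when
   computing C2 +- C1 overflows, the comparison is effectively against
   INT_MAX/INT_MIN and degenerates to a constant, as handled below.  */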
8934 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8935 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8936 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8937 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8938 && (TREE_CODE (arg1) == INTEGER_CST
8939 && !TREE_OVERFLOW (arg1)))
8941 tree const1 = TREE_OPERAND (arg0, 1);
8942 tree const2 = arg1;
8943 tree variable = TREE_OPERAND (arg0, 0);
8944 tree lhs;
8945 int lhs_add;
8946 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8948 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8949 TREE_TYPE (arg1), const2, const1);
8951 /* If the constant operation overflowed, this can be
8952 simplified as a comparison against INT_MAX/INT_MIN. */
8953 if (TREE_CODE (lhs) == INTEGER_CST
8954 && TREE_OVERFLOW (lhs))
8956 int const1_sgn = tree_int_cst_sgn (const1);
8957 enum tree_code code2 = code;
8959 /* Get the sign of the constant on the lhs if the
8960 operation were VARIABLE + CONST1. */
8961 if (TREE_CODE (arg0) == MINUS_EXPR)
8962 const1_sgn = -const1_sgn;
8964 /* The sign of the constant determines if we overflowed
8965 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8966 Canonicalize to the INT_MIN overflow by swapping the comparison
8967 if necessary. */
8968 if (const1_sgn == -1)
8969 code2 = swap_tree_comparison (code);
8971 /* We can now look at the canonicalized case
8972 VARIABLE + 1 CODE2 INT_MIN
8973 and decide on the result. */
8974 if (code2 == LT_EXPR
8975 || code2 == LE_EXPR
8976 || code2 == EQ_EXPR)
8977 return omit_one_operand (type, boolean_false_node, variable);
8978 else if (code2 == NE_EXPR
8979 || code2 == GE_EXPR
8980 || code2 == GT_EXPR)
8981 return omit_one_operand (type, boolean_true_node, variable);
8984 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8985 && (TREE_CODE (lhs) != INTEGER_CST
8986 || !TREE_OVERFLOW (lhs)))
8988 fold_overflow_warning (("assuming signed overflow does not occur "
8989 "when changing X +- C1 cmp C2 to "
8990 "X cmp C1 +- C2"),
8991 WARN_STRICT_OVERFLOW_COMPARISON);
8992 return fold_build2 (code, type, variable, lhs);
8996 /* For comparisons of pointers we can decompose them into a compile-time
8997 comparison of the base objects and the offsets into the object.
8998 This requires at least one operand being an ADDR_EXPR or a
8999 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
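/* For example, for int a[4] both &a[1] == &a[2] and &a[1] < &a[2]
   decompose to the common base a with byte offsets 4 and 8 (assuming
   4-byte int) and fold to false and true respectively.  */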
9000 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9001 && (TREE_CODE (arg0) == ADDR_EXPR
9002 || TREE_CODE (arg1) == ADDR_EXPR
9003 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9004 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9006 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9007 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9008 enum machine_mode mode;
9009 int volatilep, unsignedp;
9010 bool indirect_base0 = false, indirect_base1 = false;
9012 /* Get base and offset for the access. Strip ADDR_EXPR for
9013 get_inner_reference, but put it back by stripping INDIRECT_REF
9014 off the base object if possible. indirect_baseN will be true
9015 if baseN is not an address but refers to the object itself. */
9016 base0 = arg0;
9017 if (TREE_CODE (arg0) == ADDR_EXPR)
9019 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9020 &bitsize, &bitpos0, &offset0, &mode,
9021 &unsignedp, &volatilep, false);
9022 if (TREE_CODE (base0) == INDIRECT_REF)
9023 base0 = TREE_OPERAND (base0, 0);
9024 else
9025 indirect_base0 = true;
9027 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9029 base0 = TREE_OPERAND (arg0, 0);
9030 offset0 = TREE_OPERAND (arg0, 1);
9033 base1 = arg1;
9034 if (TREE_CODE (arg1) == ADDR_EXPR)
9036 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9037 &bitsize, &bitpos1, &offset1, &mode,
9038 &unsignedp, &volatilep, false);
9039 if (TREE_CODE (base1) == INDIRECT_REF)
9040 base1 = TREE_OPERAND (base1, 0);
9041 else
9042 indirect_base1 = true;
9044 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9046 base1 = TREE_OPERAND (arg1, 0);
9047 offset1 = TREE_OPERAND (arg1, 1);
9050 /* If we have equivalent bases we might be able to simplify. */
9051 if (indirect_base0 == indirect_base1
9052 && operand_equal_p (base0, base1, 0))
9054 /* We can fold this expression to a constant if the non-constant
9055 offset parts are equal. */
9056 if ((offset0 == offset1
9057 || (offset0 && offset1
9058 && operand_equal_p (offset0, offset1, 0)))
9059 && (code == EQ_EXPR
9060 || code == NE_EXPR
9061 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9064 if (code != EQ_EXPR
9065 && code != NE_EXPR
9066 && bitpos0 != bitpos1
9067 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9068 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9069 fold_overflow_warning (("assuming pointer wraparound does not "
9070 "occur when comparing P +- C1 with "
9071 "P +- C2"),
9072 WARN_STRICT_OVERFLOW_CONDITIONAL);
9074 switch (code)
9076 case EQ_EXPR:
9077 return constant_boolean_node (bitpos0 == bitpos1, type);
9078 case NE_EXPR:
9079 return constant_boolean_node (bitpos0 != bitpos1, type);
9080 case LT_EXPR:
9081 return constant_boolean_node (bitpos0 < bitpos1, type);
9082 case LE_EXPR:
9083 return constant_boolean_node (bitpos0 <= bitpos1, type);
9084 case GE_EXPR:
9085 return constant_boolean_node (bitpos0 >= bitpos1, type);
9086 case GT_EXPR:
9087 return constant_boolean_node (bitpos0 > bitpos1, type);
9088 default:;
9091 /* We can simplify the comparison to a comparison of the variable
9092 offset parts if the constant offset parts are equal.
9093 Be careful to use signed size type here because otherwise we
9094 mess with array offsets in the wrong way. This is possible
9095 because pointer arithmetic is restricted to remain within an
9096 object and overflow on pointer differences is undefined as of
9097 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9098 else if (bitpos0 == bitpos1
9099 && ((code == EQ_EXPR || code == NE_EXPR)
9100 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9102 tree signed_size_type_node;
9103 signed_size_type_node = signed_type_for (size_type_node);
9105 /* By converting to the signed size type we cover middle-end pointer
9106 arithmetic, which operates on unsigned pointer types of size-type
9107 width, and ARRAY_REF offsets, which are properly sign- or
9108 zero-extended from their type in case it is narrower than the
9109 size type. */
9110 if (offset0 == NULL_TREE)
9111 offset0 = build_int_cst (signed_size_type_node, 0);
9112 else
9113 offset0 = fold_convert (signed_size_type_node, offset0);
9114 if (offset1 == NULL_TREE)
9115 offset1 = build_int_cst (signed_size_type_node, 0);
9116 else
9117 offset1 = fold_convert (signed_size_type_node, offset1);
9119 if (code != EQ_EXPR
9120 && code != NE_EXPR
9121 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9122 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9123 fold_overflow_warning (("assuming pointer wraparound does not "
9124 "occur when comparing P +- C1 with "
9125 "P +- C2"),
9126 WARN_STRICT_OVERFLOW_COMPARISON);
9128 return fold_build2 (code, type, offset0, offset1);
9131 /* For non-equal bases we can simplify if they are addresses
9132 of local binding decls or constants. */
9133 else if (indirect_base0 && indirect_base1
9134 /* We know that !operand_equal_p (base0, base1, 0)
9135 because the if condition was false. But make
9136 sure two decls are not the same. */
9137 && base0 != base1
9138 && TREE_CODE (arg0) == ADDR_EXPR
9139 && TREE_CODE (arg1) == ADDR_EXPR
9140 && (((TREE_CODE (base0) == VAR_DECL
9141 || TREE_CODE (base0) == PARM_DECL)
9142 && (targetm.binds_local_p (base0)
9143 || CONSTANT_CLASS_P (base1)))
9144 || CONSTANT_CLASS_P (base0))
9145 && (((TREE_CODE (base1) == VAR_DECL
9146 || TREE_CODE (base1) == PARM_DECL)
9147 && (targetm.binds_local_p (base1)
9148 || CONSTANT_CLASS_P (base0)))
9149 || CONSTANT_CLASS_P (base1)))
9151 if (code == EQ_EXPR)
9152 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9153 else if (code == NE_EXPR)
9154 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9156 /* For equal offsets we can simplify to a comparison of the
9157 base addresses. */
9158 else if (bitpos0 == bitpos1
9159 && (indirect_base0
9160 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9161 && (indirect_base1
9162 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9163 && ((offset0 == offset1)
9164 || (offset0 && offset1
9165 && operand_equal_p (offset0, offset1, 0))))
9167 if (indirect_base0)
9168 base0 = build_fold_addr_expr (base0);
9169 if (indirect_base1)
9170 base1 = build_fold_addr_expr (base1);
9171 return fold_build2 (code, type, base0, base1);
9175 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9176 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9177 the resulting offset is smaller in absolute value than the
9178 original one. */
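	  /* For example, with undefined signed overflow, x + 2 > y + 1 is
	     rewritten as x > y + -1 (effectively x > y - 1), since the
	     combined constant -1 is smaller in absolute value than the
	     original ones.  */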
9179 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9180 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9181 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9182 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9183 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9184 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9185 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9187 tree const1 = TREE_OPERAND (arg0, 1);
9188 tree const2 = TREE_OPERAND (arg1, 1);
9189 tree variable1 = TREE_OPERAND (arg0, 0);
9190 tree variable2 = TREE_OPERAND (arg1, 0);
9191 tree cst;
9192 const char * const warnmsg = G_("assuming signed overflow does not "
9193 "occur when combining constants around "
9194 "a comparison");
9196 /* Put the constant on the side where it doesn't overflow and is
9197 of lower absolute value than before. */
9198 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9199 ? MINUS_EXPR : PLUS_EXPR,
9200 const2, const1, 0);
9201 if (!TREE_OVERFLOW (cst)
9202 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9204 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9205 return fold_build2 (code, type,
9206 variable1,
9207 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9208 variable2, cst));
9211 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9212 ? MINUS_EXPR : PLUS_EXPR,
9213 const1, const2, 0);
9214 if (!TREE_OVERFLOW (cst)
9215 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9217 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9218 return fold_build2 (code, type,
9219 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9220 variable1, cst),
9221 variable2);
9225 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9226 signed arithmetic case. That form is created by the compiler
9227 often enough for folding it to be of value. One example is in
9228 computing loop trip counts after Operator Strength Reduction. */
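	  /* For example, with undefined signed overflow, x * 4 > 0 folds to
	     x > 0, while x * -4 > 0 folds to x < 0 because the negative
	     multiplier swaps the sense of the comparison.  */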
9229 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9230 && TREE_CODE (arg0) == MULT_EXPR
9231 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9232 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9233 && integer_zerop (arg1))
9235 tree const1 = TREE_OPERAND (arg0, 1);
9236 tree const2 = arg1; /* zero */
9237 tree variable1 = TREE_OPERAND (arg0, 0);
9238 enum tree_code cmp_code = code;
9240 gcc_assert (!integer_zerop (const1));
9242 fold_overflow_warning (("assuming signed overflow does not occur when "
9243 "eliminating multiplication in comparison "
9244 "with zero"),
9245 WARN_STRICT_OVERFLOW_COMPARISON);
9247 /* If const1 is negative we swap the sense of the comparison. */
9248 if (tree_int_cst_sgn (const1) < 0)
9249 cmp_code = swap_tree_comparison (cmp_code);
9251 return fold_build2 (cmp_code, type, variable1, const2);
9254 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9255 if (tem)
9256 return tem;
9258 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9260 tree targ0 = strip_float_extensions (arg0);
9261 tree targ1 = strip_float_extensions (arg1);
9262 tree newtype = TREE_TYPE (targ0);
9264 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9265 newtype = TREE_TYPE (targ1);
9267 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9268 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9269 return fold_build2 (code, type, fold_convert (newtype, targ0),
9270 fold_convert (newtype, targ1));
9272 /* (-a) CMP (-b) -> b CMP a */
9273 if (TREE_CODE (arg0) == NEGATE_EXPR
9274 && TREE_CODE (arg1) == NEGATE_EXPR)
9275 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9276 TREE_OPERAND (arg0, 0));
9278 if (TREE_CODE (arg1) == REAL_CST)
9280 REAL_VALUE_TYPE cst;
9281 cst = TREE_REAL_CST (arg1);
9283 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9284 if (TREE_CODE (arg0) == NEGATE_EXPR)
9285 return fold_build2 (swap_tree_comparison (code), type,
9286 TREE_OPERAND (arg0, 0),
9287 build_real (TREE_TYPE (arg1),
9288 REAL_VALUE_NEGATE (cst)));
9290 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9291 /* a CMP (-0) -> a CMP 0 */
9292 if (REAL_VALUE_MINUS_ZERO (cst))
9293 return fold_build2 (code, type, arg0,
9294 build_real (TREE_TYPE (arg1), dconst0));
9296 /* x != NaN is always true, other ops are always false. */
9297 if (REAL_VALUE_ISNAN (cst)
9298 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9300 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9301 return omit_one_operand (type, tem, arg0);
9304 /* Fold comparisons against infinity. */
9305 if (REAL_VALUE_ISINF (cst)
9306 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9308 tem = fold_inf_compare (code, type, arg0, arg1);
9309 if (tem != NULL_TREE)
9310 return tem;
9314 /* If this is a comparison of a real constant with a PLUS_EXPR
9315 or a MINUS_EXPR of a real constant, we can convert it into a
9316 comparison with a revised real constant as long as no overflow
9317 occurs when unsafe_math_optimizations are enabled. */
9318 if (flag_unsafe_math_optimizations
9319 && TREE_CODE (arg1) == REAL_CST
9320 && (TREE_CODE (arg0) == PLUS_EXPR
9321 || TREE_CODE (arg0) == MINUS_EXPR)
9322 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9323 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9324 ? MINUS_EXPR : PLUS_EXPR,
9325 arg1, TREE_OPERAND (arg0, 1), 0))
9326 && !TREE_OVERFLOW (tem))
9327 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9329 /* Likewise, we can simplify a comparison of a real constant with
9330 a MINUS_EXPR whose first operand is also a real constant, i.e.
9331 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9332 floating-point types only if -fassociative-math is set. */
9333 if (flag_associative_math
9334 && TREE_CODE (arg1) == REAL_CST
9335 && TREE_CODE (arg0) == MINUS_EXPR
9336 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9337 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9338 arg1, 0))
9339 && !TREE_OVERFLOW (tem))
9340 return fold_build2 (swap_tree_comparison (code), type,
9341 TREE_OPERAND (arg0, 1), tem);
9343 /* Fold comparisons against built-in math functions. */
9344 if (TREE_CODE (arg1) == REAL_CST
9345 && flag_unsafe_math_optimizations
9346 && ! flag_errno_math)
9348 enum built_in_function fcode = builtin_mathfn_code (arg0);
9350 if (fcode != END_BUILTINS)
9352 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9353 if (tem != NULL_TREE)
9354 return tem;
9359 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9360 && CONVERT_EXPR_P (arg0))
9362 /* If we are widening one operand of an integer comparison,
9363 see if the other operand is similarly being widened. Perhaps we
9364 can do the comparison in the narrower type. */
9365 tem = fold_widened_comparison (code, type, arg0, arg1);
9366 if (tem)
9367 return tem;
9369 /* Or if we are changing signedness. */
9370 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9371 if (tem)
9372 return tem;
9375 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9376 constant, we can simplify it. */
9377 if (TREE_CODE (arg1) == INTEGER_CST
9378 && (TREE_CODE (arg0) == MIN_EXPR
9379 || TREE_CODE (arg0) == MAX_EXPR)
9380 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9382 tem = optimize_minmax_comparison (code, type, op0, op1);
9383 if (tem)
9384 return tem;
9387 /* Simplify comparison of something with itself. (For IEEE
9388 floating-point, we can only do some of these simplifications.) */
9389 if (operand_equal_p (arg0, arg1, 0))
9391 switch (code)
9393 case EQ_EXPR:
9394 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9395 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9396 return constant_boolean_node (1, type);
9397 break;
9399 case GE_EXPR:
9400 case LE_EXPR:
9401 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9402 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9403 return constant_boolean_node (1, type);
9404 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9406 case NE_EXPR:
9407 	  /* For NE, we can only do this simplification for integer types
9408 	     or when we don't honor IEEE floating point NaNs.  */
9409 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9410 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9411 break;
9412 /* ... fall through ... */
9413 case GT_EXPR:
9414 case LT_EXPR:
9415 return constant_boolean_node (0, type);
9416 default:
9417 gcc_unreachable ();
9421 /* If we are comparing an expression that just has comparisons
9422 of two integer values, arithmetic expressions of those comparisons,
9423 and constants, we can simplify it. There are only three cases
9424 to check: the two values can either be equal, the first can be
9425 greater, or the second can be greater. Fold the expression for
9426 those three values. Since each value must be 0 or 1, we have
9427 eight possibilities, each of which corresponds to the constant 0
9428 or 1 or one of the six possible comparisons.
9430 This handles common cases like (a > b) == 0 but also handles
9431 expressions like ((x > y) - (y > x)) > 0, which supposedly
9432 occur in macroized code. */
9434 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9436 tree cval1 = 0, cval2 = 0;
9437 int save_p = 0;
9439 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9440 /* Don't handle degenerate cases here; they should already
9441 have been handled anyway. */
9442 && cval1 != 0 && cval2 != 0
9443 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9444 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9445 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9446 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9447 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9448 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9449 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9451 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9452 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9454 /* We can't just pass T to eval_subst in case cval1 or cval2
9455 was the same as ARG1. */
9457 tree high_result
9458 = fold_build2 (code, type,
9459 eval_subst (arg0, cval1, maxval,
9460 cval2, minval),
9461 arg1);
9462 tree equal_result
9463 = fold_build2 (code, type,
9464 eval_subst (arg0, cval1, maxval,
9465 cval2, maxval),
9466 arg1);
9467 tree low_result
9468 = fold_build2 (code, type,
9469 eval_subst (arg0, cval1, minval,
9470 cval2, maxval),
9471 arg1);
9473 /* All three of these results should be 0 or 1. Confirm they are.
9474 Then use those values to select the proper code to use. */
9476 if (TREE_CODE (high_result) == INTEGER_CST
9477 && TREE_CODE (equal_result) == INTEGER_CST
9478 && TREE_CODE (low_result) == INTEGER_CST)
9480 /* Make a 3-bit mask with the high-order bit being the
9481 		 value for `>', the next for `=', and the low for `<'.  */
9482 switch ((integer_onep (high_result) * 4)
9483 + (integer_onep (equal_result) * 2)
9484 + integer_onep (low_result))
9486 case 0:
9487 /* Always false. */
9488 return omit_one_operand (type, integer_zero_node, arg0);
9489 case 1:
9490 code = LT_EXPR;
9491 break;
9492 case 2:
9493 code = EQ_EXPR;
9494 break;
9495 case 3:
9496 code = LE_EXPR;
9497 break;
9498 case 4:
9499 code = GT_EXPR;
9500 break;
9501 case 5:
9502 code = NE_EXPR;
9503 break;
9504 case 6:
9505 code = GE_EXPR;
9506 break;
9507 case 7:
9508 /* Always true. */
9509 return omit_one_operand (type, integer_one_node, arg0);
9512 if (save_p)
9513 return save_expr (build2 (code, type, cval1, cval2));
9514 return fold_build2 (code, type, cval1, cval2);
9519 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9520 into a single range test. */
9521 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9522 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9523 && TREE_CODE (arg1) == INTEGER_CST
9524 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9525 && !integer_zerop (TREE_OPERAND (arg0, 1))
9526 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9527 && !TREE_OVERFLOW (arg1))
9529 tem = fold_div_compare (code, type, arg0, arg1);
9530 if (tem != NULL_TREE)
9531 return tem;
9534 /* Fold ~X op ~Y as Y op X. */
9535 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9536 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9538 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9539 return fold_build2 (code, type,
9540 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9541 TREE_OPERAND (arg0, 0));
9544 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9545 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9546 && TREE_CODE (arg1) == INTEGER_CST)
9548 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9549 return fold_build2 (swap_tree_comparison (code), type,
9550 TREE_OPERAND (arg0, 0),
9551 fold_build1 (BIT_NOT_EXPR, cmp_type,
9552 fold_convert (cmp_type, arg1)));
9555 return NULL_TREE;
9559 /* Subroutine of fold_binary. Optimize complex multiplications of the
9560 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9561 argument EXPR represents the expression "z" of type TYPE. */
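/* For example, for z = a + b*i this produces
     COMPLEX_EXPR <a*a + b*b, 0>
   since (a + b*i) * (a - b*i) == a*a + b*b with no imaginary part.  */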
9563 static tree
9564 fold_mult_zconjz (tree type, tree expr)
9566 tree itype = TREE_TYPE (type);
9567 tree rpart, ipart, tem;
9569 if (TREE_CODE (expr) == COMPLEX_EXPR)
9571 rpart = TREE_OPERAND (expr, 0);
9572 ipart = TREE_OPERAND (expr, 1);
9574 else if (TREE_CODE (expr) == COMPLEX_CST)
9576 rpart = TREE_REALPART (expr);
9577 ipart = TREE_IMAGPART (expr);
9579 else
9581 expr = save_expr (expr);
9582 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9583 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9586 rpart = save_expr (rpart);
9587 ipart = save_expr (ipart);
9588 tem = fold_build2 (PLUS_EXPR, itype,
9589 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9590 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9591 return fold_build2 (COMPLEX_EXPR, type, tem,
9592 fold_convert (itype, integer_zero_node));
9596 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9597 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9598 guarantees that P and N have the same least significant log2(M) bits.
9599 N is not otherwise constrained. In particular, N is not normalized to
9600 0 <= N < M as is common. In general, the precise value of P is unknown.
9601 M is chosen as large as possible such that constant N can be determined.
9603 Returns M and sets *RESIDUE to N.
9605 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9606 account. This is not always possible due to PR 35705.
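/* Illustrative example: for EXPR == &buf[3], where BUF is a decl with
   DECL_ALIGN_UNIT of 16, this returns M == 16 and sets *RESIDUE to 3,
   i.e. every possible value P of EXPR satisfies P == 3 (mod 16).  */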
9609 static unsigned HOST_WIDE_INT
9610 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9611 bool allow_func_align)
9613 enum tree_code code;
9615 *residue = 0;
9617 code = TREE_CODE (expr);
9618 if (code == ADDR_EXPR)
9620 expr = TREE_OPERAND (expr, 0);
9621 if (handled_component_p (expr))
9623 HOST_WIDE_INT bitsize, bitpos;
9624 tree offset;
9625 enum machine_mode mode;
9626 int unsignedp, volatilep;
9628 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9629 &mode, &unsignedp, &volatilep, false);
9630 *residue = bitpos / BITS_PER_UNIT;
9631 if (offset)
9633 if (TREE_CODE (offset) == INTEGER_CST)
9634 *residue += TREE_INT_CST_LOW (offset);
9635 else
9636 /* We don't handle more complicated offset expressions. */
9637 return 1;
9641 if (DECL_P (expr)
9642 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9643 return DECL_ALIGN_UNIT (expr);
9645 else if (code == POINTER_PLUS_EXPR)
9647 tree op0, op1;
9648 unsigned HOST_WIDE_INT modulus;
9649 enum tree_code inner_code;
9651 op0 = TREE_OPERAND (expr, 0);
9652 STRIP_NOPS (op0);
9653 modulus = get_pointer_modulus_and_residue (op0, residue,
9654 allow_func_align);
9656 op1 = TREE_OPERAND (expr, 1);
9657 STRIP_NOPS (op1);
9658 inner_code = TREE_CODE (op1);
9659 if (inner_code == INTEGER_CST)
9661 *residue += TREE_INT_CST_LOW (op1);
9662 return modulus;
9664 else if (inner_code == MULT_EXPR)
9666 op1 = TREE_OPERAND (op1, 1);
9667 if (TREE_CODE (op1) == INTEGER_CST)
9669 unsigned HOST_WIDE_INT align;
9671 /* Compute the greatest power-of-2 divisor of op1. */
9672 align = TREE_INT_CST_LOW (op1);
9673 align &= -align;
9675 /* If align is non-zero and less than *modulus, replace
9676 		 *modulus with align.  If align is 0, then either op1 is 0
9677 or the greatest power-of-2 divisor of op1 doesn't fit in an
9678 unsigned HOST_WIDE_INT. In either case, no additional
9679 constraint is imposed. */
9680 if (align)
9681 modulus = MIN (modulus, align);
9683 return modulus;
9688 /* If we get here, we were unable to determine anything useful about the
9689 expression. */
9690 return 1;
9694 /* Fold a binary expression of code CODE and type TYPE with operands
9695 OP0 and OP1. Return the folded expression if folding is
9696 successful. Otherwise, return NULL_TREE. */
9698 tree
9699 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9701 enum tree_code_class kind = TREE_CODE_CLASS (code);
9702 tree arg0, arg1, tem;
9703 tree t1 = NULL_TREE;
9704 bool strict_overflow_p;
9706 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9707 && TREE_CODE_LENGTH (code) == 2
9708 && op0 != NULL_TREE
9709 && op1 != NULL_TREE);
9711 arg0 = op0;
9712 arg1 = op1;
9714 /* Strip any conversions that don't change the mode. This is
9715 safe for every expression, except for a comparison expression
9716 because its signedness is derived from its operands. So, in
9717 the latter case, only strip conversions that don't change the
9718 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9719 preserved.
9721 Note that this is done as an internal manipulation within the
9722 constant folder, in order to find the simplest representation
9723 of the arguments so that their form can be studied. In any
9724      case, the appropriate type conversions should be put back in
9725 the tree that will get out of the constant folder. */
9727 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9729 STRIP_SIGN_NOPS (arg0);
9730 STRIP_SIGN_NOPS (arg1);
9732 else
9734 STRIP_NOPS (arg0);
9735 STRIP_NOPS (arg1);
9738 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9739 constant but we can't do arithmetic on them. */
9740 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9741 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9742 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9743 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9744 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9745 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9747 if (kind == tcc_binary)
9749 /* Make sure type and arg0 have the same saturating flag. */
9750 gcc_assert (TYPE_SATURATING (type)
9751 == TYPE_SATURATING (TREE_TYPE (arg0)));
9752 tem = const_binop (code, arg0, arg1, 0);
9754 else if (kind == tcc_comparison)
9755 tem = fold_relational_const (code, type, arg0, arg1);
9756 else
9757 tem = NULL_TREE;
9759 if (tem != NULL_TREE)
9761 if (TREE_TYPE (tem) != type)
9762 tem = fold_convert (type, tem);
9763 return tem;
9767 /* If this is a commutative operation, and ARG0 is a constant, move it
9768 to ARG1 to reduce the number of tests below. */
9769 if (commutative_tree_code (code)
9770 && tree_swap_operands_p (arg0, arg1, true))
9771 return fold_build2 (code, type, op1, op0);
9773 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9775 First check for cases where an arithmetic operation is applied to a
9776 compound, conditional, or comparison operation. Push the arithmetic
9777 operation inside the compound or conditional to see if any folding
9778 can then be done. Convert comparison to conditional for this purpose.
9779      This also optimizes non-constant cases that used to be done in
9780 expand_expr.
9782 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9783      where one of the operands is a comparison and the other is a comparison, a
9784 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9785 code below would make the expression more complex. Change it to a
9786 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9787 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9789 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9790 || code == EQ_EXPR || code == NE_EXPR)
9791 && ((truth_value_p (TREE_CODE (arg0))
9792 && (truth_value_p (TREE_CODE (arg1))
9793 || (TREE_CODE (arg1) == BIT_AND_EXPR
9794 && integer_onep (TREE_OPERAND (arg1, 1)))))
9795 || (truth_value_p (TREE_CODE (arg1))
9796 && (truth_value_p (TREE_CODE (arg0))
9797 || (TREE_CODE (arg0) == BIT_AND_EXPR
9798 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9800 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9801 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9802 : TRUTH_XOR_EXPR,
9803 boolean_type_node,
9804 fold_convert (boolean_type_node, arg0),
9805 fold_convert (boolean_type_node, arg1));
9807 if (code == EQ_EXPR)
9808 tem = invert_truthvalue (tem);
9810 return fold_convert (type, tem);
9813 if (TREE_CODE_CLASS (code) == tcc_binary
9814 || TREE_CODE_CLASS (code) == tcc_comparison)
9816 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9817 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9818 fold_build2 (code, type,
9819 fold_convert (TREE_TYPE (op0),
9820 TREE_OPERAND (arg0, 1)),
9821 op1));
9822 if (TREE_CODE (arg1) == COMPOUND_EXPR
9823 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9824 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9825 fold_build2 (code, type, op0,
9826 fold_convert (TREE_TYPE (op1),
9827 TREE_OPERAND (arg1, 1))));
9829 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9831 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9832 arg0, arg1,
9833 /*cond_first_p=*/1);
9834 if (tem != NULL_TREE)
9835 return tem;
9838 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9840 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9841 arg1, arg0,
9842 /*cond_first_p=*/0);
9843 if (tem != NULL_TREE)
9844 return tem;
9848 switch (code)
9850 case POINTER_PLUS_EXPR:
9851 /* 0 +p index -> (type)index */
9852 if (integer_zerop (arg0))
9853 return non_lvalue (fold_convert (type, arg1));
9855 /* PTR +p 0 -> PTR */
9856 if (integer_zerop (arg1))
9857 return non_lvalue (fold_convert (type, arg0));
9859 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9860 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9861 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9862 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9863 fold_convert (sizetype, arg1),
9864 fold_convert (sizetype, arg0)));
9866 /* index +p PTR -> PTR +p index */
9867 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9868 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9869 return fold_build2 (POINTER_PLUS_EXPR, type,
9870 fold_convert (type, arg1),
9871 fold_convert (sizetype, arg0));
9873 /* (PTR +p B) +p A -> PTR +p (B + A) */
9874 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9876 tree inner;
9877 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9878 tree arg00 = TREE_OPERAND (arg0, 0);
9879 inner = fold_build2 (PLUS_EXPR, sizetype,
9880 arg01, fold_convert (sizetype, arg1));
9881 return fold_convert (type,
9882 fold_build2 (POINTER_PLUS_EXPR,
9883 TREE_TYPE (arg00), arg00, inner));
9886 /* PTR_CST +p CST -> CST1 */
9887 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9888 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9890 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9891 	 of the array.  The loop optimizer sometimes produces this kind of
9892 	 expression.  */
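      /* E.g. for a 4-byte element type (an illustrative size),
         &a[i1] p+ 4 * i2 may become &a[i1 + i2].  */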
9893 if (TREE_CODE (arg0) == ADDR_EXPR)
9895 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9896 if (tem)
9897 return fold_convert (type, tem);
9900 return NULL_TREE;
9902 case PLUS_EXPR:
9903 /* A + (-B) -> A - B */
9904 if (TREE_CODE (arg1) == NEGATE_EXPR)
9905 return fold_build2 (MINUS_EXPR, type,
9906 fold_convert (type, arg0),
9907 fold_convert (type, TREE_OPERAND (arg1, 0)));
9908 /* (-A) + B -> B - A */
9909 if (TREE_CODE (arg0) == NEGATE_EXPR
9910 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9911 return fold_build2 (MINUS_EXPR, type,
9912 fold_convert (type, arg1),
9913 fold_convert (type, TREE_OPERAND (arg0, 0)));
9915 if (INTEGRAL_TYPE_P (type))
9917 /* Convert ~A + 1 to -A. */
9918 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9919 && integer_onep (arg1))
9920 return fold_build1 (NEGATE_EXPR, type,
9921 fold_convert (type, TREE_OPERAND (arg0, 0)));
9923 /* ~X + X is -1. */
9924 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9925 && !TYPE_OVERFLOW_TRAPS (type))
9927 tree tem = TREE_OPERAND (arg0, 0);
9929 STRIP_NOPS (tem);
9930 if (operand_equal_p (tem, arg1, 0))
9932 t1 = build_int_cst_type (type, -1);
9933 return omit_one_operand (type, t1, arg1);
9937 /* X + ~X is -1. */
9938 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9939 && !TYPE_OVERFLOW_TRAPS (type))
9941 tree tem = TREE_OPERAND (arg1, 0);
9943 STRIP_NOPS (tem);
9944 if (operand_equal_p (arg0, tem, 0))
9946 t1 = build_int_cst_type (type, -1);
9947 return omit_one_operand (type, t1, arg0);
9951 /* X + (X / CST) * -CST is X % CST. */
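	  /* E.g. x + (x / 8) * -8 folds to x % 8, using the identity
	     x % 8 == x - (x / 8) * 8 of C's truncating division.  */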
9952 if (TREE_CODE (arg1) == MULT_EXPR
9953 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9954 && operand_equal_p (arg0,
9955 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9957 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9958 tree cst1 = TREE_OPERAND (arg1, 1);
9959 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9960 if (sum && integer_zerop (sum))
9961 return fold_convert (type,
9962 fold_build2 (TRUNC_MOD_EXPR,
9963 TREE_TYPE (arg0), arg0, cst0));
9967 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9968 same or one. Make sure type is not saturating.
9969 fold_plusminus_mult_expr will re-associate. */
9970 if ((TREE_CODE (arg0) == MULT_EXPR
9971 || TREE_CODE (arg1) == MULT_EXPR)
9972 && !TYPE_SATURATING (type)
9973 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9975 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9976 if (tem)
9977 return tem;
9980 if (! FLOAT_TYPE_P (type))
9982 if (integer_zerop (arg1))
9983 return non_lvalue (fold_convert (type, arg0));
9985 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9986 with a constant, and the two constants have no bits in common,
9987 we should treat this as a BIT_IOR_EXPR since this may produce more
9988 simplifications. */
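	  /* E.g. (x & 0xF0) + (y & 0x0F): the constants share no bits,
	     so this is handled as (x & 0xF0) | (y & 0x0F).  */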
9989 if (TREE_CODE (arg0) == BIT_AND_EXPR
9990 && TREE_CODE (arg1) == BIT_AND_EXPR
9991 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9992 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9993 && integer_zerop (const_binop (BIT_AND_EXPR,
9994 TREE_OPERAND (arg0, 1),
9995 TREE_OPERAND (arg1, 1), 0)))
9997 code = BIT_IOR_EXPR;
9998 goto bit_ior;
10001 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10002 (plus (plus (mult) (mult)) (foo)) so that we can
10003 take advantage of the factoring cases below. */
10004 if (((TREE_CODE (arg0) == PLUS_EXPR
10005 || TREE_CODE (arg0) == MINUS_EXPR)
10006 && TREE_CODE (arg1) == MULT_EXPR)
10007 || ((TREE_CODE (arg1) == PLUS_EXPR
10008 || TREE_CODE (arg1) == MINUS_EXPR)
10009 && TREE_CODE (arg0) == MULT_EXPR))
10011 tree parg0, parg1, parg, marg;
10012 enum tree_code pcode;
10014 if (TREE_CODE (arg1) == MULT_EXPR)
10015 parg = arg0, marg = arg1;
10016 else
10017 parg = arg1, marg = arg0;
10018 pcode = TREE_CODE (parg);
10019 parg0 = TREE_OPERAND (parg, 0);
10020 parg1 = TREE_OPERAND (parg, 1);
10021 STRIP_NOPS (parg0);
10022 STRIP_NOPS (parg1);
10024 if (TREE_CODE (parg0) == MULT_EXPR
10025 && TREE_CODE (parg1) != MULT_EXPR)
10026 return fold_build2 (pcode, type,
10027 fold_build2 (PLUS_EXPR, type,
10028 fold_convert (type, parg0),
10029 fold_convert (type, marg)),
10030 fold_convert (type, parg1));
10031 if (TREE_CODE (parg0) != MULT_EXPR
10032 && TREE_CODE (parg1) == MULT_EXPR)
10033 return fold_build2 (PLUS_EXPR, type,
10034 fold_convert (type, parg0),
10035 fold_build2 (pcode, type,
10036 fold_convert (type, marg),
10037 fold_convert (type,
10038 parg1)));
10041 else
10043 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10044 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10045 return non_lvalue (fold_convert (type, arg0));
10047 /* Likewise if the operands are reversed. */
10048 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10049 return non_lvalue (fold_convert (type, arg1));
10051 /* Convert X + -C into X - C. */
10052 if (TREE_CODE (arg1) == REAL_CST
10053 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10055 tem = fold_negate_const (arg1, type);
10056 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10057 return fold_build2 (MINUS_EXPR, type,
10058 fold_convert (type, arg0),
10059 fold_convert (type, tem));
10062 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10063 to __complex__ ( x, y ). This is not the same for SNaNs or
10064 if signed zeros are involved. */
10065 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10066 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10067 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10069 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10070 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10071 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10072 bool arg0rz = false, arg0iz = false;
10073 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10074 || (arg0i && (arg0iz = real_zerop (arg0i))))
10076 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10077 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10078 if (arg0rz && arg1i && real_zerop (arg1i))
10080 tree rp = arg1r ? arg1r
10081 : build1 (REALPART_EXPR, rtype, arg1);
10082 tree ip = arg0i ? arg0i
10083 : build1 (IMAGPART_EXPR, rtype, arg0);
10084 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10086 else if (arg0iz && arg1r && real_zerop (arg1r))
10088 tree rp = arg0r ? arg0r
10089 : build1 (REALPART_EXPR, rtype, arg0);
10090 tree ip = arg1i ? arg1i
10091 : build1 (IMAGPART_EXPR, rtype, arg1);
10092 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10097 if (flag_unsafe_math_optimizations
10098 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10099 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10100 && (tem = distribute_real_division (code, type, arg0, arg1)))
10101 return tem;
10103 /* Convert x+x into x*2.0. */
10104 if (operand_equal_p (arg0, arg1, 0)
10105 && SCALAR_FLOAT_TYPE_P (type))
10106 return fold_build2 (MULT_EXPR, type, arg0,
10107 build_real (type, dconst2));
10109 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10110 We associate floats only if the user has specified
10111 -fassociative-math. */
10112 if (flag_associative_math
10113 && TREE_CODE (arg1) == PLUS_EXPR
10114 && TREE_CODE (arg0) != MULT_EXPR)
10116 tree tree10 = TREE_OPERAND (arg1, 0);
10117 tree tree11 = TREE_OPERAND (arg1, 1);
10118 if (TREE_CODE (tree11) == MULT_EXPR
10119 && TREE_CODE (tree10) == MULT_EXPR)
10121 tree tree0;
10122 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10123 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10126 	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10127 We associate floats only if the user has specified
10128 -fassociative-math. */
10129 if (flag_associative_math
10130 && TREE_CODE (arg0) == PLUS_EXPR
10131 && TREE_CODE (arg1) != MULT_EXPR)
10133 tree tree00 = TREE_OPERAND (arg0, 0);
10134 tree tree01 = TREE_OPERAND (arg0, 1);
10135 if (TREE_CODE (tree01) == MULT_EXPR
10136 && TREE_CODE (tree00) == MULT_EXPR)
10138 tree tree0;
10139 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10140 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10145 bit_rotate:
10146 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10147 is a rotate of A by C1 bits. */
10148 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10149 is a rotate of A by B bits. */
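      /* E.g. assuming a 32-bit unsigned A:
           (A << 5) + (A >> 27)       -> A lrotate 5
           (A << B) + (A >> (32 - B)) -> A lrotate B.  */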
10151 enum tree_code code0, code1;
10152 tree rtype;
10153 code0 = TREE_CODE (arg0);
10154 code1 = TREE_CODE (arg1);
10155 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10156 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10157 && operand_equal_p (TREE_OPERAND (arg0, 0),
10158 TREE_OPERAND (arg1, 0), 0)
10159 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10160 TYPE_UNSIGNED (rtype))
10161 /* Only create rotates in complete modes. Other cases are not
10162 expanded properly. */
10163 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10165 tree tree01, tree11;
10166 enum tree_code code01, code11;
10168 tree01 = TREE_OPERAND (arg0, 1);
10169 tree11 = TREE_OPERAND (arg1, 1);
10170 STRIP_NOPS (tree01);
10171 STRIP_NOPS (tree11);
10172 code01 = TREE_CODE (tree01);
10173 code11 = TREE_CODE (tree11);
10174 if (code01 == INTEGER_CST
10175 && code11 == INTEGER_CST
10176 && TREE_INT_CST_HIGH (tree01) == 0
10177 && TREE_INT_CST_HIGH (tree11) == 0
10178 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10179 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10180 return fold_convert (type,
10181 build2 (LROTATE_EXPR,
10182 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10183 TREE_OPERAND (arg0, 0),
10184 code0 == LSHIFT_EXPR
10185 ? tree01 : tree11));
10186 else if (code11 == MINUS_EXPR)
10188 tree tree110, tree111;
10189 tree110 = TREE_OPERAND (tree11, 0);
10190 tree111 = TREE_OPERAND (tree11, 1);
10191 STRIP_NOPS (tree110);
10192 STRIP_NOPS (tree111);
10193 if (TREE_CODE (tree110) == INTEGER_CST
10194 && 0 == compare_tree_int (tree110,
10195 TYPE_PRECISION
10196 (TREE_TYPE (TREE_OPERAND
10197 (arg0, 0))))
10198 && operand_equal_p (tree01, tree111, 0))
10199 return fold_convert (type,
10200 build2 ((code0 == LSHIFT_EXPR
10201 ? LROTATE_EXPR
10202 : RROTATE_EXPR),
10203 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10204 TREE_OPERAND (arg0, 0), tree01));
10206 else if (code01 == MINUS_EXPR)
10208 tree tree010, tree011;
10209 tree010 = TREE_OPERAND (tree01, 0);
10210 tree011 = TREE_OPERAND (tree01, 1);
10211 STRIP_NOPS (tree010);
10212 STRIP_NOPS (tree011);
10213 if (TREE_CODE (tree010) == INTEGER_CST
10214 && 0 == compare_tree_int (tree010,
10215 TYPE_PRECISION
10216 (TREE_TYPE (TREE_OPERAND
10217 (arg0, 0))))
10218 && operand_equal_p (tree11, tree011, 0))
10219 return fold_convert (type,
10220 build2 ((code0 != LSHIFT_EXPR
10221 ? LROTATE_EXPR
10222 : RROTATE_EXPR),
10223 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10224 TREE_OPERAND (arg0, 0), tree11));
10229 associate:
10230     /* In most languages, we can't associate operations on floats through
10231 parentheses. Rather than remember where the parentheses were, we
10232 don't associate floats at all, unless the user has specified
10233 -fassociative-math.
10234 And, we need to make sure type is not saturating. */
10236 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10237 && !TYPE_SATURATING (type))
10239 tree var0, con0, lit0, minus_lit0;
10240 tree var1, con1, lit1, minus_lit1;
10241 bool ok = true;
10243 /* Split both trees into variables, constants, and literals. Then
10244 associate each group together, the constants with literals,
10245 then the result with variables. This increases the chances of
10246 literals being recombined later and of generating relocatable
10247 expressions for the sum of a constant and literal. */
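	  /* E.g. (x + 4) + 8 splits into the variable x and the literals
	     4 and 8, which are recombined below into x + 12.  */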
10248 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10249 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10250 code == MINUS_EXPR);
10252 /* With undefined overflow we can only associate constants
10253 with one variable. */
10254 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10255 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10256 && var0 && var1)
10258 tree tmp0 = var0;
10259 tree tmp1 = var1;
10261 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10262 tmp0 = TREE_OPERAND (tmp0, 0);
10263 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10264 tmp1 = TREE_OPERAND (tmp1, 0);
10265 /* The only case we can still associate with two variables
10266 is if they are the same, modulo negation. */
10267 if (!operand_equal_p (tmp0, tmp1, 0))
10268 ok = false;
10271 /* Only do something if we found more than two objects. Otherwise,
10272 nothing has changed and we risk infinite recursion. */
10273 if (ok
10274 && (2 < ((var0 != 0) + (var1 != 0)
10275 + (con0 != 0) + (con1 != 0)
10276 + (lit0 != 0) + (lit1 != 0)
10277 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10279 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10280 if (code == MINUS_EXPR)
10281 code = PLUS_EXPR;
10283 var0 = associate_trees (var0, var1, code, type);
10284 con0 = associate_trees (con0, con1, code, type);
10285 lit0 = associate_trees (lit0, lit1, code, type);
10286 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10288 /* Preserve the MINUS_EXPR if the negative part of the literal is
10289 greater than the positive part. Otherwise, the multiplicative
10290 	     folding code (i.e. extract_muldiv) may be fooled in case
10291 unsigned constants are subtracted, like in the following
10292 example: ((X*2 + 4) - 8U)/2. */
10293 if (minus_lit0 && lit0)
10295 if (TREE_CODE (lit0) == INTEGER_CST
10296 && TREE_CODE (minus_lit0) == INTEGER_CST
10297 && tree_int_cst_lt (lit0, minus_lit0))
10299 minus_lit0 = associate_trees (minus_lit0, lit0,
10300 MINUS_EXPR, type);
10301 lit0 = 0;
10303 else
10305 lit0 = associate_trees (lit0, minus_lit0,
10306 MINUS_EXPR, type);
10307 minus_lit0 = 0;
10310 if (minus_lit0)
10312 if (con0 == 0)
10313 return fold_convert (type,
10314 associate_trees (var0, minus_lit0,
10315 MINUS_EXPR, type));
10316 else
10318 con0 = associate_trees (con0, minus_lit0,
10319 MINUS_EXPR, type);
10320 return fold_convert (type,
10321 associate_trees (var0, con0,
10322 PLUS_EXPR, type));
10326 con0 = associate_trees (con0, lit0, code, type);
10327 return fold_convert (type, associate_trees (var0, con0,
10328 code, type));
10332 return NULL_TREE;
10334 case MINUS_EXPR:
10335 /* Pointer simplifications for subtraction, simple reassociations. */
10336 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10338 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10339 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10340 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10342 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10343 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10344 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10345 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10346 return fold_build2 (PLUS_EXPR, type,
10347 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10348 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10350 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10351 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10353 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10354 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10355 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10356 if (tmp)
10357 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10360 /* A - (-B) -> A + B */
10361 if (TREE_CODE (arg1) == NEGATE_EXPR)
10362 return fold_build2 (PLUS_EXPR, type, op0,
10363 fold_convert (type, TREE_OPERAND (arg1, 0)));
10364 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10365 if (TREE_CODE (arg0) == NEGATE_EXPR
10366 && (FLOAT_TYPE_P (type)
10367 || INTEGRAL_TYPE_P (type))
10368 && negate_expr_p (arg1)
10369 && reorder_operands_p (arg0, arg1))
10370 return fold_build2 (MINUS_EXPR, type,
10371 fold_convert (type, negate_expr (arg1)),
10372 fold_convert (type, TREE_OPERAND (arg0, 0)));
10373 /* Convert -A - 1 to ~A. */
10374 if (INTEGRAL_TYPE_P (type)
10375 && TREE_CODE (arg0) == NEGATE_EXPR
10376 && integer_onep (arg1)
10377 && !TYPE_OVERFLOW_TRAPS (type))
10378 return fold_build1 (BIT_NOT_EXPR, type,
10379 fold_convert (type, TREE_OPERAND (arg0, 0)));
10381 /* Convert -1 - A to ~A. */
10382 if (INTEGRAL_TYPE_P (type)
10383 && integer_all_onesp (arg0))
10384 return fold_build1 (BIT_NOT_EXPR, type, op1);
10387 /* X - (X / CST) * CST is X % CST. */
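      /* E.g. x - (x / 8) * 8 folds directly to x % 8.  */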
10388 if (INTEGRAL_TYPE_P (type)
10389 && TREE_CODE (arg1) == MULT_EXPR
10390 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10391 && operand_equal_p (arg0,
10392 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10393 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10394 TREE_OPERAND (arg1, 1), 0))
10395 return fold_convert (type,
10396 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10397 arg0, TREE_OPERAND (arg1, 1)));
10399 if (! FLOAT_TYPE_P (type))
10401 if (integer_zerop (arg0))
10402 return negate_expr (fold_convert (type, arg1));
10403 if (integer_zerop (arg1))
10404 return non_lvalue (fold_convert (type, arg0));
10406 /* Fold A - (A & B) into ~B & A. */
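	  /* E.g. a - (a & 7) folds to ~7 & a, i.e. a with its low three
	     bits cleared.  */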
10407 if (!TREE_SIDE_EFFECTS (arg0)
10408 && TREE_CODE (arg1) == BIT_AND_EXPR)
10410 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10412 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10413 return fold_build2 (BIT_AND_EXPR, type,
10414 fold_build1 (BIT_NOT_EXPR, type, arg10),
10415 fold_convert (type, arg0));
10417 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10419 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10420 return fold_build2 (BIT_AND_EXPR, type,
10421 fold_build1 (BIT_NOT_EXPR, type, arg11),
10422 fold_convert (type, arg0));
10426 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10427 any power of 2 minus 1. */
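	  /* E.g. with B == 7: (a & ~7) - (a & 7) == (a ^ 7) - 7; writing
	     a == 8*q + r with 0 <= r < 8 gives 8*q - r on both sides.  */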
10428 if (TREE_CODE (arg0) == BIT_AND_EXPR
10429 && TREE_CODE (arg1) == BIT_AND_EXPR
10430 && operand_equal_p (TREE_OPERAND (arg0, 0),
10431 TREE_OPERAND (arg1, 0), 0))
10433 tree mask0 = TREE_OPERAND (arg0, 1);
10434 tree mask1 = TREE_OPERAND (arg1, 1);
10435 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10437 if (operand_equal_p (tem, mask1, 0))
10439 tem = fold_build2 (BIT_XOR_EXPR, type,
10440 TREE_OPERAND (arg0, 0), mask1);
10441 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10446 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10447 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10448 return non_lvalue (fold_convert (type, arg0));
10450 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10451 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10452 (-ARG1 + ARG0) reduces to -ARG1. */
10453 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10454 return negate_expr (fold_convert (type, arg1));
10456 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10457 __complex__ ( x, -y ). This is not the same for SNaNs or if
10458 signed zeros are involved. */
10459 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10461 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10463 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10464 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10465 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10466 bool arg0rz = false, arg0iz = false;
10467 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10468 || (arg0i && (arg0iz = real_zerop (arg0i))))
10470 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10471 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10472 if (arg0rz && arg1i && real_zerop (arg1i))
10474 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10475 arg1r ? arg1r
10476 : build1 (REALPART_EXPR, rtype, arg1));
10477 tree ip = arg0i ? arg0i
10478 : build1 (IMAGPART_EXPR, rtype, arg0);
10479 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10481 else if (arg0iz && arg1r && real_zerop (arg1r))
10483 tree rp = arg0r ? arg0r
10484 : build1 (REALPART_EXPR, rtype, arg0);
10485 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10486 arg1i ? arg1i
10487 : build1 (IMAGPART_EXPR, rtype, arg1));
10488 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10493 /* Fold &x - &x. This can happen from &x.foo - &x.
10494 This is unsafe for certain floats even in non-IEEE formats.
10495 	 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10496 Also note that operand_equal_p is always false if an operand
10497 is volatile. */
10499 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10500 && operand_equal_p (arg0, arg1, 0))
10501 return fold_convert (type, integer_zero_node);
10503 /* A - B -> A + (-B) if B is easily negatable. */
10504 if (negate_expr_p (arg1)
10505 && ((FLOAT_TYPE_P (type)
10506 /* Avoid this transformation if B is a positive REAL_CST. */
10507 && (TREE_CODE (arg1) != REAL_CST
10508 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10509 || INTEGRAL_TYPE_P (type)))
10510 return fold_build2 (PLUS_EXPR, type,
10511 fold_convert (type, arg0),
10512 fold_convert (type, negate_expr (arg1)));
10514 /* Try folding difference of addresses. */
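      /* E.g. a difference like &s.f - &s (after the front end's
         conversions) can fold to the constant byte offset of the
         field F within S.  */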
10516 HOST_WIDE_INT diff;
10518 if ((TREE_CODE (arg0) == ADDR_EXPR
10519 || TREE_CODE (arg1) == ADDR_EXPR)
10520 && ptr_difference_const (arg0, arg1, &diff))
10521 return build_int_cst_type (type, diff);
10524 /* Fold &a[i] - &a[j] to i-j. */
10525 if (TREE_CODE (arg0) == ADDR_EXPR
10526 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10527 && TREE_CODE (arg1) == ADDR_EXPR
10528 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10530 tree aref0 = TREE_OPERAND (arg0, 0);
10531 tree aref1 = TREE_OPERAND (arg1, 0);
10532 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10533 TREE_OPERAND (aref1, 0), 0))
10535 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10536 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10537 tree esz = array_ref_element_size (aref0);
10538 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10539 return fold_build2 (MULT_EXPR, type, diff,
10540 fold_convert (type, esz));
10545 if (FLOAT_TYPE_P (type)
10546 && flag_unsafe_math_optimizations
10547 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10548 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10549 && (tem = distribute_real_division (code, type, arg0, arg1)))
10550 return tem;
10552 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10553 same or one. Make sure type is not saturating.
10554 fold_plusminus_mult_expr will re-associate. */
10555 if ((TREE_CODE (arg0) == MULT_EXPR
10556 || TREE_CODE (arg1) == MULT_EXPR)
10557 && !TYPE_SATURATING (type)
10558 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10560 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10561 if (tem)
10562 return tem;
10565 goto associate;
10567 case MULT_EXPR:
10568 /* (-A) * (-B) -> A * B */
10569 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10570 return fold_build2 (MULT_EXPR, type,
10571 fold_convert (type, TREE_OPERAND (arg0, 0)),
10572 fold_convert (type, negate_expr (arg1)));
10573 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10574 return fold_build2 (MULT_EXPR, type,
10575 fold_convert (type, negate_expr (arg0)),
10576 fold_convert (type, TREE_OPERAND (arg1, 0)));
10578 if (! FLOAT_TYPE_P (type))
10580 if (integer_zerop (arg1))
10581 return omit_one_operand (type, arg1, arg0);
10582 if (integer_onep (arg1))
10583 return non_lvalue (fold_convert (type, arg0));
10584 /* Transform x * -1 into -x. Make sure to do the negation
10585 on the original operand with conversions not stripped
10586 because we can only strip non-sign-changing conversions. */
10587 if (integer_all_onesp (arg1))
10588 return fold_convert (type, negate_expr (op0));
10589 /* Transform x * -C into -x * C if x is easily negatable. */
10590 if (TREE_CODE (arg1) == INTEGER_CST
10591 && tree_int_cst_sgn (arg1) == -1
10592 && negate_expr_p (arg0)
10593 && (tem = negate_expr (arg1)) != arg1
10594 && !TREE_OVERFLOW (tem))
10595 return fold_build2 (MULT_EXPR, type,
10596 fold_convert (type, negate_expr (arg0)), tem);
10598 /* (a * (1 << b)) is (a << b) */
10599 if (TREE_CODE (arg1) == LSHIFT_EXPR
10600 && integer_onep (TREE_OPERAND (arg1, 0)))
10601 return fold_build2 (LSHIFT_EXPR, type, op0,
10602 TREE_OPERAND (arg1, 1));
10603 if (TREE_CODE (arg0) == LSHIFT_EXPR
10604 && integer_onep (TREE_OPERAND (arg0, 0)))
10605 return fold_build2 (LSHIFT_EXPR, type, op1,
10606 TREE_OPERAND (arg0, 1));
10608 /* (A + A) * C -> A * 2 * C */
10609 if (TREE_CODE (arg0) == PLUS_EXPR
10610 && TREE_CODE (arg1) == INTEGER_CST
10611 && operand_equal_p (TREE_OPERAND (arg0, 0),
10612 TREE_OPERAND (arg0, 1), 0))
10613 return fold_build2 (MULT_EXPR, type,
10614 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10615 TREE_OPERAND (arg0, 1)),
10616 fold_build2 (MULT_EXPR, type,
10617 build_int_cst (type, 2) , arg1));
10619 strict_overflow_p = false;
10620 if (TREE_CODE (arg1) == INTEGER_CST
10621 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10622 &strict_overflow_p)))
10624 if (strict_overflow_p)
10625 fold_overflow_warning (("assuming signed overflow does not "
10626 "occur when simplifying "
10627 "multiplication"),
10628 WARN_STRICT_OVERFLOW_MISC);
10629 return fold_convert (type, tem);
10632 /* Optimize z * conj(z) for integer complex numbers. */
10633 if (TREE_CODE (arg0) == CONJ_EXPR
10634 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10635 return fold_mult_zconjz (type, arg1);
10636 if (TREE_CODE (arg1) == CONJ_EXPR
10637 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10638 return fold_mult_zconjz (type, arg0);
10640 else
10642 /* Maybe fold x * 0 to 0. The expressions aren't the same
10643 when x is NaN, since x * 0 is also NaN. Nor are they the
10644 same in modes with signed zeros, since multiplying a
10645 negative value by 0 gives -0, not +0. */
10646 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10647 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10648 && real_zerop (arg1))
10649 return omit_one_operand (type, arg1, arg0);
10650 	  /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10651 Likewise for complex arithmetic with signed zeros. */
10652 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10653 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10654 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10655 && real_onep (arg1))
10656 return non_lvalue (fold_convert (type, arg0));
10658 /* Transform x * -1.0 into -x. */
10659 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10660 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10661 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10662 && real_minus_onep (arg1))
10663 return fold_convert (type, negate_expr (arg0));
10665 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10666 	     the result for floating point types due to rounding, so it is applied
10667 	     only if -fassociative-math was specified.  */
10668 if (flag_associative_math
10669 && TREE_CODE (arg0) == RDIV_EXPR
10670 && TREE_CODE (arg1) == REAL_CST
10671 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10673 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10674 arg1, 0);
10675 if (tem)
10676 return fold_build2 (RDIV_EXPR, type, tem,
10677 TREE_OPERAND (arg0, 1));
10680 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10681 if (operand_equal_p (arg0, arg1, 0))
10683 tree tem = fold_strip_sign_ops (arg0);
10684 if (tem != NULL_TREE)
10686 tem = fold_convert (type, tem);
10687 return fold_build2 (MULT_EXPR, type, tem, tem);
10691 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10692 This is not the same for NaNs or if signed zeros are
10693 involved. */
10694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10695 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10696 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10697 && TREE_CODE (arg1) == COMPLEX_CST
10698 && real_zerop (TREE_REALPART (arg1)))
10700 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10701 if (real_onep (TREE_IMAGPART (arg1)))
10702 return fold_build2 (COMPLEX_EXPR, type,
10703 negate_expr (fold_build1 (IMAGPART_EXPR,
10704 rtype, arg0)),
10705 fold_build1 (REALPART_EXPR, rtype, arg0));
10706 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10707 return fold_build2 (COMPLEX_EXPR, type,
10708 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10709 negate_expr (fold_build1 (REALPART_EXPR,
10710 rtype, arg0)));
10713 /* Optimize z * conj(z) for floating point complex numbers.
10714 Guarded by flag_unsafe_math_optimizations as non-finite
10715 imaginary components don't produce scalar results. */
10716 if (flag_unsafe_math_optimizations
10717 && TREE_CODE (arg0) == CONJ_EXPR
10718 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10719 return fold_mult_zconjz (type, arg1);
10720 if (flag_unsafe_math_optimizations
10721 && TREE_CODE (arg1) == CONJ_EXPR
10722 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10723 return fold_mult_zconjz (type, arg0);
10725 if (flag_unsafe_math_optimizations)
10727 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10728 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10730 /* Optimizations of root(...)*root(...). */
10731 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10733 tree rootfn, arg;
10734 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10735 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10737 /* Optimize sqrt(x)*sqrt(x) as x. */
10738 if (BUILTIN_SQRT_P (fcode0)
10739 && operand_equal_p (arg00, arg10, 0)
10740 && ! HONOR_SNANS (TYPE_MODE (type)))
10741 return arg00;
10743 /* Optimize root(x)*root(y) as root(x*y). */
10744 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10745 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10746 return build_call_expr (rootfn, 1, arg);
10749 /* Optimize expN(x)*expN(y) as expN(x+y). */
10750 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10752 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10753 tree arg = fold_build2 (PLUS_EXPR, type,
10754 CALL_EXPR_ARG (arg0, 0),
10755 CALL_EXPR_ARG (arg1, 0));
10756 return build_call_expr (expfn, 1, arg);
10759 /* Optimizations of pow(...)*pow(...). */
10760 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10761 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10762 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10764 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10765 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10766 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10767 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10769 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10770 if (operand_equal_p (arg01, arg11, 0))
10772 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10773 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10774 return build_call_expr (powfn, 2, arg, arg01);
10777 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10778 if (operand_equal_p (arg00, arg10, 0))
10780 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10781 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10782 return build_call_expr (powfn, 2, arg00, arg);
10786 /* Optimize tan(x)*cos(x) as sin(x). */
10787 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10788 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10789 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10790 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10791 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10792 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10793 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10794 CALL_EXPR_ARG (arg1, 0), 0))
10796 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10798 if (sinfn != NULL_TREE)
10799 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
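/* For instance, tan (x) * cos (x) and cos (x) * tan (x) both fold to
   sin (x) here; the identity is exact wherever tan (x) is finite,
   which is why the enclosing flag_unsafe_math_optimizations guard is
   needed.  */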
10802 /* Optimize x*pow(x,c) as pow(x,c+1). */
10803 if (fcode1 == BUILT_IN_POW
10804 || fcode1 == BUILT_IN_POWF
10805 || fcode1 == BUILT_IN_POWL)
10807 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10808 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10809 if (TREE_CODE (arg11) == REAL_CST
10810 && !TREE_OVERFLOW (arg11)
10811 && operand_equal_p (arg0, arg10, 0))
10813 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10814 REAL_VALUE_TYPE c;
10815 tree arg;
10817 c = TREE_REAL_CST (arg11);
10818 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10819 arg = build_real (type, c);
10820 return build_call_expr (powfn, 2, arg0, arg);
10824 /* Optimize pow(x,c)*x as pow(x,c+1). */
10825 if (fcode0 == BUILT_IN_POW
10826 || fcode0 == BUILT_IN_POWF
10827 || fcode0 == BUILT_IN_POWL)
10829 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10830 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10831 if (TREE_CODE (arg01) == REAL_CST
10832 && !TREE_OVERFLOW (arg01)
10833 && operand_equal_p (arg1, arg00, 0))
10835 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10836 REAL_VALUE_TYPE c;
10837 tree arg;
10839 c = TREE_REAL_CST (arg01);
10840 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10841 arg = build_real (type, c);
10842 return build_call_expr (powfn, 2, arg1, arg);
10846 /* Optimize x*x as pow(x,2.0), which is later expanded back as x*x. */

10847 if (optimize_function_for_speed_p (cfun)
10848 && operand_equal_p (arg0, arg1, 0))
10850 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10852 if (powfn)
10854 tree arg = build_real (type, dconst2);
10855 return build_call_expr (powfn, 2, arg0, arg);
10860 goto associate;
10862 case BIT_IOR_EXPR:
10863 bit_ior:
10864 if (integer_all_onesp (arg1))
10865 return omit_one_operand (type, arg1, arg0);
10866 if (integer_zerop (arg1))
10867 return non_lvalue (fold_convert (type, arg0));
10868 if (operand_equal_p (arg0, arg1, 0))
10869 return non_lvalue (fold_convert (type, arg0));
10871 /* ~X | X is -1. */
10872 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10873 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10875 t1 = fold_convert (type, integer_zero_node);
10876 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10877 return omit_one_operand (type, t1, arg1);
10880 /* X | ~X is -1. */
10881 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10882 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10884 t1 = fold_convert (type, integer_zero_node);
10885 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10886 return omit_one_operand (type, t1, arg0);
10889 /* Canonicalize (X & C1) | C2. */
10890 if (TREE_CODE (arg0) == BIT_AND_EXPR
10891 && TREE_CODE (arg1) == INTEGER_CST
10892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10894 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10895 int width = TYPE_PRECISION (type), w;
10896 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10897 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10898 hi2 = TREE_INT_CST_HIGH (arg1);
10899 lo2 = TREE_INT_CST_LOW (arg1);
10901 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10902 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10903 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10905 if (width > HOST_BITS_PER_WIDE_INT)
10907 mhi = (unsigned HOST_WIDE_INT) -1
10908 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10909 mlo = -1;
10911 else
10913 mhi = 0;
10914 mlo = (unsigned HOST_WIDE_INT) -1
10915 >> (HOST_BITS_PER_WIDE_INT - width);
10918 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10919 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10920 return fold_build2 (BIT_IOR_EXPR, type,
10921 TREE_OPERAND (arg0, 0), arg1);
10923 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10924 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10925 mode which allows further optimizations. */
10926 hi1 &= mhi;
10927 lo1 &= mlo;
10928 hi2 &= mhi;
10929 lo2 &= mlo;
10930 hi3 = hi1 & ~hi2;
10931 lo3 = lo1 & ~lo2;
10932 for (w = BITS_PER_UNIT;
10933 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10934 w <<= 1)
10936 unsigned HOST_WIDE_INT mask
10937 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10938 if (((lo1 | lo2) & mask) == mask
10939 && (lo1 & ~mask) == 0 && hi1 == 0)
10941 hi3 = 0;
10942 lo3 = mask;
10943 break;
10946 if (hi3 != hi1 || lo3 != lo1)
10947 return fold_build2 (BIT_IOR_EXPR, type,
10948 fold_build2 (BIT_AND_EXPR, type,
10949 TREE_OPERAND (arg0, 0),
10950 build_int_cst_wide (type,
10951 lo3, hi3)),
10952 arg1);
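/* Worked example, assuming a 32-bit int: in (x & 0x0ff0) | 0x00ff,
   C1 & ~C2 == 0x0f00, so the expression becomes (x & 0x0f00) | 0x00ff.
   By contrast (x & 0xff00) | 0x00ff is rewritten by the loop above as
   (x & 0xffff) | 0x00ff, because 0xffff is a HImode mask that may
   enable, e.g., a zero-extension pattern.  */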
10955 /* (X & Y) | Y is (X, Y). */
10956 if (TREE_CODE (arg0) == BIT_AND_EXPR
10957 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10958 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10959 /* (X & Y) | X is (Y, X). */
10960 if (TREE_CODE (arg0) == BIT_AND_EXPR
10961 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10962 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10963 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10964 /* X | (X & Y) is (Y, X). */
10965 if (TREE_CODE (arg1) == BIT_AND_EXPR
10966 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10967 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10968 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10969 /* X | (Y & X) is (Y, X). */
10970 if (TREE_CODE (arg1) == BIT_AND_EXPR
10971 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10972 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10973 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10975 t1 = distribute_bit_expr (code, type, arg0, arg1);
10976 if (t1 != NULL_TREE)
10977 return t1;
10979 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10981 This results in more efficient code for machines without a NAND
10982 instruction. Combine will canonicalize to the first form
10983 which will allow use of NAND instructions provided by the
10984 backend if they exist. */
10985 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10986 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10988 return fold_build1 (BIT_NOT_EXPR, type,
10989 build2 (BIT_AND_EXPR, type,
10990 fold_convert (type,
10991 TREE_OPERAND (arg0, 0)),
10992 fold_convert (type,
10993 TREE_OPERAND (arg1, 0))));
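/* E.g. ~a | ~b becomes ~(a & b): the same value by De Morgan's law,
   computed with one NOT instead of two, and in a shape combine can
   later match against a NAND pattern.  */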
10996 /* See if this can be simplified into a rotate first. If that
10997 is unsuccessful continue in the association code. */
10998 goto bit_rotate;
11000 case BIT_XOR_EXPR:
11001 if (integer_zerop (arg1))
11002 return non_lvalue (fold_convert (type, arg0));
11003 if (integer_all_onesp (arg1))
11004 return fold_build1 (BIT_NOT_EXPR, type, op0);
11005 if (operand_equal_p (arg0, arg1, 0))
11006 return omit_one_operand (type, integer_zero_node, arg0);
11008 /* ~X ^ X is -1. */
11009 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11010 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11012 t1 = fold_convert (type, integer_zero_node);
11013 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11014 return omit_one_operand (type, t1, arg1);
11017 /* X ^ ~X is -1. */
11018 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11019 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11021 t1 = fold_convert (type, integer_zero_node);
11022 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11023 return omit_one_operand (type, t1, arg0);
11026 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11027 with a constant, and the two constants have no bits in common,
11028 we should treat this as a BIT_IOR_EXPR since this may produce more
11029 simplifications. */
11030 if (TREE_CODE (arg0) == BIT_AND_EXPR
11031 && TREE_CODE (arg1) == BIT_AND_EXPR
11032 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11033 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11034 && integer_zerop (const_binop (BIT_AND_EXPR,
11035 TREE_OPERAND (arg0, 1),
11036 TREE_OPERAND (arg1, 1), 0)))
11038 code = BIT_IOR_EXPR;
11039 goto bit_ior;
11042 /* (X | Y) ^ X -> Y & ~X. */
11043 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11044 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11046 tree t2 = TREE_OPERAND (arg0, 1);
11047 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11048 arg1);
11049 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11050 fold_convert (type, t1));
11051 return t1;
11054 /* (Y | X) ^ X -> Y & ~X. */
11055 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11056 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11058 tree t2 = TREE_OPERAND (arg0, 0);
11059 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11060 arg1);
11061 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11062 fold_convert (type, t1));
11063 return t1;
11066 /* X ^ (X | Y) -> Y & ~X. */
11067 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11068 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11070 tree t2 = TREE_OPERAND (arg1, 1);
11071 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11072 arg0);
11073 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11074 fold_convert (type, t1));
11075 return t1;
11078 /* X ^ (Y | X) -> Y & ~X. */
11079 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11080 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11082 tree t2 = TREE_OPERAND (arg1, 0);
11083 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11084 arg0);
11085 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11086 fold_convert (type, t1));
11087 return t1;
11090 /* Convert ~X ^ ~Y to X ^ Y. */
11091 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11092 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11093 return fold_build2 (code, type,
11094 fold_convert (type, TREE_OPERAND (arg0, 0)),
11095 fold_convert (type, TREE_OPERAND (arg1, 0)));
11097 /* Convert ~X ^ C to X ^ ~C. */
11098 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11099 && TREE_CODE (arg1) == INTEGER_CST)
11100 return fold_build2 (code, type,
11101 fold_convert (type, TREE_OPERAND (arg0, 0)),
11102 fold_build1 (BIT_NOT_EXPR, type, arg1));
11104 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11105 if (TREE_CODE (arg0) == BIT_AND_EXPR
11106 && integer_onep (TREE_OPERAND (arg0, 1))
11107 && integer_onep (arg1))
11108 return fold_build2 (EQ_EXPR, type, arg0,
11109 build_int_cst (TREE_TYPE (arg0), 0));
11111 /* Fold (X & Y) ^ Y as ~X & Y. */
11112 if (TREE_CODE (arg0) == BIT_AND_EXPR
11113 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11115 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11116 return fold_build2 (BIT_AND_EXPR, type,
11117 fold_build1 (BIT_NOT_EXPR, type, tem),
11118 fold_convert (type, arg1));
11120 /* Fold (X & Y) ^ X as ~Y & X. */
11121 if (TREE_CODE (arg0) == BIT_AND_EXPR
11122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11123 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11125 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11126 return fold_build2 (BIT_AND_EXPR, type,
11127 fold_build1 (BIT_NOT_EXPR, type, tem),
11128 fold_convert (type, arg1));
11130 /* Fold X ^ (X & Y) as X & ~Y. */
11131 if (TREE_CODE (arg1) == BIT_AND_EXPR
11132 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11134 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11135 return fold_build2 (BIT_AND_EXPR, type,
11136 fold_convert (type, arg0),
11137 fold_build1 (BIT_NOT_EXPR, type, tem));
11139 /* Fold X ^ (Y & X) as ~Y & X. */
11140 if (TREE_CODE (arg1) == BIT_AND_EXPR
11141 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11142 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11144 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11145 return fold_build2 (BIT_AND_EXPR, type,
11146 fold_build1 (BIT_NOT_EXPR, type, tem),
11147 fold_convert (type, arg0));
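/* Quick sanity check of the four X/Y XOR-AND folds above: with
   x == 0b1100 and y == 0b1010, x ^ (x & y) == 0b0100, which is
   exactly x & ~y == 0b1100 & 0b0101 == 0b0100.  */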
11150 /* See if this can be simplified into a rotate first. If that
11151 is unsuccessful continue in the association code. */
11152 goto bit_rotate;
11154 case BIT_AND_EXPR:
11155 if (integer_all_onesp (arg1))
11156 return non_lvalue (fold_convert (type, arg0));
11157 if (integer_zerop (arg1))
11158 return omit_one_operand (type, arg1, arg0);
11159 if (operand_equal_p (arg0, arg1, 0))
11160 return non_lvalue (fold_convert (type, arg0));
11162 /* ~X & X is always zero. */
11163 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11164 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11165 return omit_one_operand (type, integer_zero_node, arg1);
11167 /* X & ~X is always zero. */
11168 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11169 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11170 return omit_one_operand (type, integer_zero_node, arg0);
11172 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11173 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11174 && TREE_CODE (arg1) == INTEGER_CST
11175 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11177 tree tmp1 = fold_convert (type, arg1);
11178 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11179 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11180 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11181 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11182 return fold_convert (type,
11183 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11186 /* (X | Y) & Y is (X, Y). */
11187 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11188 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11189 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11190 /* (X | Y) & X is (Y, X). */
11191 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11192 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11193 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11194 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11195 /* X & (X | Y) is (Y, X). */
11196 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11197 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11198 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11199 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11200 /* X & (Y | X) is (Y, X). */
11201 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11202 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11203 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11204 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11206 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11207 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11208 && integer_onep (TREE_OPERAND (arg0, 1))
11209 && integer_onep (arg1))
11211 tem = TREE_OPERAND (arg0, 0);
11212 return fold_build2 (EQ_EXPR, type,
11213 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11214 build_int_cst (TREE_TYPE (tem), 1)),
11215 build_int_cst (TREE_TYPE (tem), 0));
11217 /* Fold ~X & 1 as (X & 1) == 0. */
11218 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11219 && integer_onep (arg1))
11221 tem = TREE_OPERAND (arg0, 0);
11222 return fold_build2 (EQ_EXPR, type,
11223 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11224 build_int_cst (TREE_TYPE (tem), 1)),
11225 build_int_cst (TREE_TYPE (tem), 0));
11228 /* Fold (X ^ Y) & Y as ~X & Y. */
11229 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11230 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11232 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11233 return fold_build2 (BIT_AND_EXPR, type,
11234 fold_build1 (BIT_NOT_EXPR, type, tem),
11235 fold_convert (type, arg1));
11237 /* Fold (X ^ Y) & X as ~Y & X. */
11238 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11239 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11240 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11242 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11243 return fold_build2 (BIT_AND_EXPR, type,
11244 fold_build1 (BIT_NOT_EXPR, type, tem),
11245 fold_convert (type, arg1));
11247 /* Fold X & (X ^ Y) as X & ~Y. */
11248 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11249 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11251 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11252 return fold_build2 (BIT_AND_EXPR, type,
11253 fold_convert (type, arg0),
11254 fold_build1 (BIT_NOT_EXPR, type, tem));
11256 /* Fold X & (Y ^ X) as ~Y & X. */
11257 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11258 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11259 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11261 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11262 return fold_build2 (BIT_AND_EXPR, type,
11263 fold_build1 (BIT_NOT_EXPR, type, tem),
11264 fold_convert (type, arg0));
11267 t1 = distribute_bit_expr (code, type, arg0, arg1);
11268 if (t1 != NULL_TREE)
11269 return t1;
11270 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11271 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11272 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11274 unsigned int prec
11275 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11277 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11278 && (~TREE_INT_CST_LOW (arg1)
11279 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11280 return fold_convert (type, TREE_OPERAND (arg0, 0));
11283 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11285 This results in more efficient code for machines without a NOR
11286 instruction. Combine will canonicalize to the first form
11287 which will allow use of NOR instructions provided by the
11288 backend if they exist. */
11289 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11290 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11292 return fold_build1 (BIT_NOT_EXPR, type,
11293 build2 (BIT_IOR_EXPR, type,
11294 fold_convert (type,
11295 TREE_OPERAND (arg0, 0)),
11296 fold_convert (type,
11297 TREE_OPERAND (arg1, 0))));
11300 /* If arg0 is derived from the address of an object or function, we may
11301 be able to fold this expression using the object or function's
11302 alignment. */
11303 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11305 unsigned HOST_WIDE_INT modulus, residue;
11306 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11308 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11309 integer_onep (arg1));
11311 /* This works because modulus is a power of 2. If this weren't the
11312 case, we'd have to replace it by its greatest power-of-2
11313 divisor: modulus & -modulus. */
11314 if (low < modulus)
11315 return build_int_cst (type, residue & low);
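/* Illustrative example: given

     static int buf[4] __attribute__ ((aligned (16)));

   the modulus computed for &buf is 16 and the residue 0, so an
   alignment test that masks the address with 15 folds to the
   constant 0 here.  */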
11318 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11319 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11320 if the new mask might be further optimized. */
11321 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11322 || TREE_CODE (arg0) == RSHIFT_EXPR)
11323 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11324 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11325 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11326 < TYPE_PRECISION (TREE_TYPE (arg0))
11327 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11328 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11330 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11331 unsigned HOST_WIDE_INT mask
11332 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11333 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11334 tree shift_type = TREE_TYPE (arg0);
11336 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11337 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11338 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11339 && TYPE_PRECISION (TREE_TYPE (arg0))
11340 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11342 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11343 tree arg00 = TREE_OPERAND (arg0, 0);
11344 /* See if more bits can be proven as zero because of
11345 zero extension. */
11346 if (TREE_CODE (arg00) == NOP_EXPR
11347 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11349 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11350 if (TYPE_PRECISION (inner_type)
11351 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11352 && TYPE_PRECISION (inner_type) < prec)
11354 prec = TYPE_PRECISION (inner_type);
11355 /* See if we can shorten the right shift. */
11356 if (shiftc < prec)
11357 shift_type = inner_type;
11360 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11361 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11362 zerobits <<= prec - shiftc;
11363 /* For an arithmetic shift, if the sign bit could be set, zerobits
11364 can actually contain sign bits, so no transformation is
11365 possible unless MASK masks them all away. In that
11366 case the shift needs to be converted into a logical shift. */
11367 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11368 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11370 if ((mask & zerobits) == 0)
11371 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11372 else
11373 zerobits = 0;
11377 /* ((X << 16) & 0xff00) is (X, 0). */
11378 if ((mask & zerobits) == mask)
11379 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11381 newmask = mask | zerobits;
11382 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11384 unsigned int prec;
11386 /* Only do the transformation if NEWMASK is some integer
11387 mode's mask. */
11388 for (prec = BITS_PER_UNIT;
11389 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11390 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11391 break;
11392 if (prec < HOST_BITS_PER_WIDE_INT
11393 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11395 tree newmaskt;
11397 if (shift_type != TREE_TYPE (arg0))
11399 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11400 fold_convert (shift_type,
11401 TREE_OPERAND (arg0, 0)),
11402 TREE_OPERAND (arg0, 1));
11403 tem = fold_convert (type, tem);
11405 else
11406 tem = op0;
11407 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11408 if (!tree_int_cst_equal (newmaskt, arg1))
11409 return fold_build2 (BIT_AND_EXPR, type, tem, newmaskt);
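/* Two examples on a 32-bit unsigned X: (x << 16) & 0xff00 is always 0,
   since every bit the mask selects is a shifted-in zero; and in
   (x >> 24) & 0xff the mask widens to 0xffffffff -- the bits above
   bit 7 are already known to be zero -- so the AND folds away
   entirely, leaving just x >> 24.  */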
11414 goto associate;
11416 case RDIV_EXPR:
11417 /* Don't touch a floating-point divide by zero unless the mode
11418 of the constant can represent infinity. */
11419 if (TREE_CODE (arg1) == REAL_CST
11420 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11421 && real_zerop (arg1))
11422 return NULL_TREE;
11424 /* Optimize A / A to 1.0 if we don't care about
11425 NaNs or Infinities. Skip the transformation
11426 for non-real operands. */
11427 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11428 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11429 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11430 && operand_equal_p (arg0, arg1, 0))
11432 tree r = build_real (TREE_TYPE (arg0), dconst1);
11434 return omit_two_operands (type, r, arg0, arg1);
11437 /* The complex version of the above A / A optimization. */
11438 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11439 && operand_equal_p (arg0, arg1, 0))
11441 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11442 if (! HONOR_NANS (TYPE_MODE (elem_type))
11443 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11445 tree r = build_real (elem_type, dconst1);
11446 /* omit_two_operands will call fold_convert for us. */
11447 return omit_two_operands (type, r, arg0, arg1);
11451 /* (-A) / (-B) -> A / B */
11452 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11453 return fold_build2 (RDIV_EXPR, type,
11454 TREE_OPERAND (arg0, 0),
11455 negate_expr (arg1));
11456 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11457 return fold_build2 (RDIV_EXPR, type,
11458 negate_expr (arg0),
11459 TREE_OPERAND (arg1, 0));
11461 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11462 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11463 && real_onep (arg1))
11464 return non_lvalue (fold_convert (type, arg0));
11466 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11467 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11468 && real_minus_onep (arg1))
11469 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11471 /* If ARG1 is a constant, we can convert this to a multiply by the
11472 reciprocal. This does not have the same rounding properties,
11473 so only do this if -freciprocal-math. We can actually
11474 always safely do it if ARG1 is a power of two, but it's hard to
11475 tell if it is or not in a portable manner. */
11476 if (TREE_CODE (arg1) == REAL_CST)
11478 if (flag_reciprocal_math
11479 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11480 arg1, 0)))
11481 return fold_build2 (MULT_EXPR, type, arg0, tem);
11482 /* Find the reciprocal if optimizing and the result is exact. */
11483 if (optimize)
11485 REAL_VALUE_TYPE r;
11486 r = TREE_REAL_CST (arg1);
11487 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11489 tem = build_real (type, r);
11490 return fold_build2 (MULT_EXPR, type,
11491 fold_convert (type, arg0), tem);
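/* E.g. when optimizing, x / 4.0 becomes x * 0.25 unconditionally,
   because the reciprocal of a power of two is exact; x / 3.0 becomes
   x * (1.0/3.0) only under -freciprocal-math, since that result may
   round differently.  */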
11495 /* Convert A/B/C to A/(B*C). */
11496 if (flag_reciprocal_math
11497 && TREE_CODE (arg0) == RDIV_EXPR)
11498 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11499 fold_build2 (MULT_EXPR, type,
11500 TREE_OPERAND (arg0, 1), arg1));
11502 /* Convert A/(B/C) to (A/B)*C. */
11503 if (flag_reciprocal_math
11504 && TREE_CODE (arg1) == RDIV_EXPR)
11505 return fold_build2 (MULT_EXPR, type,
11506 fold_build2 (RDIV_EXPR, type, arg0,
11507 TREE_OPERAND (arg1, 0)),
11508 TREE_OPERAND (arg1, 1));
11510 /* Convert C1/(X*C2) into (C1/C2)/X. */
11511 if (flag_reciprocal_math
11512 && TREE_CODE (arg1) == MULT_EXPR
11513 && TREE_CODE (arg0) == REAL_CST
11514 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11516 tree tem = const_binop (RDIV_EXPR, arg0,
11517 TREE_OPERAND (arg1, 1), 0);
11518 if (tem)
11519 return fold_build2 (RDIV_EXPR, type, tem,
11520 TREE_OPERAND (arg1, 0));
11523 if (flag_unsafe_math_optimizations)
11525 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11526 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11528 /* Optimize sin(x)/cos(x) as tan(x). */
11529 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11530 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11531 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11532 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11533 CALL_EXPR_ARG (arg1, 0), 0))
11535 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11537 if (tanfn != NULL_TREE)
11538 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11541 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11542 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11543 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11544 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11545 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11546 CALL_EXPR_ARG (arg1, 0), 0))
11548 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11550 if (tanfn != NULL_TREE)
11552 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11553 return fold_build2 (RDIV_EXPR, type,
11554 build_real (type, dconst1), tmp);
11558 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11559 NaNs or Infinities. */
11560 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11561 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11562 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11564 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11565 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11567 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11568 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11569 && operand_equal_p (arg00, arg01, 0))
11571 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11573 if (cosfn != NULL_TREE)
11574 return build_call_expr (cosfn, 1, arg00);
11578 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11579 NaNs or Infinities. */
11580 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11581 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11582 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11584 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11585 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11587 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11588 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11589 && operand_equal_p (arg00, arg01, 0))
11591 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11593 if (cosfn != NULL_TREE)
11595 tree tmp = build_call_expr (cosfn, 1, arg00);
11596 return fold_build2 (RDIV_EXPR, type,
11597 build_real (type, dconst1),
11598 tmp);
11603 /* Optimize pow(x,c)/x as pow(x,c-1). */
11604 if (fcode0 == BUILT_IN_POW
11605 || fcode0 == BUILT_IN_POWF
11606 || fcode0 == BUILT_IN_POWL)
11608 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11609 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11610 if (TREE_CODE (arg01) == REAL_CST
11611 && !TREE_OVERFLOW (arg01)
11612 && operand_equal_p (arg1, arg00, 0))
11614 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11615 REAL_VALUE_TYPE c;
11616 tree arg;
11618 c = TREE_REAL_CST (arg01);
11619 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11620 arg = build_real (type, c);
11621 return build_call_expr (powfn, 2, arg1, arg);
11625 /* Optimize a/root(b/c) into a*root(c/b). */
11626 if (BUILTIN_ROOT_P (fcode1))
11628 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11630 if (TREE_CODE (rootarg) == RDIV_EXPR)
11632 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11633 tree b = TREE_OPERAND (rootarg, 0);
11634 tree c = TREE_OPERAND (rootarg, 1);
11636 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11638 tmp = build_call_expr (rootfn, 1, tmp);
11639 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11643 /* Optimize x/expN(y) into x*expN(-y). */
11644 if (BUILTIN_EXPONENT_P (fcode1))
11646 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11647 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11648 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11649 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11652 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11653 if (fcode1 == BUILT_IN_POW
11654 || fcode1 == BUILT_IN_POWF
11655 || fcode1 == BUILT_IN_POWL)
11657 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11658 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11659 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11660 tree neg11 = fold_convert (type, negate_expr (arg11));
11661 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11662 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11665 return NULL_TREE;
11667 case TRUNC_DIV_EXPR:
11668 case FLOOR_DIV_EXPR:
11669 /* Simplify A / (B << N) where A and B are positive and B is
11670 a power of 2, to A >> (N + log2(B)). */
11671 strict_overflow_p = false;
11672 if (TREE_CODE (arg1) == LSHIFT_EXPR
11673 && (TYPE_UNSIGNED (type)
11674 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11676 tree sval = TREE_OPERAND (arg1, 0);
11677 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11679 tree sh_cnt = TREE_OPERAND (arg1, 1);
11680 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11682 if (strict_overflow_p)
11683 fold_overflow_warning (("assuming signed overflow does not "
11684 "occur when simplifying A / (B << N)"),
11685 WARN_STRICT_OVERFLOW_MISC);
11687 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11688 sh_cnt, build_int_cst (NULL_TREE, pow2));
11689 return fold_build2 (RSHIFT_EXPR, type,
11690 fold_convert (type, arg0), sh_cnt);
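/* E.g. for unsigned A, a / (4u << n) becomes a >> (n + 2): the
   log2 (4) == 2 is folded into the shift count and the division
   disappears.  */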
11694 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11695 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11696 if (INTEGRAL_TYPE_P (type)
11697 && TYPE_UNSIGNED (type)
11698 && code == FLOOR_DIV_EXPR)
11699 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11701 /* Fall thru */
11703 case ROUND_DIV_EXPR:
11704 case CEIL_DIV_EXPR:
11705 case EXACT_DIV_EXPR:
11706 if (integer_onep (arg1))
11707 return non_lvalue (fold_convert (type, arg0));
11708 if (integer_zerop (arg1))
11709 return NULL_TREE;
11710 /* X / -1 is -X. */
11711 if (!TYPE_UNSIGNED (type)
11712 && TREE_CODE (arg1) == INTEGER_CST
11713 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11714 && TREE_INT_CST_HIGH (arg1) == -1)
11715 return fold_convert (type, negate_expr (arg0));
11717 /* Convert -A / -B to A / B when the type is signed and overflow is
11718 undefined. */
11719 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11720 && TREE_CODE (arg0) == NEGATE_EXPR
11721 && negate_expr_p (arg1))
11723 if (INTEGRAL_TYPE_P (type))
11724 fold_overflow_warning (("assuming signed overflow does not occur "
11725 "when distributing negation across "
11726 "division"),
11727 WARN_STRICT_OVERFLOW_MISC);
11728 return fold_build2 (code, type,
11729 fold_convert (type, TREE_OPERAND (arg0, 0)),
11730 fold_convert (type, negate_expr (arg1)));
11732 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11733 && TREE_CODE (arg1) == NEGATE_EXPR
11734 && negate_expr_p (arg0))
11736 if (INTEGRAL_TYPE_P (type))
11737 fold_overflow_warning (("assuming signed overflow does not occur "
11738 "when distributing negation across "
11739 "division"),
11740 WARN_STRICT_OVERFLOW_MISC);
11741 return fold_build2 (code, type,
11742 fold_convert (type, negate_expr (arg0)),
11743 fold_convert (type, TREE_OPERAND (arg1, 0)));
11746 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11747 operation, EXACT_DIV_EXPR.
11749 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11750 At one time the others generated faster code; it's not clear whether
11751 they still do after the last round of changes to the DIV code in expmed.c. */
11752 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11753 && multiple_of_p (type, arg0, arg1))
11754 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11756 strict_overflow_p = false;
11757 if (TREE_CODE (arg1) == INTEGER_CST
11758 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11759 &strict_overflow_p)))
11761 if (strict_overflow_p)
11762 fold_overflow_warning (("assuming signed overflow does not occur "
11763 "when simplifying division"),
11764 WARN_STRICT_OVERFLOW_MISC);
11765 return fold_convert (type, tem);
11768 return NULL_TREE;
11770 case CEIL_MOD_EXPR:
11771 case FLOOR_MOD_EXPR:
11772 case ROUND_MOD_EXPR:
11773 case TRUNC_MOD_EXPR:
11774 /* X % 1 is always zero, but be sure to preserve any side
11775 effects in X. */
11776 if (integer_onep (arg1))
11777 return omit_one_operand (type, integer_zero_node, arg0);
11779 /* For X % 0, return X % 0 unchanged so that we get the
11780 proper warnings and errors. */
11781 if (integer_zerop (arg1))
11782 return NULL_TREE;
11784 /* 0 % X is always zero, but be sure to preserve any side
11785 effects in X. Place this after checking for X == 0. */
11786 if (integer_zerop (arg0))
11787 return omit_one_operand (type, integer_zero_node, arg1);
11789 /* X % -1 is zero. */
11790 if (!TYPE_UNSIGNED (type)
11791 && TREE_CODE (arg1) == INTEGER_CST
11792 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11793 && TREE_INT_CST_HIGH (arg1) == -1)
11794 return omit_one_operand (type, integer_zero_node, arg0);
11796 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11797 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11798 strict_overflow_p = false;
11799 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11800 && (TYPE_UNSIGNED (type)
11801 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11803 tree c = arg1;
11804 /* Also optimize A % (C << N) where C is a power of 2,
11805 to A & ((C << N) - 1). */
11806 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11807 c = TREE_OPERAND (arg1, 0);
11809 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11811 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11812 build_int_cst (TREE_TYPE (arg1), 1));
11813 if (strict_overflow_p)
11814 fold_overflow_warning (("assuming signed overflow does not "
11815 "occur when simplifying "
11816 "X % (power of two)"),
11817 WARN_STRICT_OVERFLOW_MISC);
11818 return fold_build2 (BIT_AND_EXPR, type,
11819 fold_convert (type, arg0),
11820 fold_convert (type, mask));
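/* E.g. for unsigned X, x % 8 becomes x & 7, and x % (8u << n)
   becomes x & ((8u << n) - 1); the same applies to a signed X that
   can be proven non-negative.  */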
11824 /* X % -C is the same as X % C. */
11825 if (code == TRUNC_MOD_EXPR
11826 && !TYPE_UNSIGNED (type)
11827 && TREE_CODE (arg1) == INTEGER_CST
11828 && !TREE_OVERFLOW (arg1)
11829 && TREE_INT_CST_HIGH (arg1) < 0
11830 && !TYPE_OVERFLOW_TRAPS (type)
11831 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11832 && !sign_bit_p (arg1, arg1))
11833 return fold_build2 (code, type, fold_convert (type, arg0),
11834 fold_convert (type, negate_expr (arg1)));
11836 /* X % -Y is the same as X % Y. */
11837 if (code == TRUNC_MOD_EXPR
11838 && !TYPE_UNSIGNED (type)
11839 && TREE_CODE (arg1) == NEGATE_EXPR
11840 && !TYPE_OVERFLOW_TRAPS (type))
11841 return fold_build2 (code, type, fold_convert (type, arg0),
11842 fold_convert (type, TREE_OPERAND (arg1, 0)));
11844 if (TREE_CODE (arg1) == INTEGER_CST
11845 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11846 &strict_overflow_p)))
11848 if (strict_overflow_p)
11849 fold_overflow_warning (("assuming signed overflow does not occur "
11850 "when simplifying modulus"),
11851 WARN_STRICT_OVERFLOW_MISC);
11852 return fold_convert (type, tem);
11855 return NULL_TREE;
11857 case LROTATE_EXPR:
11858 case RROTATE_EXPR:
11859 if (integer_all_onesp (arg0))
11860 return omit_one_operand (type, arg0, arg1);
11861 goto shift;
11863 case RSHIFT_EXPR:
11864 /* Optimize -1 >> x for arithmetic right shifts. */
11865 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11866 && tree_expr_nonnegative_p (arg1))
11867 return omit_one_operand (type, arg0, arg1);
11868 /* ... fall through ... */
11870 case LSHIFT_EXPR:
11871 shift:
11872 if (integer_zerop (arg1))
11873 return non_lvalue (fold_convert (type, arg0));
11874 if (integer_zerop (arg0))
11875 return omit_one_operand (type, arg0, arg1);
11877 /* Since a negative shift count is not well-defined,
11878 don't try to compute it in the compiler. */
11879 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11880 return NULL_TREE;
11882 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11883 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11884 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11885 && host_integerp (TREE_OPERAND (arg0, 1), false)
11886 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11888 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11889 + TREE_INT_CST_LOW (arg1));
11891 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11892 being well defined. */
11893 if (low >= TYPE_PRECISION (type))
11895 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11896 low = low % TYPE_PRECISION (type);
11897 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11898 return omit_one_operand (type, build_int_cst (type, 0),
11899 TREE_OPERAND (arg0, 0));
11900 else
11901 low = TYPE_PRECISION (type) - 1;
11904 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11905 build_int_cst (type, low));
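/* E.g. (x << 3) << 5 becomes x << 8.  When the combined count reaches
   the precision, rotates wrap around (rotating a 32-bit value by 40 is
   rotating it by 8), left and unsigned right shifts collapse to 0, and
   an arithmetic right shift saturates at precision - 1.  */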
11908 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11909 into x & ((unsigned)-1 >> c) for unsigned types. */
11910 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11911 || (TYPE_UNSIGNED (type)
11912 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11913 && host_integerp (arg1, false)
11914 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11915 && host_integerp (TREE_OPERAND (arg0, 1), false)
11916 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11918 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11919 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11920 tree lshift;
11921 tree arg00;
11923 if (low0 == low1)
11925 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11927 lshift = build_int_cst (type, -1);
11928 lshift = int_const_binop (code, lshift, arg1, 0);
11930 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11934 /* Rewrite an LROTATE_EXPR by a constant into an
11935 RROTATE_EXPR by a new constant. */
11936 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11938 tree tem = build_int_cst (TREE_TYPE (arg1),
11939 TYPE_PRECISION (type));
11940 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11941 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11944 /* If we have a rotate of a bit operation with the rotate count and
11945 the second operand of the bit operation both constant,
11946 permute the two operations. */
11947 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11948 && (TREE_CODE (arg0) == BIT_AND_EXPR
11949 || TREE_CODE (arg0) == BIT_IOR_EXPR
11950 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11951 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11952 return fold_build2 (TREE_CODE (arg0), type,
11953 fold_build2 (code, type,
11954 TREE_OPERAND (arg0, 0), arg1),
11955 fold_build2 (code, type,
11956 TREE_OPERAND (arg0, 1), arg1));
11958 /* Two consecutive rotates adding up to the precision of the
11959 type can be ignored. */
11960 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11961 && TREE_CODE (arg0) == RROTATE_EXPR
11962 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11963 && TREE_INT_CST_HIGH (arg1) == 0
11964 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11965 && ((TREE_INT_CST_LOW (arg1)
11966 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11967 == (unsigned int) TYPE_PRECISION (type)))
11968 return TREE_OPERAND (arg0, 0);
11970 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11971 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11972 if the latter can be further optimized. */
11973 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11974 && TREE_CODE (arg0) == BIT_AND_EXPR
11975 && TREE_CODE (arg1) == INTEGER_CST
11976 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11978 tree mask = fold_build2 (code, type,
11979 fold_convert (type, TREE_OPERAND (arg0, 1)),
11980 arg1);
11981 tree shift = fold_build2 (code, type,
11982 fold_convert (type, TREE_OPERAND (arg0, 0)),
11983 arg1);
11984 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11985 if (tem)
11986 return tem;
11989 return NULL_TREE;
11991 case MIN_EXPR:
11992 if (operand_equal_p (arg0, arg1, 0))
11993 return omit_one_operand (type, arg0, arg1);
11994 if (INTEGRAL_TYPE_P (type)
11995 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11996 return omit_one_operand (type, arg1, arg0);
11997 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11998 if (tem)
11999 return tem;
12000 goto associate;
12002 case MAX_EXPR:
12003 if (operand_equal_p (arg0, arg1, 0))
12004 return omit_one_operand (type, arg0, arg1);
12005 if (INTEGRAL_TYPE_P (type)
12006 && TYPE_MAX_VALUE (type)
12007 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12008 return omit_one_operand (type, arg1, arg0);
12009 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
12010 if (tem)
12011 return tem;
12012 goto associate;
12014 case TRUTH_ANDIF_EXPR:
12015 /* Note that the operands of this must be ints
12016 and their values must be 0 or 1.
12017 ("true" is a fixed value perhaps depending on the language.) */
12018 /* If first arg is constant zero, return it. */
12019 if (integer_zerop (arg0))
12020 return fold_convert (type, arg0);
12021 case TRUTH_AND_EXPR:
12022 /* If either arg is constant true, drop it. */
12023 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12024 return non_lvalue (fold_convert (type, arg1));
12025 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12026 /* Preserve sequence points. */
12027 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12028 return non_lvalue (fold_convert (type, arg0));
12029 /* If second arg is constant zero, result is zero, but first arg
12030 must be evaluated. */
12031 if (integer_zerop (arg1))
12032 return omit_one_operand (type, arg1, arg0);
12033 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12034 case will be handled here. */
12035 if (integer_zerop (arg0))
12036 return omit_one_operand (type, arg0, arg1);
12038 /* !X && X is always false. */
12039 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12040 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12041 return omit_one_operand (type, integer_zero_node, arg1);
12042 /* X && !X is always false. */
12043 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12044 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12045 return omit_one_operand (type, integer_zero_node, arg0);
12047 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12048 means A >= Y && A != MAX, but in this case we know that
12049 A < X <= MAX. */
12051 if (!TREE_SIDE_EFFECTS (arg0)
12052 && !TREE_SIDE_EFFECTS (arg1))
12054 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12055 if (tem && !operand_equal_p (tem, arg0, 0))
12056 return fold_build2 (code, type, tem, arg1);
12058 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12059 if (tem && !operand_equal_p (tem, arg1, 0))
12060 return fold_build2 (code, type, arg0, tem);
12063 truth_andor:
12064 /* We only do these simplifications if we are optimizing. */
12065 if (!optimize)
12066 return NULL_TREE;
12068 /* Check for things like (A || B) && (A || C). We can convert this
12069 to A || (B && C). Note that either operator can be any of the four
12070 truth and/or operations and the transformation will still be
12071 valid. Also note that we only care about order for the
12072 ANDIF and ORIF operators. If B contains side effects, this
12073 might change the truth-value of A. */
12074 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12075 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12076 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12077 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12078 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12079 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12081 tree a00 = TREE_OPERAND (arg0, 0);
12082 tree a01 = TREE_OPERAND (arg0, 1);
12083 tree a10 = TREE_OPERAND (arg1, 0);
12084 tree a11 = TREE_OPERAND (arg1, 1);
12085 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12086 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12087 && (code == TRUTH_AND_EXPR
12088 || code == TRUTH_OR_EXPR));
12090 if (operand_equal_p (a00, a10, 0))
12091 return fold_build2 (TREE_CODE (arg0), type, a00,
12092 fold_build2 (code, type, a01, a11));
12093 else if (commutative && operand_equal_p (a00, a11, 0))
12094 return fold_build2 (TREE_CODE (arg0), type, a00,
12095 fold_build2 (code, type, a01, a10));
12096 else if (commutative && operand_equal_p (a01, a10, 0))
12097 return fold_build2 (TREE_CODE (arg0), type, a01,
12098 fold_build2 (code, type, a00, a11));
12100 /* This case is tricky because we must either have commutative
12101 operators or else A10 must not have side-effects. */
12103 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12104 && operand_equal_p (a01, a11, 0))
12105 return fold_build2 (TREE_CODE (arg0), type,
12106 fold_build2 (code, type, a00, a10),
12107 a01);
12110 /* See if we can build a range comparison. */
12111 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12112 return tem;
12114 /* Check for the possibility of merging component references. If our
12115 lhs is another similar operation, try to merge its rhs with our
12116 rhs. Then try to merge our lhs and rhs. */
12117 if (TREE_CODE (arg0) == code
12118 && 0 != (tem = fold_truthop (code, type,
12119 TREE_OPERAND (arg0, 1), arg1)))
12120 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12122 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12123 return tem;
12125 return NULL_TREE;
12127 case TRUTH_ORIF_EXPR:
12128 /* Note that the operands of this must be ints
12129 and their values must be 0 or true.
12130 ("true" is a fixed value perhaps depending on the language.) */
12131 /* If first arg is constant true, return it. */
12132 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12133 return fold_convert (type, arg0);
12134 case TRUTH_OR_EXPR:
12135 /* If either arg is constant zero, drop it. */
12136 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12137 return non_lvalue (fold_convert (type, arg1));
12138 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12139 /* Preserve sequence points. */
12140 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12141 return non_lvalue (fold_convert (type, arg0));
12142 /* If second arg is constant true, result is true, but we must
12143 evaluate first arg. */
12144 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12145 return omit_one_operand (type, arg1, arg0);
12146 /* Likewise for first arg, but note this only occurs here for
12147 TRUTH_OR_EXPR. */
12148 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12149 return omit_one_operand (type, arg0, arg1);
12151 /* !X || X is always true. */
12152 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12153 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12154 return omit_one_operand (type, integer_one_node, arg1);
12155 /* X || !X is always true. */
12156 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12157 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12158 return omit_one_operand (type, integer_one_node, arg0);
12160 goto truth_andor;
12162 case TRUTH_XOR_EXPR:
12163 /* If the second arg is constant zero, drop it. */
12164 if (integer_zerop (arg1))
12165 return non_lvalue (fold_convert (type, arg0));
12166 /* If the second arg is constant true, this is a logical inversion. */
12167 if (integer_onep (arg1))
12169 /* Only call invert_truthvalue if operand is a truth value. */
12170 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12171 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12172 else
12173 tem = invert_truthvalue (arg0);
12174 return non_lvalue (fold_convert (type, tem));
12176 /* Identical arguments cancel to zero. */
12177 if (operand_equal_p (arg0, arg1, 0))
12178 return omit_one_operand (type, integer_zero_node, arg0);
12180 /* !X ^ X is always true. */
12181 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12182 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12183 return omit_one_operand (type, integer_one_node, arg1);
12185 /* X ^ !X is always true. */
12186 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12187 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12188 return omit_one_operand (type, integer_one_node, arg0);
12190 return NULL_TREE;
12192 case EQ_EXPR:
12193 case NE_EXPR:
12194 tem = fold_comparison (code, type, op0, op1);
12195 if (tem != NULL_TREE)
12196 return tem;
12198 /* bool_var != 0 becomes bool_var. */
12199 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12200 && code == NE_EXPR)
12201 return non_lvalue (fold_convert (type, arg0));
12203 /* bool_var == 1 becomes bool_var. */
12204 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12205 && code == EQ_EXPR)
12206 return non_lvalue (fold_convert (type, arg0));
12208 /* bool_var != 1 becomes !bool_var. */
12209 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12210 && code == NE_EXPR)
12211 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12213 /* bool_var == 0 becomes !bool_var. */
12214 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12215 && code == EQ_EXPR)
12216 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12218 /* If this is an equality comparison of the address of two non-weak,
12219 unaliased symbols neither of which are extern (since we do not
12220 have access to attributes for externs), then we know the result. */
12221 if (TREE_CODE (arg0) == ADDR_EXPR
12222 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12223 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12224 && ! lookup_attribute ("alias",
12225 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12226 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12227 && TREE_CODE (arg1) == ADDR_EXPR
12228 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12229 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12230 && ! lookup_attribute ("alias",
12231 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12232 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12234 /* We know that we're looking at the address of two
12235 non-weak, unaliased, static _DECL nodes.
12237 It is both wasteful and incorrect to call operand_equal_p
12238 to compare the two ADDR_EXPR nodes. It is wasteful in that
12239 all we need to do is test pointer equality for the arguments
12240 to the two ADDR_EXPR nodes. It is incorrect to use
12241 operand_equal_p as that function is NOT equivalent to a
12242 C equality test. It can in fact return false for two
12243 objects which would test as equal using the C equality
12244 operator. */
12245 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12246 return constant_boolean_node (equal
12247 ? code == EQ_EXPR : code != EQ_EXPR,
12248 type);
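/* E.g. with two file-scope definitions

     static int a, b;

   the comparison &a == &b folds to 0 and &a != &b folds to 1, purely
   by testing the two DECL nodes for pointer identity.  */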
12251 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12252 a MINUS_EXPR of a constant, we can convert it into a comparison with
12253 a revised constant as long as no overflow occurs. */
12254 if (TREE_CODE (arg1) == INTEGER_CST
12255 && (TREE_CODE (arg0) == PLUS_EXPR
12256 || TREE_CODE (arg0) == MINUS_EXPR)
12257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12258 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12259 ? MINUS_EXPR : PLUS_EXPR,
12260 fold_convert (TREE_TYPE (arg0), arg1),
12261 TREE_OPERAND (arg0, 1), 0))
12262 && !TREE_OVERFLOW (tem))
12263 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12265 /* Similarly for a NEGATE_EXPR. */
12266 if (TREE_CODE (arg0) == NEGATE_EXPR
12267 && TREE_CODE (arg1) == INTEGER_CST
12268 && 0 != (tem = negate_expr (arg1))
12269 && TREE_CODE (tem) == INTEGER_CST
12270 && !TREE_OVERFLOW (tem))
12271 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12273 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12274 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12275 && TREE_CODE (arg1) == INTEGER_CST
12276 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12277 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12278 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12279 fold_convert (TREE_TYPE (arg0), arg1),
12280 TREE_OPERAND (arg0, 1)));
12282 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12283 if ((TREE_CODE (arg0) == PLUS_EXPR
12284 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12285 || TREE_CODE (arg0) == MINUS_EXPR)
12286 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12287 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12288 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12290 tree val = TREE_OPERAND (arg0, 1);
12291 return omit_two_operands (type,
12292 fold_build2 (code, type,
12293 val,
12294 build_int_cst (TREE_TYPE (val),
12295 0)),
12296 TREE_OPERAND (arg0, 0), arg1);
12299 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12300 if (TREE_CODE (arg0) == MINUS_EXPR
12301 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12302 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12303 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12305 return omit_two_operands (type,
12306 code == NE_EXPR
12307 ? boolean_true_node : boolean_false_node,
12308 TREE_OPERAND (arg0, 1), arg1);
12311 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12312 for !=. Don't do this for ordered comparisons due to overflow. */
12313 if (TREE_CODE (arg0) == MINUS_EXPR
12314 && integer_zerop (arg1))
12315 return fold_build2 (code, type,
12316 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12318 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12319 if (TREE_CODE (arg0) == ABS_EXPR
12320 && (integer_zerop (arg1) || real_zerop (arg1)))
12321 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12323 /* If this is an EQ or NE comparison with zero and ARG0 is
12324 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12325 two operations, but the latter can be done in one less insn
12326 on machines that have only two-operand insns or on which a
12327 constant cannot be the first operand. */
12328 if (TREE_CODE (arg0) == BIT_AND_EXPR
12329 && integer_zerop (arg1))
12331 tree arg00 = TREE_OPERAND (arg0, 0);
12332 tree arg01 = TREE_OPERAND (arg0, 1);
12333 if (TREE_CODE (arg00) == LSHIFT_EXPR
12334 && integer_onep (TREE_OPERAND (arg00, 0)))
12336 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12337 arg01, TREE_OPERAND (arg00, 1));
12338 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12339 build_int_cst (TREE_TYPE (arg0), 1));
12340 return fold_build2 (code, type,
12341 fold_convert (TREE_TYPE (arg1), tem), arg1);
12343 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12344 && integer_onep (TREE_OPERAND (arg01, 0)))
12346 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12347 arg00, TREE_OPERAND (arg01, 1));
12348 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12349 build_int_cst (TREE_TYPE (arg0), 1));
12350 return fold_build2 (code, type,
12351 fold_convert (TREE_TYPE (arg1), tem), arg1);
12355 /* If this is an NE or EQ comparison of zero against the result of a
12356 signed MOD operation whose second operand is a power of 2, make
12357 the MOD operation unsigned since it is simpler and equivalent. */
12358 if (integer_zerop (arg1)
12359 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12360 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12361 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12362 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12363 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12364 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12366 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12367 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12368 fold_convert (newtype,
12369 TREE_OPERAND (arg0, 0)),
12370 fold_convert (newtype,
12371 TREE_OPERAND (arg0, 1)));
12373 return fold_build2 (code, type, newmod,
12374 fold_convert (newtype, arg1));
12377 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12378 C1 is a valid shift constant, and C2 is a power of two, i.e.
12379 a single bit. */
12380 if (TREE_CODE (arg0) == BIT_AND_EXPR
12381 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12382 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12383 == INTEGER_CST
12384 && integer_pow2p (TREE_OPERAND (arg0, 1))
12385 && integer_zerop (arg1))
12387 tree itype = TREE_TYPE (arg0);
12388 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12389 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12391 /* Check for a valid shift count. */
12392 if (TREE_INT_CST_HIGH (arg001) == 0
12393 && TREE_INT_CST_LOW (arg001) < prec)
12395 tree arg01 = TREE_OPERAND (arg0, 1);
12396 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12397 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12398 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12399 can be rewritten as (X & (C2 << C1)) != 0. */
12400 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12402 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12403 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12404 return fold_build2 (code, type, tem, arg1);
12406 /* Otherwise, for signed (arithmetic) shifts,
12407 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12408 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12409 else if (!TYPE_UNSIGNED (itype))
12410 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12411 arg000, build_int_cst (itype, 0));
12412 /* Otherwise, for unsigned (logical) shifts,
12413 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12414 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12415 else
12416 return omit_one_operand (type,
12417 code == EQ_EXPR ? integer_one_node
12418 : integer_zero_node,
12419 arg000);
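      /* For example, "((x >> 3) & 4) != 0" tests bit 5 of x, so it
	 becomes "(x & 32) != 0" (4 << 3 == 32), saving the shift.  */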
12423 /* If this is an NE comparison of zero with an AND of one, remove the
12424 comparison since the AND will give the correct value. */
12425 if (code == NE_EXPR
12426 && integer_zerop (arg1)
12427 && TREE_CODE (arg0) == BIT_AND_EXPR
12428 && integer_onep (TREE_OPERAND (arg0, 1)))
12429 return fold_convert (type, arg0);
12431 /* If we have (A & C) == C where C is a power of 2, convert this into
12432 (A & C) != 0. Similarly for NE_EXPR. */
12433 if (TREE_CODE (arg0) == BIT_AND_EXPR
12434 && integer_pow2p (TREE_OPERAND (arg0, 1))
12435 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12436 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12437 arg0, fold_convert (TREE_TYPE (arg0),
12438 integer_zero_node));
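      /* For example, "(x & 8) == 8" becomes "(x & 8) != 0", since the
	 masked value can only be 0 or 8.  */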
12440 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12441 bit, then fold the expression into A < 0 or A >= 0. */
12442 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12443 if (tem)
12444 return tem;
12446 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12447 Similarly for NE_EXPR. */
12448 if (TREE_CODE (arg0) == BIT_AND_EXPR
12449 && TREE_CODE (arg1) == INTEGER_CST
12450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12452 tree notc = fold_build1 (BIT_NOT_EXPR,
12453 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12454 TREE_OPERAND (arg0, 1));
12455 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12456 arg1, notc);
12457 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12458 if (integer_nonzerop (dandnotc))
12459 return omit_one_operand (type, rslt, arg0);
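      /* For example, "(x & 3) == 4" is always false: bit 2 of the
	 right-hand side can never survive the mask 3.  */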
12462 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12463 Similarly for NE_EXPR. */
12464 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12465 && TREE_CODE (arg1) == INTEGER_CST
12466 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12468 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12469 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12470 TREE_OPERAND (arg0, 1), notd);
12471 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12472 if (integer_nonzerop (candnotd))
12473 return omit_one_operand (type, rslt, arg0);
12476 /* If this is a comparison of a field, we may be able to simplify it. */
12477 if ((TREE_CODE (arg0) == COMPONENT_REF
12478 || TREE_CODE (arg0) == BIT_FIELD_REF)
12479 /* Handle the constant case even without -O
12480 to make sure the warnings are given. */
12481 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12483 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12484 if (t1)
12485 return t1;
12488 /* Optimize comparisons of strlen vs zero to a compare of the
12489 first character of the string vs zero. To wit,
12490 strlen(ptr) == 0 => *ptr == 0
12491 strlen(ptr) != 0 => *ptr != 0
12492 Other cases should reduce to one of these two (or a constant)
12493 due to the return value of strlen being unsigned. */
12494 if (TREE_CODE (arg0) == CALL_EXPR
12495 && integer_zerop (arg1))
12497 tree fndecl = get_callee_fndecl (arg0);
12499 if (fndecl
12500 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12501 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12502 && call_expr_nargs (arg0) == 1
12503 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12505 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12506 return fold_build2 (code, type, iref,
12507 build_int_cst (TREE_TYPE (iref), 0));
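      /* For example, "strlen (s) == 0" becomes "*s == 0", avoiding the
	 library call when only emptiness is tested.  */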
12511 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12512 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12513 if (TREE_CODE (arg0) == RSHIFT_EXPR
12514 && integer_zerop (arg1)
12515 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12517 tree arg00 = TREE_OPERAND (arg0, 0);
12518 tree arg01 = TREE_OPERAND (arg0, 1);
12519 tree itype = TREE_TYPE (arg00);
12520 if (TREE_INT_CST_HIGH (arg01) == 0
12521 && TREE_INT_CST_LOW (arg01)
12522 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12524 if (TYPE_UNSIGNED (itype))
12526 itype = signed_type_for (itype);
12527 arg00 = fold_convert (itype, arg00);
12529 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12530 type, arg00, build_int_cst (itype, 0));
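      /* For example, with 32-bit int x, "(x >> 31) != 0" becomes
	 "x < 0"; for unsigned x the operand is first converted to the
	 corresponding signed type.  */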
12534 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12535 if (integer_zerop (arg1)
12536 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12537 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12538 TREE_OPERAND (arg0, 1));
12540 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12541 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12542 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12543 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12544 build_int_cst (TREE_TYPE (arg1), 0));
12545 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12546 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12547 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12548 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12549 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12550 build_int_cst (TREE_TYPE (arg1), 0));
12552 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12553 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12554 && TREE_CODE (arg1) == INTEGER_CST
12555 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12556 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12557 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12558 TREE_OPERAND (arg0, 1), arg1));
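      /* For example, "(x ^ 5) == 3" becomes "x == 6", folding the two
	 constants at compile time (5 ^ 3 == 6).  */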
12560 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12561 (X & C) == 0 when C is a single bit. */
12562 if (TREE_CODE (arg0) == BIT_AND_EXPR
12563 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12564 && integer_zerop (arg1)
12565 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12567 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12568 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12569 TREE_OPERAND (arg0, 1));
12570 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12571 type, tem, arg1);
12574 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12575 constant C is a power of two, i.e. a single bit. */
12576 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12577 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12578 && integer_zerop (arg1)
12579 && integer_pow2p (TREE_OPERAND (arg0, 1))
12580 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12581 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12583 tree arg00 = TREE_OPERAND (arg0, 0);
12584 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12585 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12588 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12589 when C is a power of two, i.e. a single bit. */
12590 if (TREE_CODE (arg0) == BIT_AND_EXPR
12591 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12592 && integer_zerop (arg1)
12593 && integer_pow2p (TREE_OPERAND (arg0, 1))
12594 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12595 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12597 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12598 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12599 arg000, TREE_OPERAND (arg0, 1));
12600 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12601 tem, build_int_cst (TREE_TYPE (tem), 0));
12604 if (integer_zerop (arg1)
12605 && tree_expr_nonzero_p (arg0))
12607 tree res = constant_boolean_node (code == NE_EXPR, type);
12608 return omit_one_operand (type, res, arg0);
12611 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12612 if (TREE_CODE (arg0) == NEGATE_EXPR
12613 && TREE_CODE (arg1) == NEGATE_EXPR)
12614 return fold_build2 (code, type,
12615 TREE_OPERAND (arg0, 0),
12616 TREE_OPERAND (arg1, 0));
12618 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
12619 if (TREE_CODE (arg0) == BIT_AND_EXPR
12620 && TREE_CODE (arg1) == BIT_AND_EXPR)
12622 tree arg00 = TREE_OPERAND (arg0, 0);
12623 tree arg01 = TREE_OPERAND (arg0, 1);
12624 tree arg10 = TREE_OPERAND (arg1, 0);
12625 tree arg11 = TREE_OPERAND (arg1, 1);
12626 tree itype = TREE_TYPE (arg0);
12628 if (operand_equal_p (arg01, arg11, 0))
12629 return fold_build2 (code, type,
12630 fold_build2 (BIT_AND_EXPR, itype,
12631 fold_build2 (BIT_XOR_EXPR, itype,
12632 arg00, arg10),
12633 arg01),
12634 build_int_cst (itype, 0));
12636 if (operand_equal_p (arg01, arg10, 0))
12637 return fold_build2 (code, type,
12638 fold_build2 (BIT_AND_EXPR, itype,
12639 fold_build2 (BIT_XOR_EXPR, itype,
12640 arg00, arg11),
12641 arg01),
12642 build_int_cst (itype, 0));
12644 if (operand_equal_p (arg00, arg11, 0))
12645 return fold_build2 (code, type,
12646 fold_build2 (BIT_AND_EXPR, itype,
12647 fold_build2 (BIT_XOR_EXPR, itype,
12648 arg01, arg10),
12649 arg00),
12650 build_int_cst (itype, 0));
12652 if (operand_equal_p (arg00, arg10, 0))
12653 return fold_build2 (code, type,
12654 fold_build2 (BIT_AND_EXPR, itype,
12655 fold_build2 (BIT_XOR_EXPR, itype,
12656 arg01, arg11),
12657 arg00),
12658 build_int_cst (itype, 0));
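	  /* For example, "(x & m) == (y & m)" becomes
	     "((x ^ y) & m) == 0": the masked bits agree exactly when
	     their XOR has no bit set under the mask.  */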
12661 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12662 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12664 tree arg00 = TREE_OPERAND (arg0, 0);
12665 tree arg01 = TREE_OPERAND (arg0, 1);
12666 tree arg10 = TREE_OPERAND (arg1, 0);
12667 tree arg11 = TREE_OPERAND (arg1, 1);
12668 tree itype = TREE_TYPE (arg0);
12670 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12671 operand_equal_p guarantees no side-effects so we don't need
12672 to use omit_one_operand on Z. */
12673 if (operand_equal_p (arg01, arg11, 0))
12674 return fold_build2 (code, type, arg00, arg10);
12675 if (operand_equal_p (arg01, arg10, 0))
12676 return fold_build2 (code, type, arg00, arg11);
12677 if (operand_equal_p (arg00, arg11, 0))
12678 return fold_build2 (code, type, arg01, arg10);
12679 if (operand_equal_p (arg00, arg10, 0))
12680 return fold_build2 (code, type, arg01, arg11);
12682 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12683 if (TREE_CODE (arg01) == INTEGER_CST
12684 && TREE_CODE (arg11) == INTEGER_CST)
12685 return fold_build2 (code, type,
12686 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12687 fold_build2 (BIT_XOR_EXPR, itype,
12688 arg01, arg11)),
12689 arg10);
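	  /* For example, "(x ^ 1) == (y ^ 3)" becomes "(x ^ 2) == y":
	     XOR-ing both sides by 3 cancels the right-hand constant,
	     and 1 ^ 3 == 2.  */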
12692 /* Attempt to simplify equality/inequality comparisons of complex
12693 values. Only lower the comparison if the result is known or
12694 can be simplified to a single scalar comparison. */
12695 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12696 || TREE_CODE (arg0) == COMPLEX_CST)
12697 && (TREE_CODE (arg1) == COMPLEX_EXPR
12698 || TREE_CODE (arg1) == COMPLEX_CST))
12700 tree real0, imag0, real1, imag1;
12701 tree rcond, icond;
12703 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12705 real0 = TREE_OPERAND (arg0, 0);
12706 imag0 = TREE_OPERAND (arg0, 1);
12708 else
12710 real0 = TREE_REALPART (arg0);
12711 imag0 = TREE_IMAGPART (arg0);
12714 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12716 real1 = TREE_OPERAND (arg1, 0);
12717 imag1 = TREE_OPERAND (arg1, 1);
12719 else
12721 real1 = TREE_REALPART (arg1);
12722 imag1 = TREE_IMAGPART (arg1);
12725 rcond = fold_binary (code, type, real0, real1);
12726 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12728 if (integer_zerop (rcond))
12730 if (code == EQ_EXPR)
12731 return omit_two_operands (type, boolean_false_node,
12732 imag0, imag1);
12733 return fold_build2 (NE_EXPR, type, imag0, imag1);
12735 else
12737 if (code == NE_EXPR)
12738 return omit_two_operands (type, boolean_true_node,
12739 imag0, imag1);
12740 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12744 icond = fold_binary (code, type, imag0, imag1);
12745 if (icond && TREE_CODE (icond) == INTEGER_CST)
12747 if (integer_zerop (icond))
12749 if (code == EQ_EXPR)
12750 return omit_two_operands (type, boolean_false_node,
12751 real0, real1);
12752 return fold_build2 (NE_EXPR, type, real0, real1);
12754 else
12756 if (code == NE_EXPR)
12757 return omit_two_operands (type, boolean_true_node,
12758 real0, real1);
12759 return fold_build2 (EQ_EXPR, type, real0, real1);
12764 return NULL_TREE;
12766 case LT_EXPR:
12767 case GT_EXPR:
12768 case LE_EXPR:
12769 case GE_EXPR:
12770 tem = fold_comparison (code, type, op0, op1);
12771 if (tem != NULL_TREE)
12772 return tem;
12774 /* Transform comparisons of the form X +- C CMP X. */
12775 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12776 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12777 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12778 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12779 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12780 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12782 tree arg01 = TREE_OPERAND (arg0, 1);
12783 enum tree_code code0 = TREE_CODE (arg0);
12784 int is_positive;
12786 if (TREE_CODE (arg01) == REAL_CST)
12787 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12788 else
12789 is_positive = tree_int_cst_sgn (arg01);
12791 /* (X - c) > X becomes false. */
12792 if (code == GT_EXPR
12793 && ((code0 == MINUS_EXPR && is_positive >= 0)
12794 || (code0 == PLUS_EXPR && is_positive <= 0)))
12796 if (TREE_CODE (arg01) == INTEGER_CST
12797 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12798 fold_overflow_warning (("assuming signed overflow does not "
12799 "occur when assuming that (X - c) > X "
12800 "is always false"),
12801 WARN_STRICT_OVERFLOW_ALL);
12802 return constant_boolean_node (0, type);
12805 /* Likewise (X + c) < X becomes false. */
12806 if (code == LT_EXPR
12807 && ((code0 == PLUS_EXPR && is_positive >= 0)
12808 || (code0 == MINUS_EXPR && is_positive <= 0)))
12810 if (TREE_CODE (arg01) == INTEGER_CST
12811 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12812 fold_overflow_warning (("assuming signed overflow does not "
12813 "occur when assuming that "
12814 "(X + c) < X is always false"),
12815 WARN_STRICT_OVERFLOW_ALL);
12816 return constant_boolean_node (0, type);
12819 /* Convert (X - c) <= X to true. */
12820 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12821 && code == LE_EXPR
12822 && ((code0 == MINUS_EXPR && is_positive >= 0)
12823 || (code0 == PLUS_EXPR && is_positive <= 0)))
12825 if (TREE_CODE (arg01) == INTEGER_CST
12826 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12827 fold_overflow_warning (("assuming signed overflow does not "
12828 "occur when assuming that "
12829 "(X - c) <= X is always true"),
12830 WARN_STRICT_OVERFLOW_ALL);
12831 return constant_boolean_node (1, type);
12834 /* Convert (X + c) >= X to true. */
12835 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12836 && code == GE_EXPR
12837 && ((code0 == PLUS_EXPR && is_positive >= 0)
12838 || (code0 == MINUS_EXPR && is_positive <= 0)))
12840 if (TREE_CODE (arg01) == INTEGER_CST
12841 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12842 fold_overflow_warning (("assuming signed overflow does not "
12843 "occur when assuming that "
12844 "(X + c) >= X is always true"),
12845 WARN_STRICT_OVERFLOW_ALL);
12846 return constant_boolean_node (1, type);
12849 if (TREE_CODE (arg01) == INTEGER_CST)
12851 /* Convert X + c > X and X - c < X to true for integers. */
12852 if (code == GT_EXPR
12853 && ((code0 == PLUS_EXPR && is_positive > 0)
12854 || (code0 == MINUS_EXPR && is_positive < 0)))
12856 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12857 fold_overflow_warning (("assuming signed overflow does "
12858 "not occur when assuming that "
12859 "(X + c) > X is always true"),
12860 WARN_STRICT_OVERFLOW_ALL);
12861 return constant_boolean_node (1, type);
12864 if (code == LT_EXPR
12865 && ((code0 == MINUS_EXPR && is_positive > 0)
12866 || (code0 == PLUS_EXPR && is_positive < 0)))
12868 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12869 fold_overflow_warning (("assuming signed overflow does "
12870 "not occur when assuming that "
12871 "(X - c) < X is always true"),
12872 WARN_STRICT_OVERFLOW_ALL);
12873 return constant_boolean_node (1, type);
12876 /* Convert X + c <= X and X - c >= X to false for integers. */
12877 if (code == LE_EXPR
12878 && ((code0 == PLUS_EXPR && is_positive > 0)
12879 || (code0 == MINUS_EXPR && is_positive < 0)))
12881 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12882 fold_overflow_warning (("assuming signed overflow does "
12883 "not occur when assuming that "
12884 "(X + c) <= X is always false"),
12885 WARN_STRICT_OVERFLOW_ALL);
12886 return constant_boolean_node (0, type);
12889 if (code == GE_EXPR
12890 && ((code0 == MINUS_EXPR && is_positive > 0)
12891 || (code0 == PLUS_EXPR && is_positive < 0)))
12893 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12894 fold_overflow_warning (("assuming signed overflow does "
12895 "not occur when assuming that "
12896 "(X - c) >= X is always false"),
12897 WARN_STRICT_OVERFLOW_ALL);
12898 return constant_boolean_node (0, type);
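	  /* For example, with signed int x, "x + 1 > x" folds to true
	     only because signed overflow is assumed undefined; it would
	     be false for x == INT_MAX under wrap-around semantics, which
	     is why the folds above emit -Wstrict-overflow warnings.  */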
12903 /* Comparisons with the highest or lowest possible integer of
12904 the specified precision will have known values. */
12906 tree arg1_type = TREE_TYPE (arg1);
12907 unsigned int width = TYPE_PRECISION (arg1_type);
12909 if (TREE_CODE (arg1) == INTEGER_CST
12910 && width <= 2 * HOST_BITS_PER_WIDE_INT
12911 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12913 HOST_WIDE_INT signed_max_hi;
12914 unsigned HOST_WIDE_INT signed_max_lo;
12915 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12917 if (width <= HOST_BITS_PER_WIDE_INT)
12919 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12920 - 1;
12921 signed_max_hi = 0;
12922 max_hi = 0;
12924 if (TYPE_UNSIGNED (arg1_type))
12926 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12927 min_lo = 0;
12928 min_hi = 0;
12930 else
12932 max_lo = signed_max_lo;
12933 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12934 min_hi = -1;
12937 else
12939 width -= HOST_BITS_PER_WIDE_INT;
12940 signed_max_lo = -1;
12941 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12942 - 1;
12943 max_lo = -1;
12944 min_lo = 0;
12946 if (TYPE_UNSIGNED (arg1_type))
12948 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12949 min_hi = 0;
12951 else
12953 max_hi = signed_max_hi;
12954 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12958 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12959 && TREE_INT_CST_LOW (arg1) == max_lo)
12960 switch (code)
12962 case GT_EXPR:
12963 return omit_one_operand (type, integer_zero_node, arg0);
12965 case GE_EXPR:
12966 return fold_build2 (EQ_EXPR, type, op0, op1);
12968 case LE_EXPR:
12969 return omit_one_operand (type, integer_one_node, arg0);
12971 case LT_EXPR:
12972 return fold_build2 (NE_EXPR, type, op0, op1);
12974 /* The GE_EXPR and LT_EXPR cases above are not normally
12975 reached because of previous transformations. */
12977 default:
12978 break;
12980 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12981 == max_hi
12982 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12983 switch (code)
12985 case GT_EXPR:
12986 arg1 = const_binop (PLUS_EXPR, arg1,
12987 build_int_cst (TREE_TYPE (arg1), 1), 0);
12988 return fold_build2 (EQ_EXPR, type,
12989 fold_convert (TREE_TYPE (arg1), arg0),
12990 arg1);
12991 case LE_EXPR:
12992 arg1 = const_binop (PLUS_EXPR, arg1,
12993 build_int_cst (TREE_TYPE (arg1), 1), 0);
12994 return fold_build2 (NE_EXPR, type,
12995 fold_convert (TREE_TYPE (arg1), arg0),
12996 arg1);
12997 default:
12998 break;
13000 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13001 == min_hi
13002 && TREE_INT_CST_LOW (arg1) == min_lo)
13003 switch (code)
13005 case LT_EXPR:
13006 return omit_one_operand (type, integer_zero_node, arg0);
13008 case LE_EXPR:
13009 return fold_build2 (EQ_EXPR, type, op0, op1);
13011 case GE_EXPR:
13012 return omit_one_operand (type, integer_one_node, arg0);
13014 case GT_EXPR:
13015 return fold_build2 (NE_EXPR, type, op0, op1);
13017 default:
13018 break;
13020 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13021 == min_hi
13022 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13023 switch (code)
13025 case GE_EXPR:
13026 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13027 return fold_build2 (NE_EXPR, type,
13028 fold_convert (TREE_TYPE (arg1), arg0),
13029 arg1);
13030 case LT_EXPR:
13031 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13032 return fold_build2 (EQ_EXPR, type,
13033 fold_convert (TREE_TYPE (arg1), arg0),
13034 arg1);
13035 default:
13036 break;
13039 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13040 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13041 && TYPE_UNSIGNED (arg1_type)
13042 /* We will flip the signedness of the comparison operator
13043 associated with the mode of arg1, so the sign bit is
13044 specified by this mode. Check that arg1 is the signed
13045 max associated with this sign bit. */
13046 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13047 /* signed_type does not work on pointer types. */
13048 && INTEGRAL_TYPE_P (arg1_type))
13050 /* The following case also applies to X < signed_max+1
13051 and X >= signed_max+1 because of previous transformations. */
13052 if (code == LE_EXPR || code == GT_EXPR)
13054 tree st;
13055 st = signed_type_for (TREE_TYPE (arg1));
13056 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13057 type, fold_convert (st, arg0),
13058 build_int_cst (st, 0));
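	      /* For example, with 32-bit unsigned x, "x <= 0x7fffffff"
		 becomes "(int) x >= 0" and "x > 0x7fffffff" becomes
		 "(int) x < 0", both testing only the sign bit.  */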
13064 /* If we are comparing an ABS_EXPR with a constant, we can
13065 convert all the cases into explicit comparisons, but they may
13066 well not be faster than doing the ABS and one comparison.
13067 But ABS (X) <= C is a range comparison, which becomes a subtraction
13068 and a comparison, and is probably faster. */
13069 if (code == LE_EXPR
13070 && TREE_CODE (arg1) == INTEGER_CST
13071 && TREE_CODE (arg0) == ABS_EXPR
13072 && ! TREE_SIDE_EFFECTS (arg0)
13073 && (0 != (tem = negate_expr (arg1)))
13074 && TREE_CODE (tem) == INTEGER_CST
13075 && !TREE_OVERFLOW (tem))
13076 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13077 build2 (GE_EXPR, type,
13078 TREE_OPERAND (arg0, 0), tem),
13079 build2 (LE_EXPR, type,
13080 TREE_OPERAND (arg0, 0), arg1));
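      /* For example, "abs (x) <= 5" becomes "x >= -5 && x <= 5", the
	 range comparison described above.  */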
13082 /* Convert ABS_EXPR<x> >= 0 to true. */
13083 strict_overflow_p = false;
13084 if (code == GE_EXPR
13085 && (integer_zerop (arg1)
13086 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13087 && real_zerop (arg1)))
13088 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13090 if (strict_overflow_p)
13091 fold_overflow_warning (("assuming signed overflow does not occur "
13092 "when simplifying comparison of "
13093 "absolute value and zero"),
13094 WARN_STRICT_OVERFLOW_CONDITIONAL);
13095 return omit_one_operand (type, integer_one_node, arg0);
13098 /* Convert ABS_EXPR<x> < 0 to false. */
13099 strict_overflow_p = false;
13100 if (code == LT_EXPR
13101 && (integer_zerop (arg1) || real_zerop (arg1))
13102 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13104 if (strict_overflow_p)
13105 fold_overflow_warning (("assuming signed overflow does not occur "
13106 "when simplifying comparison of "
13107 "absolute value and zero"),
13108 WARN_STRICT_OVERFLOW_CONDITIONAL);
13109 return omit_one_operand (type, integer_zero_node, arg0);
13112 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13113 and similarly for >= into !=. */
13114 if ((code == LT_EXPR || code == GE_EXPR)
13115 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13116 && TREE_CODE (arg1) == LSHIFT_EXPR
13117 && integer_onep (TREE_OPERAND (arg1, 0)))
13118 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13119 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13120 TREE_OPERAND (arg1, 1)),
13121 build_int_cst (TREE_TYPE (arg0), 0));
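      /* For example, with unsigned x, "x < (1u << y)" becomes
	 "(x >> y) == 0": x is below the single set bit exactly when no
	 bit at position y or above is set.  */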
13123 if ((code == LT_EXPR || code == GE_EXPR)
13124 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13125 && CONVERT_EXPR_P (arg1)
13126 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13127 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13128 return
13129 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13130 fold_convert (TREE_TYPE (arg0),
13131 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13132 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13133 1))),
13134 build_int_cst (TREE_TYPE (arg0), 0));
13136 return NULL_TREE;
13138 case UNORDERED_EXPR:
13139 case ORDERED_EXPR:
13140 case UNLT_EXPR:
13141 case UNLE_EXPR:
13142 case UNGT_EXPR:
13143 case UNGE_EXPR:
13144 case UNEQ_EXPR:
13145 case LTGT_EXPR:
13146 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13148 t1 = fold_relational_const (code, type, arg0, arg1);
13149 if (t1 != NULL_TREE)
13150 return t1;
13153 /* If the first operand is NaN, the result is constant. */
13154 if (TREE_CODE (arg0) == REAL_CST
13155 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13156 && (code != LTGT_EXPR || ! flag_trapping_math))
13158 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13159 ? integer_zero_node
13160 : integer_one_node;
13161 return omit_one_operand (type, t1, arg1);
13164 /* If the second operand is NaN, the result is constant. */
13165 if (TREE_CODE (arg1) == REAL_CST
13166 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13167 && (code != LTGT_EXPR || ! flag_trapping_math))
13169 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13170 ? integer_zero_node
13171 : integer_one_node;
13172 return omit_one_operand (type, t1, arg0);
13175 /* Simplify unordered comparison of something with itself. */
13176 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13177 && operand_equal_p (arg0, arg1, 0))
13178 return constant_boolean_node (1, type);
13180 if (code == LTGT_EXPR
13181 && !flag_trapping_math
13182 && operand_equal_p (arg0, arg1, 0))
13183 return constant_boolean_node (0, type);
13185 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13187 tree targ0 = strip_float_extensions (arg0);
13188 tree targ1 = strip_float_extensions (arg1);
13189 tree newtype = TREE_TYPE (targ0);
13191 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13192 newtype = TREE_TYPE (targ1);
13194 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13195 return fold_build2 (code, type, fold_convert (newtype, targ0),
13196 fold_convert (newtype, targ1));
13199 return NULL_TREE;
13201 case COMPOUND_EXPR:
13202 /* When pedantic, a compound expression can be neither an lvalue
13203 nor an integer constant expression. */
13204 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13205 return NULL_TREE;
13206 /* Don't let (0, 0) be a null pointer constant. */
13207 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13208 : fold_convert (type, arg1);
13209 return pedantic_non_lvalue (tem);
13211 case COMPLEX_EXPR:
13212 if ((TREE_CODE (arg0) == REAL_CST
13213 && TREE_CODE (arg1) == REAL_CST)
13214 || (TREE_CODE (arg0) == INTEGER_CST
13215 && TREE_CODE (arg1) == INTEGER_CST))
13216 return build_complex (type, arg0, arg1);
13217 return NULL_TREE;
13219 case ASSERT_EXPR:
13220 /* An ASSERT_EXPR should never be passed to fold_binary. */
13221 gcc_unreachable ();
13223 default:
13224 return NULL_TREE;
13225 } /* switch (code) */
13228 /* Callback for walk_tree, looking for LABEL_EXPR.
13229 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
13230 Do not check the sub-tree of GOTO_EXPR. */
13232 static tree
13233 contains_label_1 (tree *tp,
13234 int *walk_subtrees,
13235 void *data ATTRIBUTE_UNUSED)
13237 switch (TREE_CODE (*tp))
13239 case LABEL_EXPR:
13240 return *tp;
13241 case GOTO_EXPR:
13242 *walk_subtrees = 0;
13243 /* no break */
13244 default:
13245 return NULL_TREE;
13249 /* Checks whether the sub-tree ST contains a LABEL_EXPR which is
13250 accessible from outside the sub-tree. Returns false if no
13251 addressable label is found, and true otherwise. */
13253 static bool
13254 contains_label_p (tree st)
13256 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13259 /* Fold a ternary expression of code CODE and type TYPE with operands
13260 OP0, OP1, and OP2. Return the folded expression if folding is
13261 successful. Otherwise, return NULL_TREE. */
13263 tree
13264 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13266 tree tem;
13267 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13268 enum tree_code_class kind = TREE_CODE_CLASS (code);
13270 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13271 && TREE_CODE_LENGTH (code) == 3);
13273 /* Strip any conversions that don't change the mode. This is safe
13274 for every expression, except for a comparison expression because
13275 its signedness is derived from its operands. So, in the latter
13276 case, only strip conversions that don't change the signedness.
13278 Note that this is done as an internal manipulation within the
13279 constant folder, in order to find the simplest representation of
13280 the arguments so that their form can be studied. In any cases,
13281 the appropriate type conversions should be put back in the tree
13282 that will get out of the constant folder. */
13283 if (op0)
13285 arg0 = op0;
13286 STRIP_NOPS (arg0);
13289 if (op1)
13291 arg1 = op1;
13292 STRIP_NOPS (arg1);
13295 switch (code)
13297 case COMPONENT_REF:
13298 if (TREE_CODE (arg0) == CONSTRUCTOR
13299 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13301 unsigned HOST_WIDE_INT idx;
13302 tree field, value;
13303 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13304 if (field == arg1)
13305 return value;
13307 return NULL_TREE;
13309 case COND_EXPR:
13310 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13311 so all simple results must be passed through pedantic_non_lvalue. */
13312 if (TREE_CODE (arg0) == INTEGER_CST)
13314 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13315 tem = integer_zerop (arg0) ? op2 : op1;
13316 /* Only optimize constant conditions when the selected branch
13317 has the same type as the COND_EXPR. This avoids optimizing
13318 away "c ? x : throw", where the throw has a void type.
13319 Avoid throwing away the operand that contains a label. */
13320 if ((!TREE_SIDE_EFFECTS (unused_op)
13321 || !contains_label_p (unused_op))
13322 && (! VOID_TYPE_P (TREE_TYPE (tem))
13323 || VOID_TYPE_P (type)))
13324 return pedantic_non_lvalue (tem);
13325 return NULL_TREE;
13327 if (operand_equal_p (arg1, op2, 0))
13328 return pedantic_omit_one_operand (type, arg1, arg0);
13330 /* If we have A op B ? A : C, we may be able to convert this to a
13331 simpler expression, depending on the operation and the values
13332 of B and C. Signed zeros prevent all of these transformations,
13333 for reasons given above each one.
13335 Also try swapping the arguments and inverting the conditional. */
13336 if (COMPARISON_CLASS_P (arg0)
13337 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13338 arg1, TREE_OPERAND (arg0, 1))
13339 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13341 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13342 if (tem)
13343 return tem;
13346 if (COMPARISON_CLASS_P (arg0)
13347 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13348 op2,
13349 TREE_OPERAND (arg0, 1))
13350 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13352 tem = fold_truth_not_expr (arg0);
13353 if (tem && COMPARISON_CLASS_P (tem))
13355 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13356 if (tem)
13357 return tem;
13361 /* If the second operand is simpler than the third, swap them
13362 since that produces better jump optimization results. */
13363 if (truth_value_p (TREE_CODE (arg0))
13364 && tree_swap_operands_p (op1, op2, false))
13366 /* See if this can be inverted. If it can't, possibly because
13367 it was a floating-point inequality comparison, don't do
13368 anything. */
13369 tem = fold_truth_not_expr (arg0);
13370 if (tem)
13371 return fold_build3 (code, type, tem, op2, op1);
13374 /* Convert A ? 1 : 0 to simply A. */
13375 if (integer_onep (op1)
13376 && integer_zerop (op2)
13377 /* If we try to convert OP0 to our type, the
13378 call to fold will try to move the conversion inside
13379 a COND, which will recurse. In that case, the COND_EXPR
13380 is probably the best choice, so leave it alone. */
13381 && type == TREE_TYPE (arg0))
13382 return pedantic_non_lvalue (arg0);
13384 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13385 over COND_EXPR in cases such as floating point comparisons. */
13386 if (integer_zerop (op1)
13387 && integer_onep (op2)
13388 && truth_value_p (TREE_CODE (arg0)))
13389 return pedantic_non_lvalue (fold_convert (type,
13390 invert_truthvalue (arg0)));
13392 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13393 if (TREE_CODE (arg0) == LT_EXPR
13394 && integer_zerop (TREE_OPERAND (arg0, 1))
13395 && integer_zerop (op2)
13396 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13398 /* sign_bit_p only checks ARG1 bits within A's precision.
13399 If <sign bit of A> has wider type than A, bits outside
13400 of A's precision in <sign bit of A> need to be checked.
13401 If they are all 0, this optimization needs to be done
13402 in A's unsigned type; if they are all 1, in A's signed
13403 type; otherwise it can't be done. */
13404 if (TYPE_PRECISION (TREE_TYPE (tem))
13405 < TYPE_PRECISION (TREE_TYPE (arg1))
13406 && TYPE_PRECISION (TREE_TYPE (tem))
13407 < TYPE_PRECISION (type))
13409 unsigned HOST_WIDE_INT mask_lo;
13410 HOST_WIDE_INT mask_hi;
13411 int inner_width, outer_width;
13412 tree tem_type;
13414 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13415 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13416 if (outer_width > TYPE_PRECISION (type))
13417 outer_width = TYPE_PRECISION (type);
13419 if (outer_width > HOST_BITS_PER_WIDE_INT)
13421 mask_hi = ((unsigned HOST_WIDE_INT) -1
13422 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13423 mask_lo = -1;
13425 else
13427 mask_hi = 0;
13428 mask_lo = ((unsigned HOST_WIDE_INT) -1
13429 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13431 if (inner_width > HOST_BITS_PER_WIDE_INT)
13433 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13434 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13435 mask_lo = 0;
13437 else
13438 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13439 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13441 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13442 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13444 tem_type = signed_type_for (TREE_TYPE (tem));
13445 tem = fold_convert (tem_type, tem);
13447 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13448 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13450 tem_type = unsigned_type_for (TREE_TYPE (tem));
13451 tem = fold_convert (tem_type, tem);
13453 else
13454 tem = NULL;
13457 if (tem)
13458 return fold_convert (type,
13459 fold_build2 (BIT_AND_EXPR,
13460 TREE_TYPE (tem), tem,
13461 fold_convert (TREE_TYPE (tem),
13462 arg1)));
13465 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13466 already handled above. */
13467 if (TREE_CODE (arg0) == BIT_AND_EXPR
13468 && integer_onep (TREE_OPERAND (arg0, 1))
13469 && integer_zerop (op2)
13470 && integer_pow2p (arg1))
13472 tree tem = TREE_OPERAND (arg0, 0);
13473 STRIP_NOPS (tem);
13474 if (TREE_CODE (tem) == RSHIFT_EXPR
13475 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13476 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13477 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13478 return fold_build2 (BIT_AND_EXPR, type,
13479 TREE_OPERAND (tem, 0), arg1);
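      /* For example, "(x >> 3) & 1 ? 8 : 0" is simply "x & 8",
	 since 8 == 1 << 3.  */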
13482 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13483 is probably obsolete because the first operand should be a
13484 truth value (that's why we have the two cases above), but let's
13485 leave it in until we can confirm this for all front-ends. */
13486 if (integer_zerop (op2)
13487 && TREE_CODE (arg0) == NE_EXPR
13488 && integer_zerop (TREE_OPERAND (arg0, 1))
13489 && integer_pow2p (arg1)
13490 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13491 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13492 arg1, OEP_ONLY_CONST))
13493 return pedantic_non_lvalue (fold_convert (type,
13494 TREE_OPERAND (arg0, 0)));
13496 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13497 if (integer_zerop (op2)
13498 && truth_value_p (TREE_CODE (arg0))
13499 && truth_value_p (TREE_CODE (arg1)))
13500 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13501 fold_convert (type, arg0),
13502 arg1);
13504 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13505 if (integer_onep (op2)
13506 && truth_value_p (TREE_CODE (arg0))
13507 && truth_value_p (TREE_CODE (arg1)))
13509 /* Only perform transformation if ARG0 is easily inverted. */
13510 tem = fold_truth_not_expr (arg0);
13511 if (tem)
13512 return fold_build2 (TRUTH_ORIF_EXPR, type,
13513 fold_convert (type, tem),
13514 arg1);
13517 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13518 if (integer_zerop (arg1)
13519 && truth_value_p (TREE_CODE (arg0))
13520 && truth_value_p (TREE_CODE (op2)))
13522 /* Only perform transformation if ARG0 is easily inverted. */
13523 tem = fold_truth_not_expr (arg0);
13524 if (tem)
13525 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13526 fold_convert (type, tem),
13527 op2);
13530 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13531 if (integer_onep (arg1)
13532 && truth_value_p (TREE_CODE (arg0))
13533 && truth_value_p (TREE_CODE (op2)))
13534 return fold_build2 (TRUTH_ORIF_EXPR, type,
13535 fold_convert (type, arg0),
13536 op2);
13538 return NULL_TREE;
13540 case CALL_EXPR:
13541 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13542 of fold_ternary on them. */
13543 gcc_unreachable ();
13545 case BIT_FIELD_REF:
13546 if ((TREE_CODE (arg0) == VECTOR_CST
13547 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13548 && type == TREE_TYPE (TREE_TYPE (arg0)))
13550 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13551 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13553 if (width != 0
13554 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13555 && (idx % width) == 0
13556 && (idx = idx / width)
13557 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13559 tree elements = NULL_TREE;
13561 if (TREE_CODE (arg0) == VECTOR_CST)
13562 elements = TREE_VECTOR_CST_ELTS (arg0);
13563 else
13565 unsigned HOST_WIDE_INT idx;
13566 tree value;
13568 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13569 elements = tree_cons (NULL_TREE, value, elements);
13571 while (idx-- > 0 && elements)
13572 elements = TREE_CHAIN (elements);
13573 if (elements)
13574 return TREE_VALUE (elements);
13575 else
13576 return fold_convert (type, integer_zero_node);
13580 /* A bit-field-ref that references the full argument can be stripped. */
13581 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13582 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13583 && integer_zerop (op2))
13584 return fold_convert (type, arg0);
13586 return NULL_TREE;
13588 default:
13589 return NULL_TREE;
13590 } /* switch (code) */
13593 /* Perform constant folding and related simplification of EXPR.
13594 The related simplifications include x*1 => x, x*0 => 0, etc.,
13595 and application of the associative law.
13596 NOP_EXPR conversions may be removed freely (as long as we
13597 are careful not to change the type of the overall expression).
13598 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13599 but we can constant-fold them if they have constant operands. */
13601 #ifdef ENABLE_FOLD_CHECKING
13602 # define fold(x) fold_1 (x)
13603 static tree fold_1 (tree);
13604 static
13605 #endif
13606 tree
13607 fold (tree expr)
13609 const tree t = expr;
13610 enum tree_code code = TREE_CODE (t);
13611 enum tree_code_class kind = TREE_CODE_CLASS (code);
13612 tree tem;
13614 /* Return right away if a constant. */
13615 if (kind == tcc_constant)
13616 return t;
13618 /* CALL_EXPR-like objects with variable numbers of operands are
13619 treated specially. */
13620 if (kind == tcc_vl_exp)
13622 if (code == CALL_EXPR)
13624 tem = fold_call_expr (expr, false);
13625 return tem ? tem : expr;
13627 return expr;
13630 if (IS_EXPR_CODE_CLASS (kind))
13632 tree type = TREE_TYPE (t);
13633 tree op0, op1, op2;
13635 switch (TREE_CODE_LENGTH (code))
13637 case 1:
13638 op0 = TREE_OPERAND (t, 0);
13639 tem = fold_unary (code, type, op0);
13640 return tem ? tem : expr;
13641 case 2:
13642 op0 = TREE_OPERAND (t, 0);
13643 op1 = TREE_OPERAND (t, 1);
13644 tem = fold_binary (code, type, op0, op1);
13645 return tem ? tem : expr;
13646 case 3:
13647 op0 = TREE_OPERAND (t, 0);
13648 op1 = TREE_OPERAND (t, 1);
13649 op2 = TREE_OPERAND (t, 2);
13650 tem = fold_ternary (code, type, op0, op1, op2);
13651 return tem ? tem : expr;
13652 default:
13653 break;
13657 switch (code)
13659 case ARRAY_REF:
13661 tree op0 = TREE_OPERAND (t, 0);
13662 tree op1 = TREE_OPERAND (t, 1);
13664 if (TREE_CODE (op1) == INTEGER_CST
13665 && TREE_CODE (op0) == CONSTRUCTOR
13666 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13668 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13669 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13670 unsigned HOST_WIDE_INT begin = 0;
13672 /* Find a matching index by means of a binary search. */
13673 while (begin != end)
13675 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13676 tree index = VEC_index (constructor_elt, elts, middle)->index;
13678 if (TREE_CODE (index) == INTEGER_CST
13679 && tree_int_cst_lt (index, op1))
13680 begin = middle + 1;
13681 else if (TREE_CODE (index) == INTEGER_CST
13682 && tree_int_cst_lt (op1, index))
13683 end = middle;
13684 else if (TREE_CODE (index) == RANGE_EXPR
13685 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13686 begin = middle + 1;
13687 else if (TREE_CODE (index) == RANGE_EXPR
13688 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13689 end = middle;
13690 else
13691 return VEC_index (constructor_elt, elts, middle)->value;
13695 return t;
13698 case CONST_DECL:
13699 return fold (DECL_INITIAL (t));
13701 default:
13702 return t;
13703 } /* switch (code) */
13706 #ifdef ENABLE_FOLD_CHECKING
13707 #undef fold
13709 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13710 static void fold_check_failed (const_tree, const_tree);
13711 void print_fold_checksum (const_tree);
13713 /* When --enable-checking=fold, compute a digest of expr before
13714 and after the actual fold call to verify that fold did not
13715 accidentally change the original expr. */
13717 tree
13718 fold (tree expr)
13720 tree ret;
13721 struct md5_ctx ctx;
13722 unsigned char checksum_before[16], checksum_after[16];
13723 htab_t ht;
13725 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13726 md5_init_ctx (&ctx);
13727 fold_checksum_tree (expr, &ctx, ht);
13728 md5_finish_ctx (&ctx, checksum_before);
13729 htab_empty (ht);
13731 ret = fold_1 (expr);
13733 md5_init_ctx (&ctx);
13734 fold_checksum_tree (expr, &ctx, ht);
13735 md5_finish_ctx (&ctx, checksum_after);
13736 htab_delete (ht);
13738 if (memcmp (checksum_before, checksum_after, 16))
13739 fold_check_failed (expr, ret);
13741 return ret;
13744 void
13745 print_fold_checksum (const_tree expr)
13747 struct md5_ctx ctx;
13748 unsigned char checksum[16], cnt;
13749 htab_t ht;
13751 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13752 md5_init_ctx (&ctx);
13753 fold_checksum_tree (expr, &ctx, ht);
13754 md5_finish_ctx (&ctx, checksum);
13755 htab_delete (ht);
13756 for (cnt = 0; cnt < 16; ++cnt)
13757 fprintf (stderr, "%02x", checksum[cnt]);
13758 putc ('\n', stderr);
13761 static void
13762 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13764 internal_error ("fold check: original tree changed by fold");
13767 static void
13768 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13770 const void **slot;
13771 enum tree_code code;
13772 union tree_node buf;
13773 int i, len;
13775 recursive_label:
13777 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13778 <= sizeof (struct tree_function_decl))
13779 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13780 if (expr == NULL)
13781 return;
13782 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13783 if (*slot != NULL)
13784 return;
13785 *slot = expr;
13786 code = TREE_CODE (expr);
13787 if (TREE_CODE_CLASS (code) == tcc_declaration
13788 && DECL_ASSEMBLER_NAME_SET_P (expr))
13790 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13791 memcpy ((char *) &buf, expr, tree_size (expr));
13792 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13793 expr = (tree) &buf;
13795 else if (TREE_CODE_CLASS (code) == tcc_type
13796 && (TYPE_POINTER_TO (expr)
13797 || TYPE_REFERENCE_TO (expr)
13798 || TYPE_CACHED_VALUES_P (expr)
13799 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13800 || TYPE_NEXT_VARIANT (expr)))
13802 /* Allow these fields to be modified. */
13803 tree tmp;
13804 memcpy ((char *) &buf, expr, tree_size (expr));
13805 expr = tmp = (tree) &buf;
13806 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13807 TYPE_POINTER_TO (tmp) = NULL;
13808 TYPE_REFERENCE_TO (tmp) = NULL;
13809 TYPE_NEXT_VARIANT (tmp) = NULL;
13810 if (TYPE_CACHED_VALUES_P (tmp))
13812 TYPE_CACHED_VALUES_P (tmp) = 0;
13813 TYPE_CACHED_VALUES (tmp) = NULL;
13816 md5_process_bytes (expr, tree_size (expr), ctx);
13817 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13818 if (TREE_CODE_CLASS (code) != tcc_type
13819 && TREE_CODE_CLASS (code) != tcc_declaration
13820 && code != TREE_LIST
13821 && code != SSA_NAME)
13822 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13823 switch (TREE_CODE_CLASS (code))
13825 case tcc_constant:
13826 switch (code)
13828 case STRING_CST:
13829 md5_process_bytes (TREE_STRING_POINTER (expr),
13830 TREE_STRING_LENGTH (expr), ctx);
13831 break;
13832 case COMPLEX_CST:
13833 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13834 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13835 break;
13836 case VECTOR_CST:
13837 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13838 break;
13839 default:
13840 break;
13842 break;
13843 case tcc_exceptional:
13844 switch (code)
13846 case TREE_LIST:
13847 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13848 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13849 expr = TREE_CHAIN (expr);
13850 goto recursive_label;
13851 break;
13852 case TREE_VEC:
13853 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13854 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13855 break;
13856 default:
13857 break;
13859 break;
13860 case tcc_expression:
13861 case tcc_reference:
13862 case tcc_comparison:
13863 case tcc_unary:
13864 case tcc_binary:
13865 case tcc_statement:
13866 case tcc_vl_exp:
13867 len = TREE_OPERAND_LENGTH (expr);
13868 for (i = 0; i < len; ++i)
13869 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13870 break;
13871 case tcc_declaration:
13872 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13873 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13874 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13876 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13877 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13878 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13879 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13880 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13882 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13883 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13885 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13887 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13888 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13889 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13891 break;
13892 case tcc_type:
13893 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13894 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13895 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13896 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13897 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13898 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13899 if (INTEGRAL_TYPE_P (expr)
13900 || SCALAR_FLOAT_TYPE_P (expr))
13902 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13903 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13905 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13906 if (TREE_CODE (expr) == RECORD_TYPE
13907 || TREE_CODE (expr) == UNION_TYPE
13908 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13909 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13910 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13911 break;
13912 default:
13913 break;
13917 /* Helper function for outputting the checksum of a tree T. When
13918 debugging with gdb, you can "define mynext" to be "next" followed
13919 by "call debug_fold_checksum (op0)", then just trace down till the
13920 outputs differ. */
13922 void
13923 debug_fold_checksum (const_tree t)
13925 int i;
13926 unsigned char checksum[16];
13927 struct md5_ctx ctx;
13928 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13930 md5_init_ctx (&ctx);
13931 fold_checksum_tree (t, &ctx, ht);
13932 md5_finish_ctx (&ctx, checksum);
13933 htab_empty (ht);
13935 for (i = 0; i < 16; i++)
13936 fprintf (stderr, "%d ", checksum[i]);
13938 fprintf (stderr, "\n");
13941 #endif
13943 /* Fold a unary tree expression with code CODE of type TYPE with an
13944 operand OP0. Return a folded expression if successful. Otherwise,
13945 return a tree expression with code CODE of type TYPE with an
13946 operand OP0. */
13948 tree
13949 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13951 tree tem;
13952 #ifdef ENABLE_FOLD_CHECKING
13953 unsigned char checksum_before[16], checksum_after[16];
13954 struct md5_ctx ctx;
13955 htab_t ht;
13957 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13958 md5_init_ctx (&ctx);
13959 fold_checksum_tree (op0, &ctx, ht);
13960 md5_finish_ctx (&ctx, checksum_before);
13961 htab_empty (ht);
13962 #endif
13964 tem = fold_unary (code, type, op0);
13965 if (!tem)
13966 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13968 #ifdef ENABLE_FOLD_CHECKING
13969 md5_init_ctx (&ctx);
13970 fold_checksum_tree (op0, &ctx, ht);
13971 md5_finish_ctx (&ctx, checksum_after);
13972 htab_delete (ht);
13974 if (memcmp (checksum_before, checksum_after, 16))
13975 fold_check_failed (op0, tem);
13976 #endif
13977 return tem;
13980 /* Fold a binary tree expression with code CODE of type TYPE with
13981 operands OP0 and OP1. Return a folded expression if successful.
13982 Otherwise, return a tree expression with code CODE of type TYPE
13983 with operands OP0 and OP1. */
13985 tree
13986 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13987 MEM_STAT_DECL)
13989 tree tem;
13990 #ifdef ENABLE_FOLD_CHECKING
13991 unsigned char checksum_before_op0[16],
13992 checksum_before_op1[16],
13993 checksum_after_op0[16],
13994 checksum_after_op1[16];
13995 struct md5_ctx ctx;
13996 htab_t ht;
13998 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13999 md5_init_ctx (&ctx);
14000 fold_checksum_tree (op0, &ctx, ht);
14001 md5_finish_ctx (&ctx, checksum_before_op0);
14002 htab_empty (ht);
14004 md5_init_ctx (&ctx);
14005 fold_checksum_tree (op1, &ctx, ht);
14006 md5_finish_ctx (&ctx, checksum_before_op1);
14007 htab_empty (ht);
14008 #endif
14010 tem = fold_binary (code, type, op0, op1);
14011 if (!tem)
14012 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14014 #ifdef ENABLE_FOLD_CHECKING
14015 md5_init_ctx (&ctx);
14016 fold_checksum_tree (op0, &ctx, ht);
14017 md5_finish_ctx (&ctx, checksum_after_op0);
14018 htab_empty (ht);
14020 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14021 fold_check_failed (op0, tem);
14023 md5_init_ctx (&ctx);
14024 fold_checksum_tree (op1, &ctx, ht);
14025 md5_finish_ctx (&ctx, checksum_after_op1);
14026 htab_delete (ht);
14028 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14029 fold_check_failed (op1, tem);
14030 #endif
14031 return tem;
14034 /* Fold a ternary tree expression with code CODE of type TYPE with
14035 operands OP0, OP1, and OP2. Return a folded expression if
14036 successful. Otherwise, return a tree expression with code CODE of
14037 type TYPE with operands OP0, OP1, and OP2. */
14039 tree
14040 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
14041 MEM_STAT_DECL)
14043 tree tem;
14044 #ifdef ENABLE_FOLD_CHECKING
14045 unsigned char checksum_before_op0[16],
14046 checksum_before_op1[16],
14047 checksum_before_op2[16],
14048 checksum_after_op0[16],
14049 checksum_after_op1[16],
14050 checksum_after_op2[16];
14051 struct md5_ctx ctx;
14052 htab_t ht;
14054 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14055 md5_init_ctx (&ctx);
14056 fold_checksum_tree (op0, &ctx, ht);
14057 md5_finish_ctx (&ctx, checksum_before_op0);
14058 htab_empty (ht);
14060 md5_init_ctx (&ctx);
14061 fold_checksum_tree (op1, &ctx, ht);
14062 md5_finish_ctx (&ctx, checksum_before_op1);
14063 htab_empty (ht);
14065 md5_init_ctx (&ctx);
14066 fold_checksum_tree (op2, &ctx, ht);
14067 md5_finish_ctx (&ctx, checksum_before_op2);
14068 htab_empty (ht);
14069 #endif
14071 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14072 tem = fold_ternary (code, type, op0, op1, op2);
14073 if (!tem)
14074 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14076 #ifdef ENABLE_FOLD_CHECKING
14077 md5_init_ctx (&ctx);
14078 fold_checksum_tree (op0, &ctx, ht);
14079 md5_finish_ctx (&ctx, checksum_after_op0);
14080 htab_empty (ht);
14082 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14083 fold_check_failed (op0, tem);
14085 md5_init_ctx (&ctx);
14086 fold_checksum_tree (op1, &ctx, ht);
14087 md5_finish_ctx (&ctx, checksum_after_op1);
14088 htab_empty (ht);
14090 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14091 fold_check_failed (op1, tem);
14093 md5_init_ctx (&ctx);
14094 fold_checksum_tree (op2, &ctx, ht);
14095 md5_finish_ctx (&ctx, checksum_after_op2);
14096 htab_delete (ht);
14098 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14099 fold_check_failed (op2, tem);
14100 #endif
14101 return tem;
14104 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14105 arguments in ARGARRAY, and a null static chain.
14106 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14107 of type TYPE from the given operands as constructed by build_call_array. */
14109 tree
14110 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14112 tree tem;
14113 #ifdef ENABLE_FOLD_CHECKING
14114 unsigned char checksum_before_fn[16],
14115 checksum_before_arglist[16],
14116 checksum_after_fn[16],
14117 checksum_after_arglist[16];
14118 struct md5_ctx ctx;
14119 htab_t ht;
14120 int i;
14122 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14123 md5_init_ctx (&ctx);
14124 fold_checksum_tree (fn, &ctx, ht);
14125 md5_finish_ctx (&ctx, checksum_before_fn);
14126 htab_empty (ht);
14128 md5_init_ctx (&ctx);
14129 for (i = 0; i < nargs; i++)
14130 fold_checksum_tree (argarray[i], &ctx, ht);
14131 md5_finish_ctx (&ctx, checksum_before_arglist);
14132 htab_empty (ht);
14133 #endif
14135 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14137 #ifdef ENABLE_FOLD_CHECKING
14138 md5_init_ctx (&ctx);
14139 fold_checksum_tree (fn, &ctx, ht);
14140 md5_finish_ctx (&ctx, checksum_after_fn);
14141 htab_empty (ht);
14143 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14144 fold_check_failed (fn, tem);
14146 md5_init_ctx (&ctx);
14147 for (i = 0; i < nargs; i++)
14148 fold_checksum_tree (argarray[i], &ctx, ht);
14149 md5_finish_ctx (&ctx, checksum_after_arglist);
14150 htab_delete (ht);
14152 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14153 fold_check_failed (NULL_TREE, tem);
14154 #endif
14155 return tem;
14158 /* Perform constant folding and related simplification of initializer
14159 expression EXPR. These behave identically to "fold_buildN" but ignore
14160 potential run-time traps and exceptions that fold must preserve. */
14162 #define START_FOLD_INIT \
14163 int saved_signaling_nans = flag_signaling_nans;\
14164 int saved_trapping_math = flag_trapping_math;\
14165 int saved_rounding_math = flag_rounding_math;\
14166 int saved_trapv = flag_trapv;\
14167 int saved_folding_initializer = folding_initializer;\
14168 flag_signaling_nans = 0;\
14169 flag_trapping_math = 0;\
14170 flag_rounding_math = 0;\
14171 flag_trapv = 0;\
14172 folding_initializer = 1;
14174 #define END_FOLD_INIT \
14175 flag_signaling_nans = saved_signaling_nans;\
14176 flag_trapping_math = saved_trapping_math;\
14177 flag_rounding_math = saved_rounding_math;\
14178 flag_trapv = saved_trapv;\
14179 folding_initializer = saved_folding_initializer;
14181 tree
14182 fold_build1_initializer (enum tree_code code, tree type, tree op)
14184 tree result;
14185 START_FOLD_INIT;
14187 result = fold_build1 (code, type, op);
14189 END_FOLD_INIT;
14190 return result;
14193 tree
14194 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14196 tree result;
14197 START_FOLD_INIT;
14199 result = fold_build2 (code, type, op0, op1);
14201 END_FOLD_INIT;
14202 return result;
14205 tree
14206 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14207 tree op2)
14209 tree result;
14210 START_FOLD_INIT;
14212 result = fold_build3 (code, type, op0, op1, op2);
14214 END_FOLD_INIT;
14215 return result;
14218 tree
14219 fold_build_call_array_initializer (tree type, tree fn,
14220 int nargs, tree *argarray)
14222 tree result;
14223 START_FOLD_INIT;
14225 result = fold_build_call_array (type, fn, nargs, argarray);
14227 END_FOLD_INIT;
14228 return result;
14231 #undef START_FOLD_INIT
14232 #undef END_FOLD_INIT
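/* Illustrative sketch (editor's addition, not part of fold-const.c): the
   save/clear/restore discipline that START_FOLD_INIT and END_FOLD_INIT
   implement, shown standalone with one hypothetical flag.  */
static int toy_flag_trapping_math = 1;

static double toy_fold_initializer (double x)
{
  int saved_trapping_math = toy_flag_trapping_math;  /* START_FOLD_INIT */
  toy_flag_trapping_math = 0;     /* initializers cannot trap at run time */
  double result = x + 0.0;        /* fold as if inside an initializer */
  toy_flag_trapping_math = saved_trapping_math;      /* END_FOLD_INIT */
  return result;
}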
14234 /* Determine if first argument is a multiple of second argument. Return 0 if
14235 it is not, or we cannot easily determine it to be.
14237 An example of the sort of thing we care about (at this point; this routine
14238 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14239 fold cases do now) is discovering that
14241 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14243 is a multiple of
14245 SAVE_EXPR (J * 8)
14247 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14249 This code also handles discovering that
14251 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14253 is a multiple of 8 so we don't have to worry about dealing with a
14254 possible remainder.
14256 Note that we *look* inside a SAVE_EXPR only to determine how it was
14257 calculated; it is not safe for fold to do much of anything else with the
14258 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14259 at run time. For example, the latter example above *cannot* be implemented
14260 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14261 evaluation time of the original SAVE_EXPR is not necessarily the same at
14262 the time the new expression is evaluated. The only optimization of this
14263 sort that would be valid is changing
14265 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14267 divided by 8 to
14269 SAVE_EXPR (I) * SAVE_EXPR (J)
14271 (where the same SAVE_EXPR (J) is used in the original and the
14272 transformed version). */
14274 int
14275 multiple_of_p (tree type, const_tree top, const_tree bottom)
14277 if (operand_equal_p (top, bottom, 0))
14278 return 1;
14280 if (TREE_CODE (type) != INTEGER_TYPE)
14281 return 0;
14283 switch (TREE_CODE (top))
14285 case BIT_AND_EXPR:
14286 /* Bitwise and provides a power of two multiple. If the mask is
14287 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14288 if (!integer_pow2p (bottom))
14289 return 0;
14290 /* FALLTHRU */
14292 case MULT_EXPR:
14293 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14294 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14296 case PLUS_EXPR:
14297 case MINUS_EXPR:
14298 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14299 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14301 case LSHIFT_EXPR:
14302 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14304 tree op1, t1;
14306 op1 = TREE_OPERAND (top, 1);
14307 /* const_binop may not detect overflow correctly,
14308 so check for it explicitly here. */
14309 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14310 > TREE_INT_CST_LOW (op1)
14311 && TREE_INT_CST_HIGH (op1) == 0
14312 && 0 != (t1 = fold_convert (type,
14313 const_binop (LSHIFT_EXPR,
14314 size_one_node,
14315 op1, 0)))
14316 && !TREE_OVERFLOW (t1))
14317 return multiple_of_p (type, t1, bottom);
14319 return 0;
14321 case NOP_EXPR:
14322 /* Can't handle conversions from non-integral or wider integral type. */
14323 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14324 || (TYPE_PRECISION (type)
14325 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14326 return 0;
14328 /* .. fall through ... */
14330 case SAVE_EXPR:
14331 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14333 case INTEGER_CST:
14334 if (TREE_CODE (bottom) != INTEGER_CST
14335 || integer_zerop (bottom)
14336 || (TYPE_UNSIGNED (type)
14337 && (tree_int_cst_sgn (top) < 0
14338 || tree_int_cst_sgn (bottom) < 0)))
14339 return 0;
14340 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14341 top, bottom, 0));
14343 default:
14344 return 0;
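/* Illustrative sketch (editor's addition): the MULT_EXPR/PLUS_EXPR
   recursion of multiple_of_p over a toy expression tree, standalone C
   with hypothetical names.  For i * (j * 8) and bottom == 8, the MULT
   case succeeds because its second factor is the constant 8.  */
#include <stdbool.h>

enum toy_code { TOY_VAR, TOY_CST, TOY_MULT, TOY_PLUS };
struct toy_node { enum toy_code code; long cst; struct toy_node *op0, *op1; };

static bool toy_multiple_of_p (const struct toy_node *top, long bottom)
{
  switch (top->code)
    {
    case TOY_CST:       /* INTEGER_CST: direct divisibility test */
      return bottom != 0 && top->cst % bottom == 0;
    case TOY_MULT:      /* a*b is a multiple if either factor is */
      return toy_multiple_of_p (top->op0, bottom)
             || toy_multiple_of_p (top->op1, bottom);
    case TOY_PLUS:      /* a+b is a multiple only if both terms are */
      return toy_multiple_of_p (top->op0, bottom)
             && toy_multiple_of_p (top->op1, bottom);
    default:            /* unknown leaf: be conservative */
      return false;
    }
}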
14348 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
14350 static bool
14351 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14353 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14354 && truth_value_p (code))
14355 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14356 have a signed:1 type (where the values are -1 and 0). */
14357 return true;
14358 return false;
14361 /* Return true if (CODE OP0) is known to be non-negative. If the return
14362 value is based on the assumption that signed overflow is undefined,
14363 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14364 *STRICT_OVERFLOW_P. */
14366 bool
14367 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14368 bool *strict_overflow_p)
14370 if (TYPE_UNSIGNED (type))
14371 return true;
14373 switch (code)
14375 case ABS_EXPR:
14376 /* We can't return 1 if flag_wrapv is set because
14377 ABS_EXPR<INT_MIN> = INT_MIN. */
14378 if (!INTEGRAL_TYPE_P (type))
14379 return true;
14380 if (TYPE_OVERFLOW_UNDEFINED (type))
14382 *strict_overflow_p = true;
14383 return true;
14385 break;
14387 case NON_LVALUE_EXPR:
14388 case FLOAT_EXPR:
14389 case FIX_TRUNC_EXPR:
14390 return tree_expr_nonnegative_warnv_p (op0,
14391 strict_overflow_p);
14393 case NOP_EXPR:
14395 tree inner_type = TREE_TYPE (op0);
14396 tree outer_type = type;
14398 if (TREE_CODE (outer_type) == REAL_TYPE)
14400 if (TREE_CODE (inner_type) == REAL_TYPE)
14401 return tree_expr_nonnegative_warnv_p (op0,
14402 strict_overflow_p);
14403 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14405 if (TYPE_UNSIGNED (inner_type))
14406 return true;
14407 return tree_expr_nonnegative_warnv_p (op0,
14408 strict_overflow_p);
14411 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14413 if (TREE_CODE (inner_type) == REAL_TYPE)
14414 return tree_expr_nonnegative_warnv_p (op0,
14415 strict_overflow_p);
14416 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14417 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14418 && TYPE_UNSIGNED (inner_type);
14421 break;
14423 default:
14424 return tree_simple_nonnegative_warnv_p (code, type);
14427 /* We don't know sign of `t', so be conservative and return false. */
14428 return false;
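/* Illustrative check (editor's addition): the ABS_EXPR caveat above.
   With wrapping overflow, abs (INT_MIN) is INT_MIN again, i.e. negative,
   so ABS_EXPR may only be called nonnegative when signed overflow is
   undefined (hence *strict_overflow_p is set).  Standalone demo:  */
#include <assert.h>
#include <stdint.h>

static void toy_abs_int_min (void)
{
  /* Wrapping negation of INT32_MIN, done in unsigned arithmetic so the
     demo itself has no undefined behavior; the final conversion back is
     implementation-defined but wraps on all common targets.  */
  uint32_t wrapped = 0u - (uint32_t) INT32_MIN;
  assert ((int32_t) wrapped == INT32_MIN);   /* still negative */
}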
14431 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14432 value is based on the assumption that signed overflow is undefined,
14433 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14434 *STRICT_OVERFLOW_P. */
14436 bool
14437 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14438 tree op1, bool *strict_overflow_p)
14440 if (TYPE_UNSIGNED (type))
14441 return true;
14443 switch (code)
14445 case POINTER_PLUS_EXPR:
14446 case PLUS_EXPR:
14447 if (FLOAT_TYPE_P (type))
14448 return (tree_expr_nonnegative_warnv_p (op0,
14449 strict_overflow_p)
14450 && tree_expr_nonnegative_warnv_p (op1,
14451 strict_overflow_p));
14453 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14454 both unsigned and at least 2 bits shorter than the result. */
14455 if (TREE_CODE (type) == INTEGER_TYPE
14456 && TREE_CODE (op0) == NOP_EXPR
14457 && TREE_CODE (op1) == NOP_EXPR)
14459 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14460 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14461 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14462 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14464 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14465 TYPE_PRECISION (inner2)) + 1;
14466 return prec < TYPE_PRECISION (type);
14469 break;
14471 case MULT_EXPR:
14472 if (FLOAT_TYPE_P (type))
14474 /* x * x for floating point x is always non-negative. */
14475 if (operand_equal_p (op0, op1, 0))
14476 return true;
14477 return (tree_expr_nonnegative_warnv_p (op0,
14478 strict_overflow_p)
14479 && tree_expr_nonnegative_warnv_p (op1,
14480 strict_overflow_p));
14483 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14484 both unsigned and the sum of their precisions is less than the precision of the result. */
14485 if (TREE_CODE (type) == INTEGER_TYPE
14486 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14487 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14489 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14490 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14491 : TREE_TYPE (op0);
14492 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14493 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14494 : TREE_TYPE (op1);
14496 bool unsigned0 = TYPE_UNSIGNED (inner0);
14497 bool unsigned1 = TYPE_UNSIGNED (inner1);
14499 if (TREE_CODE (op0) == INTEGER_CST)
14500 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14502 if (TREE_CODE (op1) == INTEGER_CST)
14503 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14505 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14506 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14508 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14509 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14510 : TYPE_PRECISION (inner0);
14512 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14513 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14514 : TYPE_PRECISION (inner1);
14516 return precision0 + precision1 < TYPE_PRECISION (type);
14519 return false;
14521 case BIT_AND_EXPR:
14522 case MAX_EXPR:
14523 return (tree_expr_nonnegative_warnv_p (op0,
14524 strict_overflow_p)
14525 || tree_expr_nonnegative_warnv_p (op1,
14526 strict_overflow_p));
14528 case BIT_IOR_EXPR:
14529 case BIT_XOR_EXPR:
14530 case MIN_EXPR:
14531 case RDIV_EXPR:
14532 case TRUNC_DIV_EXPR:
14533 case CEIL_DIV_EXPR:
14534 case FLOOR_DIV_EXPR:
14535 case ROUND_DIV_EXPR:
14536 return (tree_expr_nonnegative_warnv_p (op0,
14537 strict_overflow_p)
14538 && tree_expr_nonnegative_warnv_p (op1,
14539 strict_overflow_p));
14541 case TRUNC_MOD_EXPR:
14542 case CEIL_MOD_EXPR:
14543 case FLOOR_MOD_EXPR:
14544 case ROUND_MOD_EXPR:
14545 return tree_expr_nonnegative_warnv_p (op0,
14546 strict_overflow_p);
14547 default:
14548 return tree_simple_nonnegative_warnv_p (code, type);
14551 /* We don't know sign of `t', so be conservative and return false. */
14552 return false;
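/* Illustrative check (editor's addition): the PLUS_EXPR rule above.  Two
   zero-extended uint8_t values added in a 32-bit int need at most
   max(8,8) + 1 = 9 bits, so the sign bit can never be set.  */
#include <assert.h>
#include <stdint.h>

static void toy_zero_extend_plus (void)
{
  uint8_t x = 255, y = 255;                    /* worst case */
  int32_t sum = (int32_t) x + (int32_t) y;     /* 510, needs 9 bits */
  assert (sum >= 0);                           /* 9 < 32: nonnegative */
}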
14555 /* Return true if T is known to be non-negative. If the return
14556 value is based on the assumption that signed overflow is undefined,
14557 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14558 *STRICT_OVERFLOW_P. */
14560 bool
14561 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14563 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14564 return true;
14566 switch (TREE_CODE (t))
14568 case INTEGER_CST:
14569 return tree_int_cst_sgn (t) >= 0;
14571 case REAL_CST:
14572 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14574 case FIXED_CST:
14575 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14577 case COND_EXPR:
14578 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14579 strict_overflow_p)
14580 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14581 strict_overflow_p));
14582 default:
14583 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14584 TREE_TYPE (t));
14586 /* We don't know sign of `t', so be conservative and return false. */
14587 return false;
14590 /* Return true if T is known to be non-negative. If the return
14591 value is based on the assumption that signed overflow is undefined,
14592 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14593 *STRICT_OVERFLOW_P. */
14595 bool
14596 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14597 tree arg0, tree arg1, bool *strict_overflow_p)
14599 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14600 switch (DECL_FUNCTION_CODE (fndecl))
14602 CASE_FLT_FN (BUILT_IN_ACOS):
14603 CASE_FLT_FN (BUILT_IN_ACOSH):
14604 CASE_FLT_FN (BUILT_IN_CABS):
14605 CASE_FLT_FN (BUILT_IN_COSH):
14606 CASE_FLT_FN (BUILT_IN_ERFC):
14607 CASE_FLT_FN (BUILT_IN_EXP):
14608 CASE_FLT_FN (BUILT_IN_EXP10):
14609 CASE_FLT_FN (BUILT_IN_EXP2):
14610 CASE_FLT_FN (BUILT_IN_FABS):
14611 CASE_FLT_FN (BUILT_IN_FDIM):
14612 CASE_FLT_FN (BUILT_IN_HYPOT):
14613 CASE_FLT_FN (BUILT_IN_POW10):
14614 CASE_INT_FN (BUILT_IN_FFS):
14615 CASE_INT_FN (BUILT_IN_PARITY):
14616 CASE_INT_FN (BUILT_IN_POPCOUNT):
14617 case BUILT_IN_BSWAP32:
14618 case BUILT_IN_BSWAP64:
14619 /* Always true. */
14620 return true;
14622 CASE_FLT_FN (BUILT_IN_SQRT):
14623 /* sqrt(-0.0) is -0.0. */
14624 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14625 return true;
14626 return tree_expr_nonnegative_warnv_p (arg0,
14627 strict_overflow_p);
14629 CASE_FLT_FN (BUILT_IN_ASINH):
14630 CASE_FLT_FN (BUILT_IN_ATAN):
14631 CASE_FLT_FN (BUILT_IN_ATANH):
14632 CASE_FLT_FN (BUILT_IN_CBRT):
14633 CASE_FLT_FN (BUILT_IN_CEIL):
14634 CASE_FLT_FN (BUILT_IN_ERF):
14635 CASE_FLT_FN (BUILT_IN_EXPM1):
14636 CASE_FLT_FN (BUILT_IN_FLOOR):
14637 CASE_FLT_FN (BUILT_IN_FMOD):
14638 CASE_FLT_FN (BUILT_IN_FREXP):
14639 CASE_FLT_FN (BUILT_IN_LCEIL):
14640 CASE_FLT_FN (BUILT_IN_LDEXP):
14641 CASE_FLT_FN (BUILT_IN_LFLOOR):
14642 CASE_FLT_FN (BUILT_IN_LLCEIL):
14643 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14644 CASE_FLT_FN (BUILT_IN_LLRINT):
14645 CASE_FLT_FN (BUILT_IN_LLROUND):
14646 CASE_FLT_FN (BUILT_IN_LRINT):
14647 CASE_FLT_FN (BUILT_IN_LROUND):
14648 CASE_FLT_FN (BUILT_IN_MODF):
14649 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14650 CASE_FLT_FN (BUILT_IN_RINT):
14651 CASE_FLT_FN (BUILT_IN_ROUND):
14652 CASE_FLT_FN (BUILT_IN_SCALB):
14653 CASE_FLT_FN (BUILT_IN_SCALBLN):
14654 CASE_FLT_FN (BUILT_IN_SCALBN):
14655 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14656 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14657 CASE_FLT_FN (BUILT_IN_SINH):
14658 CASE_FLT_FN (BUILT_IN_TANH):
14659 CASE_FLT_FN (BUILT_IN_TRUNC):
14660 /* True if the 1st argument is nonnegative. */
14661 return tree_expr_nonnegative_warnv_p (arg0,
14662 strict_overflow_p);
14664 CASE_FLT_FN (BUILT_IN_FMAX):
14665 /* True if either the 1st or the 2nd argument is nonnegative. */
14666 return (tree_expr_nonnegative_warnv_p (arg0,
14667 strict_overflow_p)
14668 || (tree_expr_nonnegative_warnv_p (arg1,
14669 strict_overflow_p)));
14671 CASE_FLT_FN (BUILT_IN_FMIN):
14672 /* True if the 1st AND 2nd arguments are nonnegative. */
14673 return (tree_expr_nonnegative_warnv_p (arg0,
14674 strict_overflow_p)
14675 && (tree_expr_nonnegative_warnv_p (arg1,
14676 strict_overflow_p)));
14678 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14679 /* True if the 2nd argument is nonnegative. */
14680 return tree_expr_nonnegative_warnv_p (arg1,
14681 strict_overflow_p);
14683 CASE_FLT_FN (BUILT_IN_POWI):
14684 /* True if the 1st argument is nonnegative or the second
14685 argument is an even integer. */
14686 if (TREE_CODE (arg1) == INTEGER_CST
14687 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14688 return true;
14689 return tree_expr_nonnegative_warnv_p (arg0,
14690 strict_overflow_p);
14692 CASE_FLT_FN (BUILT_IN_POW):
14693 /* True if the 1st argument is nonnegative or the second
14694 argument is an even integer valued real. */
14695 if (TREE_CODE (arg1) == REAL_CST)
14697 REAL_VALUE_TYPE c;
14698 HOST_WIDE_INT n;
14700 c = TREE_REAL_CST (arg1);
14701 n = real_to_integer (&c);
14702 if ((n & 1) == 0)
14704 REAL_VALUE_TYPE cint;
14705 real_from_integer (&cint, VOIDmode, n,
14706 n < 0 ? -1 : 0, 0);
14707 if (real_identical (&c, &cint))
14708 return true;
14711 return tree_expr_nonnegative_warnv_p (arg0,
14712 strict_overflow_p);
14714 default:
14715 break;
14717 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14718 type);
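/* Illustrative check (editor's addition): the BUILT_IN_POW rule above —
   pow (x, n) is nonnegative for any x when n is an even integer.  */
#include <assert.h>
#include <math.h>

static void toy_pow_even (void)
{
  assert (pow (-3.0, 2.0) == 9.0);    /* even exponent: nonnegative */
  assert (pow (-2.0, 4.0) == 16.0);
  assert (pow (-2.0, 3.0) == -8.0);   /* odd exponent: no guarantee */
}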
14721 /* Return true if T is known to be non-negative. If the return
14722 value is based on the assumption that signed overflow is undefined,
14723 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14724 *STRICT_OVERFLOW_P. */
14726 bool
14727 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14729 enum tree_code code = TREE_CODE (t);
14730 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14731 return true;
14733 switch (code)
14735 case TARGET_EXPR:
14737 tree temp = TARGET_EXPR_SLOT (t);
14738 t = TARGET_EXPR_INITIAL (t);
14740 /* If the initializer is non-void, then it's a normal expression
14741 that will be assigned to the slot. */
14742 if (!VOID_TYPE_P (t))
14743 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14745 /* Otherwise, the initializer sets the slot in some way. One common
14746 way is an assignment statement at the end of the initializer. */
14747 while (1)
14749 if (TREE_CODE (t) == BIND_EXPR)
14750 t = expr_last (BIND_EXPR_BODY (t));
14751 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14752 || TREE_CODE (t) == TRY_CATCH_EXPR)
14753 t = expr_last (TREE_OPERAND (t, 0));
14754 else if (TREE_CODE (t) == STATEMENT_LIST)
14755 t = expr_last (t);
14756 else
14757 break;
14759 if (TREE_CODE (t) == MODIFY_EXPR
14760 && TREE_OPERAND (t, 0) == temp)
14761 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14762 strict_overflow_p);
14764 return false;
14767 case CALL_EXPR:
14769 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14770 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14772 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14773 get_callee_fndecl (t),
14774 arg0,
14775 arg1,
14776 strict_overflow_p);
14778 case COMPOUND_EXPR:
14779 case MODIFY_EXPR:
14780 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14781 strict_overflow_p);
14782 case BIND_EXPR:
14783 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14784 strict_overflow_p);
14785 case SAVE_EXPR:
14786 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14787 strict_overflow_p);
14789 default:
14790 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14791 TREE_TYPE (t));
14794 /* We don't know sign of `t', so be conservative and return false. */
14795 return false;
14798 /* Return true if T is known to be non-negative. If the return
14799 value is based on the assumption that signed overflow is undefined,
14800 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14801 *STRICT_OVERFLOW_P. */
14803 bool
14804 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14806 enum tree_code code;
14807 if (t == error_mark_node)
14808 return false;
14810 code = TREE_CODE (t);
14811 switch (TREE_CODE_CLASS (code))
14813 case tcc_binary:
14814 case tcc_comparison:
14815 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14816 TREE_TYPE (t),
14817 TREE_OPERAND (t, 0),
14818 TREE_OPERAND (t, 1),
14819 strict_overflow_p);
14821 case tcc_unary:
14822 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14823 TREE_TYPE (t),
14824 TREE_OPERAND (t, 0),
14825 strict_overflow_p);
14827 case tcc_constant:
14828 case tcc_declaration:
14829 case tcc_reference:
14830 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14832 default:
14833 break;
14836 switch (code)
14838 case TRUTH_AND_EXPR:
14839 case TRUTH_OR_EXPR:
14840 case TRUTH_XOR_EXPR:
14841 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14842 TREE_TYPE (t),
14843 TREE_OPERAND (t, 0),
14844 TREE_OPERAND (t, 1),
14845 strict_overflow_p);
14846 case TRUTH_NOT_EXPR:
14847 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14848 TREE_TYPE (t),
14849 TREE_OPERAND (t, 0),
14850 strict_overflow_p);
14852 case COND_EXPR:
14853 case CONSTRUCTOR:
14854 case OBJ_TYPE_REF:
14855 case ASSERT_EXPR:
14856 case ADDR_EXPR:
14857 case WITH_SIZE_EXPR:
14858 case EXC_PTR_EXPR:
14859 case SSA_NAME:
14860 case FILTER_EXPR:
14861 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14863 default:
14864 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14868 /* Return true if `t' is known to be non-negative. Handle warnings
14869 about undefined signed overflow. */
14871 bool
14872 tree_expr_nonnegative_p (tree t)
14874 bool ret, strict_overflow_p;
14876 strict_overflow_p = false;
14877 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14878 if (strict_overflow_p)
14879 fold_overflow_warning (("assuming signed overflow does not occur when "
14880 "determining that expression is always "
14881 "non-negative"),
14882 WARN_STRICT_OVERFLOW_MISC);
14883 return ret;
14887 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14888 For floating point we further ensure that T is not denormal.
14889 Similar logic is present in nonzero_address in rtlanal.h.
14891 If the return value is based on the assumption that signed overflow
14892 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14893 change *STRICT_OVERFLOW_P. */
14895 bool
14896 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14897 bool *strict_overflow_p)
14899 switch (code)
14901 case ABS_EXPR:
14902 return tree_expr_nonzero_warnv_p (op0,
14903 strict_overflow_p);
14905 case NOP_EXPR:
14907 tree inner_type = TREE_TYPE (op0);
14908 tree outer_type = type;
14910 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14911 && tree_expr_nonzero_warnv_p (op0,
14912 strict_overflow_p));
14914 break;
14916 case NON_LVALUE_EXPR:
14917 return tree_expr_nonzero_warnv_p (op0,
14918 strict_overflow_p);
14920 default:
14921 break;
14924 return false;
14927 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14928 For floating point we further ensure that T is not denormal.
14929 Similar logic is present in nonzero_address in rtlanal.h.
14931 If the return value is based on the assumption that signed overflow
14932 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14933 change *STRICT_OVERFLOW_P. */
14935 bool
14936 tree_binary_nonzero_warnv_p (enum tree_code code,
14937 tree type,
14938 tree op0,
14939 tree op1, bool *strict_overflow_p)
14941 bool sub_strict_overflow_p;
14942 switch (code)
14944 case POINTER_PLUS_EXPR:
14945 case PLUS_EXPR:
14946 if (TYPE_OVERFLOW_UNDEFINED (type))
14948 /* In the presence of negative values it is hard
14949 to say anything. */
14950 sub_strict_overflow_p = false;
14951 if (!tree_expr_nonnegative_warnv_p (op0,
14952 &sub_strict_overflow_p)
14953 || !tree_expr_nonnegative_warnv_p (op1,
14954 &sub_strict_overflow_p))
14955 return false;
14956 /* One of the operands must be positive and the other non-negative. */
14957 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14958 overflows, on a twos-complement machine the sum of two
14959 nonnegative numbers can never be zero. */
14960 return (tree_expr_nonzero_warnv_p (op0,
14961 strict_overflow_p)
14962 || tree_expr_nonzero_warnv_p (op1,
14963 strict_overflow_p));
14965 break;
14967 case MULT_EXPR:
14968 if (TYPE_OVERFLOW_UNDEFINED (type))
14970 if (tree_expr_nonzero_warnv_p (op0,
14971 strict_overflow_p)
14972 && tree_expr_nonzero_warnv_p (op1,
14973 strict_overflow_p))
14975 *strict_overflow_p = true;
14976 return true;
14979 break;
14981 case MIN_EXPR:
14982 sub_strict_overflow_p = false;
14983 if (tree_expr_nonzero_warnv_p (op0,
14984 &sub_strict_overflow_p)
14985 && tree_expr_nonzero_warnv_p (op1,
14986 &sub_strict_overflow_p))
14988 if (sub_strict_overflow_p)
14989 *strict_overflow_p = true;
14991 break;
14993 case MAX_EXPR:
14994 sub_strict_overflow_p = false;
14995 if (tree_expr_nonzero_warnv_p (op0,
14996 &sub_strict_overflow_p))
14998 if (sub_strict_overflow_p)
14999 *strict_overflow_p = true;
15001 /* When both operands are nonzero, then MAX must be too. */
15002 if (tree_expr_nonzero_warnv_p (op1,
15003 strict_overflow_p))
15004 return true;
15006 /* MAX where operand 0 is positive is positive. */
15007 return tree_expr_nonnegative_warnv_p (op0,
15008 strict_overflow_p);
15010 /* MAX where operand 1 is positive is positive. */
15011 else if (tree_expr_nonzero_warnv_p (op1,
15012 &sub_strict_overflow_p)
15013 && tree_expr_nonnegative_warnv_p (op1,
15014 &sub_strict_overflow_p))
15016 if (sub_strict_overflow_p)
15017 *strict_overflow_p = true;
15018 return true;
15020 break;
15022 case BIT_IOR_EXPR:
15023 return (tree_expr_nonzero_warnv_p (op1,
15024 strict_overflow_p)
15025 || tree_expr_nonzero_warnv_p (op0,
15026 strict_overflow_p));
15028 default:
15029 break;
15032 return false;
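/* Illustrative check (editor's addition): the PLUS_EXPR reasoning above.
   The sum of two nonnegative 32-bit values is at most 2^32 - 2, so even
   two's complement wraparound cannot make it exactly zero unless both
   operands were zero.  */
#include <assert.h>
#include <stdint.h>

static void toy_nonzero_sum (void)
{
  int32_t a = INT32_MAX, b = INT32_MAX;             /* nonnegative, nonzero */
  uint32_t wrapped = (uint32_t) a + (uint32_t) b;   /* 0xfffffffe */
  assert (wrapped != 0);
}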
15035 /* Return true when T is an address and is known to be nonzero.
15036 For floating point we further ensure that T is not denormal.
15037 Similar logic is present in nonzero_address in rtlanal.h.
15039 If the return value is based on the assumption that signed overflow
15040 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15041 change *STRICT_OVERFLOW_P. */
15043 bool
15044 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15046 bool sub_strict_overflow_p;
15047 switch (TREE_CODE (t))
15049 case INTEGER_CST:
15050 return !integer_zerop (t);
15052 case ADDR_EXPR:
15054 tree base = get_base_address (TREE_OPERAND (t, 0));
15056 if (!base)
15057 return false;
15059 /* Weak declarations may link to NULL. Other things may also be NULL
15060 so protect with -fdelete-null-pointer-checks; but not variables
15061 allocated on the stack. */
15062 if (DECL_P (base)
15063 && (flag_delete_null_pointer_checks
15064 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15065 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15067 /* Constants are never weak. */
15068 if (CONSTANT_CLASS_P (base))
15069 return true;
15071 return false;
15074 case COND_EXPR:
15075 sub_strict_overflow_p = false;
15076 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15077 &sub_strict_overflow_p)
15078 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15079 &sub_strict_overflow_p))
15081 if (sub_strict_overflow_p)
15082 *strict_overflow_p = true;
15083 return true;
15085 break;
15087 default:
15088 break;
15090 return false;
15093 /* Return true when T is an address and is known to be nonzero.
15094 For floating point we further ensure that T is not denormal.
15095 Similar logic is present in nonzero_address in rtlanal.h.
15097 If the return value is based on the assumption that signed overflow
15098 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15099 change *STRICT_OVERFLOW_P. */
15101 bool
15102 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15104 tree type = TREE_TYPE (t);
15105 enum tree_code code;
15107 /* Doing something useful for floating point would need more work. */
15108 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15109 return false;
15111 code = TREE_CODE (t);
15112 switch (TREE_CODE_CLASS (code))
15114 case tcc_unary:
15115 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15116 strict_overflow_p);
15117 case tcc_binary:
15118 case tcc_comparison:
15119 return tree_binary_nonzero_warnv_p (code, type,
15120 TREE_OPERAND (t, 0),
15121 TREE_OPERAND (t, 1),
15122 strict_overflow_p);
15123 case tcc_constant:
15124 case tcc_declaration:
15125 case tcc_reference:
15126 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15128 default:
15129 break;
15132 switch (code)
15134 case TRUTH_NOT_EXPR:
15135 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15136 strict_overflow_p);
15138 case TRUTH_AND_EXPR:
15139 case TRUTH_OR_EXPR:
15140 case TRUTH_XOR_EXPR:
15141 return tree_binary_nonzero_warnv_p (code, type,
15142 TREE_OPERAND (t, 0),
15143 TREE_OPERAND (t, 1),
15144 strict_overflow_p);
15146 case COND_EXPR:
15147 case CONSTRUCTOR:
15148 case OBJ_TYPE_REF:
15149 case ASSERT_EXPR:
15150 case ADDR_EXPR:
15151 case WITH_SIZE_EXPR:
15152 case EXC_PTR_EXPR:
15153 case SSA_NAME:
15154 case FILTER_EXPR:
15155 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15157 case COMPOUND_EXPR:
15158 case MODIFY_EXPR:
15159 case BIND_EXPR:
15160 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15161 strict_overflow_p);
15163 case SAVE_EXPR:
15164 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15165 strict_overflow_p);
15167 case CALL_EXPR:
15168 return alloca_call_p (t);
15170 default:
15171 break;
15173 return false;
15176 /* Return true when T is an address and is known to be nonzero.
15177 Handle warnings about undefined signed overflow. */
15179 bool
15180 tree_expr_nonzero_p (tree t)
15182 bool ret, strict_overflow_p;
15184 strict_overflow_p = false;
15185 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15186 if (strict_overflow_p)
15187 fold_overflow_warning (("assuming signed overflow does not occur when "
15188 "determining that expression is always "
15189 "non-zero"),
15190 WARN_STRICT_OVERFLOW_MISC);
15191 return ret;
15194 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15195 attempt to fold the expression to a constant without modifying TYPE,
15196 OP0 or OP1.
15198 If the expression could be simplified to a constant, then return
15199 the constant. If the expression does not simplify to a
15200 constant, then return NULL_TREE. */
15202 tree
15203 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15205 tree tem = fold_binary (code, type, op0, op1);
15206 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15209 /* Given the components of a unary expression CODE, TYPE and OP0,
15210 attempt to fold the expression to a constant without modifying
15211 TYPE or OP0.
15213 If the expression could be simplified to a constant, then return
15214 the constant. If the expression does not simplify to a
15215 constant, then return NULL_TREE. */
15217 tree
15218 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15220 tree tem = fold_unary (code, type, op0);
15221 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15224 /* If EXP represents referencing an element in a constant string
15225 (either via pointer arithmetic or array indexing), return the
15226 tree representing the value accessed, otherwise return NULL. */
15228 tree
15229 fold_read_from_constant_string (tree exp)
15231 if ((TREE_CODE (exp) == INDIRECT_REF
15232 || TREE_CODE (exp) == ARRAY_REF)
15233 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15235 tree exp1 = TREE_OPERAND (exp, 0);
15236 tree index;
15237 tree string;
15239 if (TREE_CODE (exp) == INDIRECT_REF)
15240 string = string_constant (exp1, &index);
15241 else
15243 tree low_bound = array_ref_low_bound (exp);
15244 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15246 /* Optimize the special-case of a zero lower bound.
15248 We convert the low_bound to sizetype to avoid some problems
15249 with constant folding. (E.g. suppose the lower bound is 1,
15250 and its mode is QI. Without the conversion, (ARRAY
15251 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15252 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15253 if (! integer_zerop (low_bound))
15254 index = size_diffop (index, fold_convert (sizetype, low_bound));
15256 string = exp1;
15259 if (string
15260 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15261 && TREE_CODE (string) == STRING_CST
15262 && TREE_CODE (index) == INTEGER_CST
15263 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15264 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15265 == MODE_INT)
15266 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15267 return build_int_cst_type (TREE_TYPE (exp),
15268 (TREE_STRING_POINTER (string)
15269 [TREE_INT_CST_LOW (index)]));
15271 return NULL;
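/* Illustrative check (editor's addition): the two access forms folded
   above — array indexing and pointer arithmetic into a string constant
   both yield the character itself.  */
#include <assert.h>

static void toy_const_string_read (void)
{
  assert ("hello"[1] == 'e');         /* ARRAY_REF form */
  assert (*("hello" + 4) == 'o');     /* INDIRECT_REF form */
}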
15274 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15275 an integer constant, real, or fixed-point constant.
15277 TYPE is the type of the result. */
15279 static tree
15280 fold_negate_const (tree arg0, tree type)
15282 tree t = NULL_TREE;
15284 switch (TREE_CODE (arg0))
15286 case INTEGER_CST:
15288 unsigned HOST_WIDE_INT low;
15289 HOST_WIDE_INT high;
15290 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15291 TREE_INT_CST_HIGH (arg0),
15292 &low, &high);
15293 t = force_fit_type_double (type, low, high, 1,
15294 (overflow | TREE_OVERFLOW (arg0))
15295 && !TYPE_UNSIGNED (type));
15296 break;
15299 case REAL_CST:
15300 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15301 break;
15303 case FIXED_CST:
15305 FIXED_VALUE_TYPE f;
15306 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15307 &(TREE_FIXED_CST (arg0)), NULL,
15308 TYPE_SATURATING (type));
15309 t = build_fixed (type, f);
15310 /* Propagate overflow flags. */
15311 if (overflow_p | TREE_OVERFLOW (arg0))
15312 TREE_OVERFLOW (t) = 1;
15313 break;
15316 default:
15317 gcc_unreachable ();
15320 return t;
15323 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15324 an integer constant or real constant.
15326 TYPE is the type of the result. */
15328 tree
15329 fold_abs_const (tree arg0, tree type)
15331 tree t = NULL_TREE;
15333 switch (TREE_CODE (arg0))
15335 case INTEGER_CST:
15336 /* If the value is unsigned, then the absolute value is
15337 the same as the ordinary value. */
15338 if (TYPE_UNSIGNED (type))
15339 t = arg0;
15340 /* Similarly, if the value is non-negative. */
15341 else if (INT_CST_LT (integer_minus_one_node, arg0))
15342 t = arg0;
15343 /* If the value is negative, then the absolute value is
15344 its negation. */
15345 else
15347 unsigned HOST_WIDE_INT low;
15348 HOST_WIDE_INT high;
15349 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15350 TREE_INT_CST_HIGH (arg0),
15351 &low, &high);
15352 t = force_fit_type_double (type, low, high, -1,
15353 overflow | TREE_OVERFLOW (arg0));
15355 break;
15357 case REAL_CST:
15358 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15359 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15360 else
15361 t = arg0;
15362 break;
15364 default:
15365 gcc_unreachable ();
15368 return t;
15371 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15372 constant. TYPE is the type of the result. */
15374 static tree
15375 fold_not_const (tree arg0, tree type)
15377 tree t = NULL_TREE;
15379 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15381 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15382 ~TREE_INT_CST_HIGH (arg0), 0,
15383 TREE_OVERFLOW (arg0));
15385 return t;
15388 /* Given CODE, a relational operator, the target type, TYPE and two
15389 constant operands OP0 and OP1, return the result of the
15390 relational operation. If the result is not a compile time
15391 constant, then return NULL_TREE. */
15393 static tree
15394 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15396 int result, invert;
15398 /* From here on, the only cases we handle are when the result is
15399 known to be a constant. */
15401 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15403 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15404 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15406 /* Handle the cases where either operand is a NaN. */
15407 if (real_isnan (c0) || real_isnan (c1))
15409 switch (code)
15411 case EQ_EXPR:
15412 case ORDERED_EXPR:
15413 result = 0;
15414 break;
15416 case NE_EXPR:
15417 case UNORDERED_EXPR:
15418 case UNLT_EXPR:
15419 case UNLE_EXPR:
15420 case UNGT_EXPR:
15421 case UNGE_EXPR:
15422 case UNEQ_EXPR:
15423 result = 1;
15424 break;
15426 case LT_EXPR:
15427 case LE_EXPR:
15428 case GT_EXPR:
15429 case GE_EXPR:
15430 case LTGT_EXPR:
15431 if (flag_trapping_math)
15432 return NULL_TREE;
15433 result = 0;
15434 break;
15436 default:
15437 gcc_unreachable ();
15440 return constant_boolean_node (result, type);
15443 return constant_boolean_node (real_compare (code, c0, c1), type);
15446 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15448 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15449 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15450 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15453 /* Handle equality/inequality of complex constants. */
15454 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15456 tree rcond = fold_relational_const (code, type,
15457 TREE_REALPART (op0),
15458 TREE_REALPART (op1));
15459 tree icond = fold_relational_const (code, type,
15460 TREE_IMAGPART (op0),
15461 TREE_IMAGPART (op1));
15462 if (code == EQ_EXPR)
15463 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15464 else if (code == NE_EXPR)
15465 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15466 else
15467 return NULL_TREE;
15470 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15472 To compute GT, swap the arguments and do LT.
15473 To compute GE, do LT and invert the result.
15474 To compute LE, swap the arguments, do LT and invert the result.
15475 To compute NE, do EQ and invert the result.
15477 Therefore, the code below must handle only EQ and LT. */
15479 if (code == LE_EXPR || code == GT_EXPR)
15481 tree tem = op0;
15482 op0 = op1;
15483 op1 = tem;
15484 code = swap_tree_comparison (code);
15487 /* Note that it is safe to invert for real values here because we
15488 have already handled the one case where it matters. */
15490 invert = 0;
15491 if (code == NE_EXPR || code == GE_EXPR)
15493 invert = 1;
15494 code = invert_tree_comparison (code, false);
15497 /* Compute a result for LT or EQ if args permit;
15498 otherwise return NULL_TREE. */
15499 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15501 if (code == EQ_EXPR)
15502 result = tree_int_cst_equal (op0, op1);
15503 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15504 result = INT_CST_LT_UNSIGNED (op0, op1);
15505 else
15506 result = INT_CST_LT (op0, op1);
15508 else
15509 return NULL_TREE;
15511 if (invert)
15512 result ^= 1;
15513 return constant_boolean_node (result, type);
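/* Illustrative check (editor's addition): the canonicalization above —
   every relation reduces to LT or EQ plus swapping and/or inversion.  */
#include <assert.h>
#include <stdbool.h>

static bool toy_lt (long a, long b) { return a < b; }
static bool toy_eq (long a, long b) { return a == b; }

static void toy_relational (void)
{
  long a = 3, b = 7;
  assert ((a >  b) ==  toy_lt (b, a));   /* GT: swap, do LT           */
  assert ((a >= b) == !toy_lt (a, b));   /* GE: do LT, invert         */
  assert ((a <= b) == !toy_lt (b, a));   /* LE: swap, do LT, invert   */
  assert ((a != b) == !toy_eq (a, b));   /* NE: do EQ, invert         */
}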
15516 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15517 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15518 itself. */
15520 tree
15521 fold_build_cleanup_point_expr (tree type, tree expr)
15523 /* If the expression does not have side effects then we don't have to wrap
15524 it with a cleanup point expression. */
15525 if (!TREE_SIDE_EFFECTS (expr))
15526 return expr;
15528 /* If the expression is a return, check whether the expression inside the
15529 return, or the right-hand side of the modify expression inside the
15530 return, has side effects. If either of them has none, we don't need to
15531 wrap the expression in a cleanup point expression. Note we don't check
15532 the left-hand side of the modify because it should always be a return decl. */
15533 if (TREE_CODE (expr) == RETURN_EXPR)
15535 tree op = TREE_OPERAND (expr, 0);
15536 if (!op || !TREE_SIDE_EFFECTS (op))
15537 return expr;
15538 op = TREE_OPERAND (op, 1);
15539 if (!TREE_SIDE_EFFECTS (op))
15540 return expr;
15543 return build1 (CLEANUP_POINT_EXPR, type, expr);
15546 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15547 of an indirection through OP0, or NULL_TREE if no simplification is
15548 possible. */
15550 tree
15551 fold_indirect_ref_1 (tree type, tree op0)
15553 tree sub = op0;
15554 tree subtype;
15556 STRIP_NOPS (sub);
15557 subtype = TREE_TYPE (sub);
15558 if (!POINTER_TYPE_P (subtype))
15559 return NULL_TREE;
15561 if (TREE_CODE (sub) == ADDR_EXPR)
15563 tree op = TREE_OPERAND (sub, 0);
15564 tree optype = TREE_TYPE (op);
15565 /* *&CONST_DECL -> the value of the const decl. */
15566 if (TREE_CODE (op) == CONST_DECL)
15567 return DECL_INITIAL (op);
15568 /* *&p => p; make sure to handle *&"str"[cst] here. */
15569 if (type == optype)
15571 tree fop = fold_read_from_constant_string (op);
15572 if (fop)
15573 return fop;
15574 else
15575 return op;
15577 /* *(foo *)&fooarray => fooarray[0] */
15578 else if (TREE_CODE (optype) == ARRAY_TYPE
15579 && type == TREE_TYPE (optype))
15581 tree type_domain = TYPE_DOMAIN (optype);
15582 tree min_val = size_zero_node;
15583 if (type_domain && TYPE_MIN_VALUE (type_domain))
15584 min_val = TYPE_MIN_VALUE (type_domain);
15585 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15587 /* *(foo *)&complexfoo => __real__ complexfoo */
15588 else if (TREE_CODE (optype) == COMPLEX_TYPE
15589 && type == TREE_TYPE (optype))
15590 return fold_build1 (REALPART_EXPR, type, op);
15591 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15592 else if (TREE_CODE (optype) == VECTOR_TYPE
15593 && type == TREE_TYPE (optype))
15595 tree part_width = TYPE_SIZE (type);
15596 tree index = bitsize_int (0);
15597 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15601 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15602 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15603 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15605 tree op00 = TREE_OPERAND (sub, 0);
15606 tree op01 = TREE_OPERAND (sub, 1);
15607 tree op00type;
15609 STRIP_NOPS (op00);
15610 op00type = TREE_TYPE (op00);
15611 if (TREE_CODE (op00) == ADDR_EXPR
15612 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15613 && type == TREE_TYPE (TREE_TYPE (op00type)))
15615 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15616 tree part_width = TYPE_SIZE (type);
15617 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15618 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15619 tree index = bitsize_int (indexi);
15621 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15622 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15623 part_width, index);
15629 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15630 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15631 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15633 tree op00 = TREE_OPERAND (sub, 0);
15634 tree op01 = TREE_OPERAND (sub, 1);
15635 tree op00type;
15637 STRIP_NOPS (op00);
15638 op00type = TREE_TYPE (op00);
15639 if (TREE_CODE (op00) == ADDR_EXPR
15640 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15641 && type == TREE_TYPE (TREE_TYPE (op00type)))
15643 tree size = TYPE_SIZE_UNIT (type);
15644 if (tree_int_cst_equal (size, op01))
15645 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15649 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15650 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15651 && type == TREE_TYPE (TREE_TYPE (subtype)))
15653 tree type_domain;
15654 tree min_val = size_zero_node;
15655 sub = build_fold_indirect_ref (sub);
15656 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15657 if (type_domain && TYPE_MIN_VALUE (type_domain))
15658 min_val = TYPE_MIN_VALUE (type_domain);
15659 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15662 return NULL_TREE;
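/* Illustrative check (editor's addition): two of the simplifications
   above, written at the source level.  */
#include <assert.h>

static void toy_indirect_ref (void)
{
  int x = 42;
  int arr[3] = { 7, 8, 9 };
  assert (*&x == x);                   /* *&p => p */
  assert (*(int *) &arr == arr[0]);    /* *(foo *)&fooarray => fooarray[0] */
}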
15665 /* Builds an expression for an indirection through T, simplifying some
15666 cases. */
15668 tree
15669 build_fold_indirect_ref (tree t)
15671 tree type = TREE_TYPE (TREE_TYPE (t));
15672 tree sub = fold_indirect_ref_1 (type, t);
15674 if (sub)
15675 return sub;
15676 else
15677 return build1 (INDIRECT_REF, type, t);
15680 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15682 tree
15683 fold_indirect_ref (tree t)
15685 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15687 if (sub)
15688 return sub;
15689 else
15690 return t;
15693 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15694 whose result is ignored. The type of the returned tree need not be
15695 the same as that of the original expression. */
15697 tree
15698 fold_ignored_result (tree t)
15700 if (!TREE_SIDE_EFFECTS (t))
15701 return integer_zero_node;
15703 for (;;)
15704 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15706 case tcc_unary:
15707 t = TREE_OPERAND (t, 0);
15708 break;
15710 case tcc_binary:
15711 case tcc_comparison:
15712 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15713 t = TREE_OPERAND (t, 0);
15714 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15715 t = TREE_OPERAND (t, 1);
15716 else
15717 return t;
15718 break;
15720 case tcc_expression:
15721 switch (TREE_CODE (t))
15723 case COMPOUND_EXPR:
15724 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15725 return t;
15726 t = TREE_OPERAND (t, 0);
15727 break;
15729 case COND_EXPR:
15730 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15731 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15732 return t;
15733 t = TREE_OPERAND (t, 0);
15734 break;
15736 default:
15737 return t;
15739 break;
15741 default:
15742 return t;
15746 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15747 This can only be applied to objects of a sizetype. */
15749 tree
15750 round_up (tree value, int divisor)
15752 tree div = NULL_TREE;
15754 gcc_assert (divisor > 0);
15755 if (divisor == 1)
15756 return value;
15758 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15759 have to do anything. Only do this check when VALUE is not a constant,
15760 because for a constant the check is more expensive than just
15761 doing the rounding. */
15762 if (TREE_CODE (value) != INTEGER_CST)
15764 div = build_int_cst (TREE_TYPE (value), divisor);
15766 if (multiple_of_p (TREE_TYPE (value), value, div))
15767 return value;
15770 /* If divisor is a power of two, simplify this to bit manipulation. */
15771 if (divisor == (divisor & -divisor))
15773 if (TREE_CODE (value) == INTEGER_CST)
15775 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15776 unsigned HOST_WIDE_INT high;
15777 bool overflow_p;
15779 if ((low & (divisor - 1)) == 0)
15780 return value;
15782 overflow_p = TREE_OVERFLOW (value);
15783 high = TREE_INT_CST_HIGH (value);
15784 low &= ~(divisor - 1);
15785 low += divisor;
15786 if (low == 0)
15788 high++;
15789 if (high == 0)
15790 overflow_p = true;
15793 return force_fit_type_double (TREE_TYPE (value), low, high,
15794 -1, overflow_p);
15796 else
15798 tree t;
15800 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15801 value = size_binop (PLUS_EXPR, value, t);
15802 t = build_int_cst (TREE_TYPE (value), -divisor);
15803 value = size_binop (BIT_AND_EXPR, value, t);
15806 else
15808 if (!div)
15809 div = build_int_cst (TREE_TYPE (value), divisor);
15810 value = size_binop (CEIL_DIV_EXPR, value, div);
15811 value = size_binop (MULT_EXPR, value, div);
15814 return value;
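/* Illustrative check (editor's addition): round_up's power-of-two fast
   path is the classic (value + divisor - 1) & -divisor trick.  */
#include <assert.h>

static unsigned long toy_round_up_pow2 (unsigned long value, unsigned long d)
{
  return (value + d - 1) & ~(d - 1);   /* D must be a power of two */
}

static void toy_round_up_demo (void)
{
  assert (toy_round_up_pow2 (13, 8) == 16);
  assert (toy_round_up_pow2 (16, 8) == 16);   /* already a multiple */
}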
15817 /* Likewise, but round down. */
15819 tree
15820 round_down (tree value, int divisor)
15822 tree div = NULL_TREE;
15824 gcc_assert (divisor > 0);
15825 if (divisor == 1)
15826 return value;
15828 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15829 have to do anything. Only do this check when VALUE is not a constant,
15830 because for a constant the check is more expensive than just
15831 doing the rounding. */
15832 if (TREE_CODE (value) != INTEGER_CST)
15834 div = build_int_cst (TREE_TYPE (value), divisor);
15836 if (multiple_of_p (TREE_TYPE (value), value, div))
15837 return value;
15840 /* If divisor is a power of two, simplify this to bit manipulation. */
15841 if (divisor == (divisor & -divisor))
15843 tree t;
15845 t = build_int_cst (TREE_TYPE (value), -divisor);
15846 value = size_binop (BIT_AND_EXPR, value, t);
15848 else
15850 if (!div)
15851 div = build_int_cst (TREE_TYPE (value), divisor);
15852 value = size_binop (FLOOR_DIV_EXPR, value, div);
15853 value = size_binop (MULT_EXPR, value, div);
15856 return value;
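/* Illustrative check (editor's addition): likewise, round_down's
   power-of-two path is a single mask, value & -divisor.  */
#include <assert.h>

static unsigned long toy_round_down_pow2 (unsigned long value, unsigned long d)
{
  return value & ~(d - 1);   /* D must be a power of two */
}

static void toy_round_down_demo (void)
{
  assert (toy_round_down_pow2 (13, 8) == 8);
  assert (toy_round_down_pow2 (16, 8) == 16);
}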
15859 /* Returns the pointer to the base of the object addressed by EXP and
15860 extracts the information about the offset of the access, storing it
15861 in PBITPOS and POFFSET. */
15863 static tree
15864 split_address_to_core_and_offset (tree exp,
15865 HOST_WIDE_INT *pbitpos, tree *poffset)
15867 tree core;
15868 enum machine_mode mode;
15869 int unsignedp, volatilep;
15870 HOST_WIDE_INT bitsize;
15872 if (TREE_CODE (exp) == ADDR_EXPR)
15874 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15875 poffset, &mode, &unsignedp, &volatilep,
15876 false);
15877 core = build_fold_addr_expr (core);
15879 else
15881 core = exp;
15882 *pbitpos = 0;
15883 *poffset = NULL_TREE;
15886 return core;
15889 /* Returns true if addresses of E1 and E2 differ by a constant, false
15890 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15892 bool
15893 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15895 tree core1, core2;
15896 HOST_WIDE_INT bitpos1, bitpos2;
15897 tree toffset1, toffset2, tdiff, type;
15899 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15900 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15902 if (bitpos1 % BITS_PER_UNIT != 0
15903 || bitpos2 % BITS_PER_UNIT != 0
15904 || !operand_equal_p (core1, core2, 0))
15905 return false;
15907 if (toffset1 && toffset2)
15909 type = TREE_TYPE (toffset1);
15910 if (type != TREE_TYPE (toffset2))
15911 toffset2 = fold_convert (type, toffset2);
15913 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15914 if (!cst_and_fits_in_hwi (tdiff))
15915 return false;
15917 *diff = int_cst_value (tdiff);
15919 else if (toffset1 || toffset2)
15921 /* If only one of the offsets is non-constant, the difference cannot
15922 be a constant. */
15923 return false;
15925 else
15926 *diff = 0;
15928 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15929 return true;
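/* Illustrative check (editor's addition): the constant difference this
   function recovers — &a[5] and &a[2] share the core &a and differ by
   exactly 3 * sizeof (int) bytes.  */
#include <assert.h>
#include <stddef.h>

static void toy_ptr_difference (void)
{
  int a[8];
  ptrdiff_t diff = (char *) &a[5] - (char *) &a[2];
  assert (diff == (ptrdiff_t) (3 * sizeof (int)));
}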
15932 /* Simplify the floating point expression EXP when the sign of the
15933 result is not significant. Return NULL_TREE if no simplification
15934 is possible. */
15936 tree
15937 fold_strip_sign_ops (tree exp)
15939 tree arg0, arg1;
15941 switch (TREE_CODE (exp))
15943 case ABS_EXPR:
15944 case NEGATE_EXPR:
15945 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15946 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15948 case MULT_EXPR:
15949 case RDIV_EXPR:
15950 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15951 return NULL_TREE;
15952 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15953 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15954 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15955 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15956 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15957 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15958 break;
15960 case COMPOUND_EXPR:
15961 arg0 = TREE_OPERAND (exp, 0);
15962 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15963 if (arg1)
15964 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15965 break;
15967 case COND_EXPR:
15968 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15969 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15970 if (arg0 || arg1)
15971 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15972 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15973 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15974 break;
15976 case CALL_EXPR:
15978 const enum built_in_function fcode = builtin_mathfn_code (exp);
15979 switch (fcode)
15981 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15982 /* Strip copysign function call, return the 1st argument. */
15983 arg0 = CALL_EXPR_ARG (exp, 0);
15984 arg1 = CALL_EXPR_ARG (exp, 1);
15985 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15987 default:
15988 /* Strip sign ops from the argument of "odd" math functions. */
15989 if (negate_mathfn_p (fcode))
15991 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15992 if (arg0)
15993 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15995 break;
15998 break;
16000 default:
16001 break;
16003 return NULL_TREE;