/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
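
/* Illustrative usage sketch for OVERFLOW_SUM_SIGN (compiled out; for
   exposition only).  Two positive addends whose sum lands in the sign
   bit overflow; addends of opposite sign never can.  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT sum = (HOST_WIDE_INT)
    ((unsigned HOST_WIDE_INT) a + (unsigned HOST_WIDE_INT) a);

  /* a and a agree in sign, but sum's sign differs: overflow.  */
  gcc_assert (OVERFLOW_SUM_SIGN (a, a, sum));
  /* 1 + (-2) = -1 cannot overflow: the addends' signs differ.  */
  gcc_assert (!OVERFLOW_SUM_SIGN ((HOST_WIDE_INT) 1, (HOST_WIDE_INT) -2,
                                  (HOST_WIDE_INT) -1));
}
#endif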
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
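
/* Round-trip sketch for encode/decode (compiled out; for exposition
   only).  Each of the four words holds HOST_BITS_PER_WIDE_INT / 2 bits,
   so packing after unpacking recovers the original doubleword.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 0x12345678, low2;
  HOST_WIDE_INT hi = -1, hi2;

  encode (words, low, hi);
  decode (words, &low2, &hi2);
  gcc_assert (low2 == low && hi2 == hi);
}
#endif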
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
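
/* Behavior sketch for fit_double_type (compiled out; for exposition
   only; INT8_TYPE stands in for a hypothetical signed type of
   precision 8).  Forcing 300 into 8 bits keeps only the low byte,
   0x2C == 44, and the nonzero return value reports that the value
   changed.  */
#if 0
static void
fit_double_type_example (tree int8_type)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  gcc_assert (fit_double_type (300, 0, &lv, &hv, int8_type));
  gcc_assert (lv == 44 && hv == 0);
}
#endif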
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   the value to be within range of TYPE (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
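
/* Corner-case sketch for neg_double (compiled out; for exposition
   only).  The most negative doubleword value is its own negation in
   two's complement, so negating it returns nonzero to flag the
   overflow.  */
#if 0
static void
neg_double_example (void)
{
  HOST_WIDE_INT min_hi
    = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
                       << (HOST_BITS_PER_WIDE_INT - 1));
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  gcc_assert (neg_double (0, min_hi, &lv, &hv));
  gcc_assert (lv == 0 && hv == min_hi);
}
#endif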
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
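
/* Usage sketch for mul_double_with_sign (compiled out; for exposition
   only; assumes a 64-bit HOST_WIDE_INT).  Squaring 2^32 yields 2^64,
   which is bit 0 of the high word; the doubleword still holds it, so
   no overflow is reported.  */
#if 0
static void
mul_double_example (void)
{
  unsigned HOST_WIDE_INT two_32 = (unsigned HOST_WIDE_INT) 1 << 32;
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  gcc_assert (!mul_double_with_sign (two_32, 0, two_32, 0, &lv, &hv, false));
  gcc_assert (lv == 0 && hv == 1);
}
#endif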
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
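
/* Round-trip sketch for the rotate helpers (compiled out; for
   exposition only).  Rotating left and then right by the same count
   restores the PREC low-order bits; bits above PREC come back
   sign-extended from bit PREC - 1, so only the masked value is
   compared.  */
#if 0
static void
rotate_example (void)
{
  unsigned HOST_WIDE_INT l = 0xdeadbeef, l2, l3;
  HOST_WIDE_INT h = 0, h2, h3;

  lrotate_double (l, h, 13, 32, &l2, &h2);
  rrotate_double (l2, h2, 13, 32, &l3, &h3);
  gcc_assert ((l3 & 0xffffffff) == l);
}
#endif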
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
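
/* Rounding sketch for div_and_round_double (compiled out; for
   exposition only).  Dividing -7 by 2 gives quotient -3 remainder -1
   when truncating toward zero, but quotient -4 remainder 1 when
   flooring toward negative infinity; in both cases
   num == quo * den + rem.  */
#if 0
static void
div_and_round_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (TRUNC_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);
  gcc_assert (lquo == (unsigned HOST_WIDE_INT) -3 && hquo == -1
              && lrem == (unsigned HOST_WIDE_INT) -1 && hrem == -1);

  div_and_round_double (FLOOR_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);
  gcc_assert (lquo == (unsigned HOST_WIDE_INT) -4 && hquo == -1
              && lrem == 1 && hrem == 0);
}
#endif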
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
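
/* Example for may_negate_without_overflow_p (compiled out; for
   exposition only; INT32_TYPE stands in for a hypothetical signed type
   of precision 32).  Only the most negative value lacks a representable
   negation.  */
#if 0
static void
may_negate_example (tree int32_type)
{
  tree int_min = build_int_cst (int32_type, -2147483647 - 1);

  gcc_assert (!may_negate_without_overflow_p (int_min));
  gcc_assert (may_negate_without_overflow_p
              (build_int_cst (int32_type, -2147483647)));
}
#endif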
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || !flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
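
/* Decomposition sketch (for exposition only): with CODE == PLUS_EXPR,
     split_tree (x + 3)  returns x with *LITP = 3,
     split_tree (x - 3)  returns x with *MINUS_LITP = 3,
     split_tree (x + c)  for a TREE_CONSTANT but non-literal c
                         returns x with *CONP = c,
   and associate_trees below can recombine the pieces after other
   simplifications have been applied to each part.  */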
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
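
/* Usage sketch for int_const_binop (compiled out; for exposition
   only).  Folding 6 * 7 yields the INTEGER_CST 42; a NULL_TREE result
   would mean the operation (e.g. a division by zero) cannot be
   evaluated at compile time.  */
#if 0
static tree
int_const_binop_example (void)
{
  tree a = build_int_cst (integer_type_node, 6);
  tree b = build_int_cst (integer_type_node, 7);

  return int_const_binop (MULT_EXPR, a, b, 0);
}
#endif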
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
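
/* The complex division in const_binop above follows the textbook
   formula obtained by multiplying through by the conjugate of the
   divisor:

     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   T1 and T2 are the two numerators and MAGSQUARED the shared real
   denominator.  */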
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = lang_hooks.types.signed_type (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
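
/* Example for size_diffop (for exposition only): sizetype is unsigned,
   so size_binop (MINUS_EXPR, size_int (2), size_int (5)) would wrap to
   a huge value, whereas size_diffop (size_int (2), size_int (5))
   returns -3 in the corresponding signed type ssizetype.  */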
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
                                converting a pointer  */
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
1901 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1902 to an integer type. */
1904 static tree
1905 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1907 int overflow = 0;
1908 tree t;
1910 /* The following code implements the floating point to integer
1911 conversion rules required by the Java Language Specification,
1912 that IEEE NaNs are mapped to zero and values that overflow
1913 the target precision saturate, i.e. values greater than
1914 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1915 are mapped to INT_MIN. These semantics are allowed by the
1916 C and C++ standards that simply state that the behavior of
1917 FP-to-integer conversion is unspecified upon overflow. */
1919 HOST_WIDE_INT high, low;
1920 REAL_VALUE_TYPE r;
1921 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1923 switch (code)
1925 case FIX_TRUNC_EXPR:
1926 real_trunc (&r, VOIDmode, &x);
1927 break;
1929 default:
1930 gcc_unreachable ();
1933 /* If R is NaN, return zero and show we have an overflow. */
1934 if (REAL_VALUE_ISNAN (r))
1936 overflow = 1;
1937 high = 0;
1938 low = 0;
1941 /* See if R is less than the lower bound or greater than the
1942 upper bound. */
1944 if (! overflow)
1946 tree lt = TYPE_MIN_VALUE (type);
1947 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1948 if (REAL_VALUES_LESS (r, l))
1950 overflow = 1;
1951 high = TREE_INT_CST_HIGH (lt);
1952 low = TREE_INT_CST_LOW (lt);
1956 if (! overflow)
1958 tree ut = TYPE_MAX_VALUE (type);
1959 if (ut)
1961 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1962 if (REAL_VALUES_LESS (u, r))
1964 overflow = 1;
1965 high = TREE_INT_CST_HIGH (ut);
1966 low = TREE_INT_CST_LOW (ut);
1971 if (! overflow)
1972 REAL_VALUE_TO_INT (&low, &high, r);
1974 t = force_fit_type_double (type, low, high, -1,
1975 overflow | TREE_OVERFLOW (arg1));
1976 return t;
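/* Editorial sketch, not part of the original file: by the saturating rules
   described above, truncating 1.0e30 to a 32-bit signed type folds to
   TYPE_MAX_VALUE with the overflow flag set, and a NaN would fold to zero.
   The function name is hypothetical and a 32-bit int is assumed.  */
static void
example_fix_trunc_saturates (void)
{
  REAL_VALUE_TYPE big;
  tree r, i;

  real_from_string (&big, "1.0e30");
  r = build_real (double_type_node, big);
  i = fold_convert_const_int_from_real (FIX_TRUNC_EXPR, integer_type_node, r);
  /* I is INT_MAX as an INTEGER_CST, with TREE_OVERFLOW set.  */
  gcc_assert (i != NULL_TREE && TREE_OVERFLOW (i));
}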
1979 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1980 to another floating-point type. */
1982 static tree
1983 fold_convert_const_real_from_real (tree type, tree arg1)
1985 REAL_VALUE_TYPE value;
1986 tree t;
1988 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1989 t = build_real (type, value);
1991 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1992 return t;
1995 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1996 type TYPE. If no simplification can be done return NULL_TREE. */
1998 static tree
1999 fold_convert_const (enum tree_code code, tree type, tree arg1)
2001 if (TREE_TYPE (arg1) == type)
2002 return arg1;
2004 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2006 if (TREE_CODE (arg1) == INTEGER_CST)
2007 return fold_convert_const_int_from_int (type, arg1);
2008 else if (TREE_CODE (arg1) == REAL_CST)
2009 return fold_convert_const_int_from_real (code, type, arg1);
2011 else if (TREE_CODE (type) == REAL_TYPE)
2013 if (TREE_CODE (arg1) == INTEGER_CST)
2014 return build_real_from_int_cst (type, arg1);
2015 if (TREE_CODE (arg1) == REAL_CST)
2016 return fold_convert_const_real_from_real (type, arg1);
2018 return NULL_TREE;
2021 /* Construct a vector of zero elements of vector type TYPE. */
2023 static tree
2024 build_zero_vector (tree type)
2026 tree elem, list;
2027 int i, units;
2029 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2030 units = TYPE_VECTOR_SUBPARTS (type);
2032 list = NULL_TREE;
2033 for (i = 0; i < units; i++)
2034 list = tree_cons (NULL_TREE, elem, list);
2035 return build_vector (type, list);
2038 /* Convert expression ARG to type TYPE. Used by the middle-end for
2039 simple conversions in preference to calling the front-end's convert. */
2041 tree
2042 fold_convert (tree type, tree arg)
2044 tree orig = TREE_TYPE (arg);
2045 tree tem;
2047 if (type == orig)
2048 return arg;
2050 if (TREE_CODE (arg) == ERROR_MARK
2051 || TREE_CODE (type) == ERROR_MARK
2052 || TREE_CODE (orig) == ERROR_MARK)
2053 return error_mark_node;
2055 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2056 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2057 TYPE_MAIN_VARIANT (orig)))
2058 return fold_build1 (NOP_EXPR, type, arg);
2060 switch (TREE_CODE (type))
2062 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2063 case POINTER_TYPE: case REFERENCE_TYPE:
2064 case OFFSET_TYPE:
2065 if (TREE_CODE (arg) == INTEGER_CST)
2067 tem = fold_convert_const (NOP_EXPR, type, arg);
2068 if (tem != NULL_TREE)
2069 return tem;
2071 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2072 || TREE_CODE (orig) == OFFSET_TYPE)
2073 return fold_build1 (NOP_EXPR, type, arg);
2074 if (TREE_CODE (orig) == COMPLEX_TYPE)
2076 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2077 return fold_convert (type, tem);
2079 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2080 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2081 return fold_build1 (NOP_EXPR, type, arg);
2083 case REAL_TYPE:
2084 if (TREE_CODE (arg) == INTEGER_CST)
2086 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2087 if (tem != NULL_TREE)
2088 return tem;
2090 else if (TREE_CODE (arg) == REAL_CST)
2092 tem = fold_convert_const (NOP_EXPR, type, arg);
2093 if (tem != NULL_TREE)
2094 return tem;
2097 switch (TREE_CODE (orig))
2099 case INTEGER_TYPE:
2100 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2101 case POINTER_TYPE: case REFERENCE_TYPE:
2102 return fold_build1 (FLOAT_EXPR, type, arg);
2104 case REAL_TYPE:
2105 return fold_build1 (NOP_EXPR, type, arg);
2107 case COMPLEX_TYPE:
2108 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2109 return fold_convert (type, tem);
2111 default:
2112 gcc_unreachable ();
2115 case COMPLEX_TYPE:
2116 switch (TREE_CODE (orig))
2118 case INTEGER_TYPE:
2119 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2120 case POINTER_TYPE: case REFERENCE_TYPE:
2121 case REAL_TYPE:
2122 return build2 (COMPLEX_EXPR, type,
2123 fold_convert (TREE_TYPE (type), arg),
2124 fold_convert (TREE_TYPE (type), integer_zero_node));
2125 case COMPLEX_TYPE:
2127 tree rpart, ipart;
2129 if (TREE_CODE (arg) == COMPLEX_EXPR)
2131 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2132 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2133 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2136 arg = save_expr (arg);
2137 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2138 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2139 rpart = fold_convert (TREE_TYPE (type), rpart);
2140 ipart = fold_convert (TREE_TYPE (type), ipart);
2141 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2144 default:
2145 gcc_unreachable ();
2148 case VECTOR_TYPE:
2149 if (integer_zerop (arg))
2150 return build_zero_vector (type);
2151 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2152 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2153 || TREE_CODE (orig) == VECTOR_TYPE);
2154 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2156 case VOID_TYPE:
2157 tem = fold_ignored_result (arg);
2158 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2159 return tem;
2160 return fold_build1 (NOP_EXPR, type, tem);
2162 default:
2163 gcc_unreachable ();
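/* Editorial sketch, not part of the original file: converting an integer
   constant to a complex type goes through the COMPLEX_TYPE arm above and
   produces a COMPLEX_EXPR whose imaginary part is zero.  The function
   name is hypothetical.  */
static void
example_fold_convert_to_complex (void)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree c = fold_convert (complex_double_type_node, one);
  /* C is COMPLEX_EXPR <1.0, 0.0>, both parts of type double.  */
  gcc_assert (TREE_CODE (c) == COMPLEX_EXPR);
}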
2167 /* Return false if expr can be assumed not to be an lvalue, true
2168 otherwise. */
2170 static bool
2171 maybe_lvalue_p (tree x)
2173 /* We only need to wrap lvalue tree codes. */
2174 switch (TREE_CODE (x))
2176 case VAR_DECL:
2177 case PARM_DECL:
2178 case RESULT_DECL:
2179 case LABEL_DECL:
2180 case FUNCTION_DECL:
2181 case SSA_NAME:
2183 case COMPONENT_REF:
2184 case INDIRECT_REF:
2185 case ALIGN_INDIRECT_REF:
2186 case MISALIGNED_INDIRECT_REF:
2187 case ARRAY_REF:
2188 case ARRAY_RANGE_REF:
2189 case BIT_FIELD_REF:
2190 case OBJ_TYPE_REF:
2192 case REALPART_EXPR:
2193 case IMAGPART_EXPR:
2194 case PREINCREMENT_EXPR:
2195 case PREDECREMENT_EXPR:
2196 case SAVE_EXPR:
2197 case TRY_CATCH_EXPR:
2198 case WITH_CLEANUP_EXPR:
2199 case COMPOUND_EXPR:
2200 case MODIFY_EXPR:
2201 case GIMPLE_MODIFY_STMT:
2202 case TARGET_EXPR:
2203 case COND_EXPR:
2204 case BIND_EXPR:
2205 case MIN_EXPR:
2206 case MAX_EXPR:
2207 break;
2209 default:
2210 /* Assume the worst for front-end tree codes. */
2211 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2212 break;
2213 return false;
2216 return true;
2219 /* Return an expr equal to X but certainly not valid as an lvalue. */
2221 tree
2222 non_lvalue (tree x)
2224 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2225 us. */
2226 if (in_gimple_form)
2227 return x;
2229 if (! maybe_lvalue_p (x))
2230 return x;
2231 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2234 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2235 Zero means allow extended lvalues. */
2237 int pedantic_lvalues;
2239 /* When pedantic, return an expr equal to X but certainly not valid as a
2240 pedantic lvalue. Otherwise, return X. */
2242 static tree
2243 pedantic_non_lvalue (tree x)
2245 if (pedantic_lvalues)
2246 return non_lvalue (x);
2247 else
2248 return x;
2251 /* Given a tree comparison code, return the code that is the logical inverse
2252 of the given code. It is not safe to do this for floating-point
2253 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2254 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2256 enum tree_code
2257 invert_tree_comparison (enum tree_code code, bool honor_nans)
2259 if (honor_nans && flag_trapping_math)
2260 return ERROR_MARK;
2262 switch (code)
2264 case EQ_EXPR:
2265 return NE_EXPR;
2266 case NE_EXPR:
2267 return EQ_EXPR;
2268 case GT_EXPR:
2269 return honor_nans ? UNLE_EXPR : LE_EXPR;
2270 case GE_EXPR:
2271 return honor_nans ? UNLT_EXPR : LT_EXPR;
2272 case LT_EXPR:
2273 return honor_nans ? UNGE_EXPR : GE_EXPR;
2274 case LE_EXPR:
2275 return honor_nans ? UNGT_EXPR : GT_EXPR;
2276 case LTGT_EXPR:
2277 return UNEQ_EXPR;
2278 case UNEQ_EXPR:
2279 return LTGT_EXPR;
2280 case UNGT_EXPR:
2281 return LE_EXPR;
2282 case UNGE_EXPR:
2283 return LT_EXPR;
2284 case UNLT_EXPR:
2285 return GE_EXPR;
2286 case UNLE_EXPR:
2287 return GT_EXPR;
2288 case ORDERED_EXPR:
2289 return UNORDERED_EXPR;
2290 case UNORDERED_EXPR:
2291 return ORDERED_EXPR;
2292 default:
2293 gcc_unreachable ();
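/* Editorial sketch, not part of the original file: without NaNs, "a < b"
   inverts to "a >= b"; when NaNs are honored the inverse is the unordered
   form UNGE_EXPR, and under -ftrapping-math the function refuses
   (ERROR_MARK) because the unordered form would not trap.  */
static void
example_invert_comparison (void)
{
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  if (!flag_trapping_math)
    gcc_assert (invert_tree_comparison (LT_EXPR, true) == UNGE_EXPR);
}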
2297 /* Similar, but return the comparison that results if the operands are
2298 swapped. This is safe for floating-point. */
2300 enum tree_code
2301 swap_tree_comparison (enum tree_code code)
2303 switch (code)
2305 case EQ_EXPR:
2306 case NE_EXPR:
2307 case ORDERED_EXPR:
2308 case UNORDERED_EXPR:
2309 case LTGT_EXPR:
2310 case UNEQ_EXPR:
2311 return code;
2312 case GT_EXPR:
2313 return LT_EXPR;
2314 case GE_EXPR:
2315 return LE_EXPR;
2316 case LT_EXPR:
2317 return GT_EXPR;
2318 case LE_EXPR:
2319 return GE_EXPR;
2320 case UNGT_EXPR:
2321 return UNLT_EXPR;
2322 case UNGE_EXPR:
2323 return UNLE_EXPR;
2324 case UNLT_EXPR:
2325 return UNGT_EXPR;
2326 case UNLE_EXPR:
2327 return UNGE_EXPR;
2328 default:
2329 gcc_unreachable ();
2334 /* Convert a comparison tree code from an enum tree_code representation
2335 into a compcode bit-based encoding. This function is the inverse of
2336 compcode_to_comparison. */
2338 static enum comparison_code
2339 comparison_to_compcode (enum tree_code code)
2341 switch (code)
2343 case LT_EXPR:
2344 return COMPCODE_LT;
2345 case EQ_EXPR:
2346 return COMPCODE_EQ;
2347 case LE_EXPR:
2348 return COMPCODE_LE;
2349 case GT_EXPR:
2350 return COMPCODE_GT;
2351 case NE_EXPR:
2352 return COMPCODE_NE;
2353 case GE_EXPR:
2354 return COMPCODE_GE;
2355 case ORDERED_EXPR:
2356 return COMPCODE_ORD;
2357 case UNORDERED_EXPR:
2358 return COMPCODE_UNORD;
2359 case UNLT_EXPR:
2360 return COMPCODE_UNLT;
2361 case UNEQ_EXPR:
2362 return COMPCODE_UNEQ;
2363 case UNLE_EXPR:
2364 return COMPCODE_UNLE;
2365 case UNGT_EXPR:
2366 return COMPCODE_UNGT;
2367 case LTGT_EXPR:
2368 return COMPCODE_LTGT;
2369 case UNGE_EXPR:
2370 return COMPCODE_UNGE;
2371 default:
2372 gcc_unreachable ();
2376 /* Convert a compcode bit-based encoding of a comparison operator back
2377 to GCC's enum tree_code representation. This function is the
2378 inverse of comparison_to_compcode. */
2380 static enum tree_code
2381 compcode_to_comparison (enum comparison_code code)
2383 switch (code)
2385 case COMPCODE_LT:
2386 return LT_EXPR;
2387 case COMPCODE_EQ:
2388 return EQ_EXPR;
2389 case COMPCODE_LE:
2390 return LE_EXPR;
2391 case COMPCODE_GT:
2392 return GT_EXPR;
2393 case COMPCODE_NE:
2394 return NE_EXPR;
2395 case COMPCODE_GE:
2396 return GE_EXPR;
2397 case COMPCODE_ORD:
2398 return ORDERED_EXPR;
2399 case COMPCODE_UNORD:
2400 return UNORDERED_EXPR;
2401 case COMPCODE_UNLT:
2402 return UNLT_EXPR;
2403 case COMPCODE_UNEQ:
2404 return UNEQ_EXPR;
2405 case COMPCODE_UNLE:
2406 return UNLE_EXPR;
2407 case COMPCODE_UNGT:
2408 return UNGT_EXPR;
2409 case COMPCODE_LTGT:
2410 return LTGT_EXPR;
2411 case COMPCODE_UNGE:
2412 return UNGE_EXPR;
2413 default:
2414 gcc_unreachable ();
2418 /* Return a tree for the comparison which is the combination of
2419 doing the AND or OR (depending on CODE) of the two operations LCODE
2420 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2421 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2422 if this makes the transformation invalid. */
2424 tree
2425 combine_comparisons (enum tree_code code, enum tree_code lcode,
2426 enum tree_code rcode, tree truth_type,
2427 tree ll_arg, tree lr_arg)
2429 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2430 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2431 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2432 enum comparison_code compcode;
2434 switch (code)
2436 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2437 compcode = lcompcode & rcompcode;
2438 break;
2440 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2441 compcode = lcompcode | rcompcode;
2442 break;
2444 default:
2445 return NULL_TREE;
2448 if (!honor_nans)
2450 /* Eliminate unordered comparisons, as well as LTGT and ORD
2451 which are not used unless the mode has NaNs. */
2452 compcode &= ~COMPCODE_UNORD;
2453 if (compcode == COMPCODE_LTGT)
2454 compcode = COMPCODE_NE;
2455 else if (compcode == COMPCODE_ORD)
2456 compcode = COMPCODE_TRUE;
2458 else if (flag_trapping_math)
2460 /* Check that the original operation and the optimized ones will trap
2461 under the same condition. */
2462 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2463 && (lcompcode != COMPCODE_EQ)
2464 && (lcompcode != COMPCODE_ORD);
2465 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2466 && (rcompcode != COMPCODE_EQ)
2467 && (rcompcode != COMPCODE_ORD);
2468 bool trap = (compcode & COMPCODE_UNORD) == 0
2469 && (compcode != COMPCODE_EQ)
2470 && (compcode != COMPCODE_ORD);
2472 /* In a short-circuited boolean expression the LHS might be
2473 such that the RHS, if evaluated, will never trap. For
2474 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2475 if neither x nor y is NaN. (This is a mixed blessing: for
2476 example, the expression above will never trap, hence
2477 optimizing it to x < y would be invalid). */
2478 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2479 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2480 rtrap = false;
2482 /* If the comparison was short-circuited, and only the RHS
2483 trapped, we may now generate a spurious trap. */
2484 if (rtrap && !ltrap
2485 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2486 return NULL_TREE;
2488 /* If we changed the conditions that cause a trap, we lose. */
2489 if ((ltrap || rtrap) != trap)
2490 return NULL_TREE;
2493 if (compcode == COMPCODE_TRUE)
2494 return constant_boolean_node (true, truth_type);
2495 else if (compcode == COMPCODE_FALSE)
2496 return constant_boolean_node (false, truth_type);
2497 else
2498 return fold_build2 (compcode_to_comparison (compcode),
2499 truth_type, ll_arg, lr_arg);
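/* Editorial sketch, not part of the original file: with the bit encoding
   above (LT = 1, EQ = 2, GT = 4, UNORD = 8), ORing the codes for "x < y"
   and "x == y" gives 3, i.e. COMPCODE_LE, so the disjunction folds to a
   single "x <= y"; ANDing LT with GT would give 0, COMPCODE_FALSE.  The
   function name is hypothetical.  */
static void
example_combine_comparisons (void)
{
  tree x = build_decl (VAR_DECL, get_identifier ("x"), integer_type_node);
  tree y = build_decl (VAR_DECL, get_identifier ("y"), integer_type_node);
  tree t = combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
				boolean_type_node, x, y);
  /* T is the tree "x <= y".  */
  gcc_assert (t != NULL_TREE && TREE_CODE (t) == LE_EXPR);
}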
2502 /* Return nonzero if CODE is a tree code that represents a truth value. */
2504 static int
2505 truth_value_p (enum tree_code code)
2507 return (TREE_CODE_CLASS (code) == tcc_comparison
2508 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2509 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2510 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2513 /* Return nonzero if two operands (typically of the same tree node)
2514 are necessarily equal. If either argument has side-effects this
2515 function returns zero. FLAGS modifies behavior as follows:
2517 If OEP_ONLY_CONST is set, only return nonzero for constants.
2518 This function tests whether the operands are indistinguishable;
2519 it does not test whether they are equal using C's == operation.
2520 The distinction is important for IEEE floating point, because
2521 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2522 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2524 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2525 even though it may hold multiple values during a function.
2526 This is because a GCC tree node guarantees that nothing else is
2527 executed between the evaluation of its "operands" (which may often
2528 be evaluated in arbitrary order). Hence if the operands themselves
2529 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2530 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2531 unset means assuming isochronic (or instantaneous) tree equivalence.
2532 Unless comparing arbitrary expression trees, such as from different
2533 statements, this flag can usually be left unset.
2535 If OEP_PURE_SAME is set, then pure functions with identical arguments
2536 are considered the same. It is used when the caller has other ways
2537 to ensure that global memory is unchanged in between. */
2539 int
2540 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2542 /* If either is ERROR_MARK, they aren't equal. */
2543 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2544 return 0;
2546 /* If the two types don't have the same signedness, then we can't consider
2547 them equal. We must check this before the STRIP_NOPS calls
2548 because they may change the signedness of the arguments. */
2549 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2550 return 0;
2552 /* If the two types don't have the same precision, then it is not safe
2553 to strip NOPs. */
2554 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2555 return 0;
2557 STRIP_NOPS (arg0);
2558 STRIP_NOPS (arg1);
2560 /* In case both args are comparisons but with different comparison
2561 code, try to swap the comparison operands of one arg to produce
2562 a match and compare that variant. */
2563 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2564 && COMPARISON_CLASS_P (arg0)
2565 && COMPARISON_CLASS_P (arg1))
2567 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2569 if (TREE_CODE (arg0) == swap_code)
2570 return operand_equal_p (TREE_OPERAND (arg0, 0),
2571 TREE_OPERAND (arg1, 1), flags)
2572 && operand_equal_p (TREE_OPERAND (arg0, 1),
2573 TREE_OPERAND (arg1, 0), flags);
2576 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2577 /* This is needed for conversions and for COMPONENT_REF.
2578 Might as well play it safe and always test this. */
2579 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2580 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2581 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2582 return 0;
2584 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2585 We don't care about side effects in that case because the SAVE_EXPR
2586 takes care of that for us. In all other cases, two expressions are
2587 equal if they have no side effects. If we have two identical
2588 expressions with side effects that should be treated the same due
2589 to the only side effects being identical SAVE_EXPR's, that will
2590 be detected in the recursive calls below. */
2591 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2592 && (TREE_CODE (arg0) == SAVE_EXPR
2593 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2594 return 1;
2596 /* Next handle constant cases, those for which we can return 1 even
2597 if ONLY_CONST is set. */
2598 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2599 switch (TREE_CODE (arg0))
2601 case INTEGER_CST:
2602 return tree_int_cst_equal (arg0, arg1);
2604 case REAL_CST:
2605 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2606 TREE_REAL_CST (arg1)))
2607 return 1;
2610 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2612 /* If we do not distinguish between signed and unsigned zero,
2613 consider them equal. */
2614 if (real_zerop (arg0) && real_zerop (arg1))
2615 return 1;
2617 return 0;
2619 case VECTOR_CST:
2621 tree v1, v2;
2623 v1 = TREE_VECTOR_CST_ELTS (arg0);
2624 v2 = TREE_VECTOR_CST_ELTS (arg1);
2625 while (v1 && v2)
2627 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2628 flags))
2629 return 0;
2630 v1 = TREE_CHAIN (v1);
2631 v2 = TREE_CHAIN (v2);
2634 return v1 == v2;
2637 case COMPLEX_CST:
2638 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2639 flags)
2640 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2641 flags));
2643 case STRING_CST:
2644 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2645 && ! memcmp (TREE_STRING_POINTER (arg0),
2646 TREE_STRING_POINTER (arg1),
2647 TREE_STRING_LENGTH (arg0)));
2649 case ADDR_EXPR:
2650 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2651 0);
2652 default:
2653 break;
2656 if (flags & OEP_ONLY_CONST)
2657 return 0;
2659 /* Define macros to test an operand from arg0 and arg1 for equality and a
2660 variant that allows null and views null as being different from any
2661 non-null value. In the latter case, if either is null, then both
2662 must be; otherwise, do the normal comparison. */
2663 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2664 TREE_OPERAND (arg1, N), flags)
2666 #define OP_SAME_WITH_NULL(N) \
2667 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2668 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2670 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2672 case tcc_unary:
2673 /* Two conversions are equal only if signedness and modes match. */
2674 switch (TREE_CODE (arg0))
2676 case NOP_EXPR:
2677 case CONVERT_EXPR:
2678 case FIX_TRUNC_EXPR:
2679 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2680 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2681 return 0;
2682 break;
2683 default:
2684 break;
2687 return OP_SAME (0);
2690 case tcc_comparison:
2691 case tcc_binary:
2692 if (OP_SAME (0) && OP_SAME (1))
2693 return 1;
2695 /* For commutative ops, allow the other order. */
2696 return (commutative_tree_code (TREE_CODE (arg0))
2697 && operand_equal_p (TREE_OPERAND (arg0, 0),
2698 TREE_OPERAND (arg1, 1), flags)
2699 && operand_equal_p (TREE_OPERAND (arg0, 1),
2700 TREE_OPERAND (arg1, 0), flags));
2702 case tcc_reference:
2703 /* If either of the pointer (or reference) expressions we are
2704 dereferencing contains a side effect, these cannot be equal. */
2705 if (TREE_SIDE_EFFECTS (arg0)
2706 || TREE_SIDE_EFFECTS (arg1))
2707 return 0;
2709 switch (TREE_CODE (arg0))
2711 case INDIRECT_REF:
2712 case ALIGN_INDIRECT_REF:
2713 case MISALIGNED_INDIRECT_REF:
2714 case REALPART_EXPR:
2715 case IMAGPART_EXPR:
2716 return OP_SAME (0);
2718 case ARRAY_REF:
2719 case ARRAY_RANGE_REF:
2720 /* Operands 2 and 3 may be null. */
2721 return (OP_SAME (0)
2722 && OP_SAME (1)
2723 && OP_SAME_WITH_NULL (2)
2724 && OP_SAME_WITH_NULL (3));
2726 case COMPONENT_REF:
2727 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2728 may be NULL when we're called to compare MEM_EXPRs. */
2729 return OP_SAME_WITH_NULL (0)
2730 && OP_SAME (1)
2731 && OP_SAME_WITH_NULL (2);
2733 case BIT_FIELD_REF:
2734 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2736 default:
2737 return 0;
2740 case tcc_expression:
2741 switch (TREE_CODE (arg0))
2743 case ADDR_EXPR:
2744 case TRUTH_NOT_EXPR:
2745 return OP_SAME (0);
2747 case TRUTH_ANDIF_EXPR:
2748 case TRUTH_ORIF_EXPR:
2749 return OP_SAME (0) && OP_SAME (1);
2751 case TRUTH_AND_EXPR:
2752 case TRUTH_OR_EXPR:
2753 case TRUTH_XOR_EXPR:
2754 if (OP_SAME (0) && OP_SAME (1))
2755 return 1;
2757 /* Otherwise take into account this is a commutative operation. */
2758 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2759 TREE_OPERAND (arg1, 1), flags)
2760 && operand_equal_p (TREE_OPERAND (arg0, 1),
2761 TREE_OPERAND (arg1, 0), flags));
2763 case CALL_EXPR:
2764 /* If the CALL_EXPRs call different functions, then they
2765 clearly cannot be equal. */
2766 if (!OP_SAME (0))
2767 return 0;
2770 unsigned int cef = call_expr_flags (arg0);
2771 if (flags & OEP_PURE_SAME)
2772 cef &= ECF_CONST | ECF_PURE;
2773 else
2774 cef &= ECF_CONST;
2775 if (!cef)
2776 return 0;
2779 /* Now see if all the arguments are the same. operand_equal_p
2780 does not handle TREE_LIST, so we walk the operands here
2781 feeding them to operand_equal_p. */
2782 arg0 = TREE_OPERAND (arg0, 1);
2783 arg1 = TREE_OPERAND (arg1, 1);
2784 while (arg0 && arg1)
2786 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2787 flags))
2788 return 0;
2790 arg0 = TREE_CHAIN (arg0);
2791 arg1 = TREE_CHAIN (arg1);
2794 /* If we get here and both argument lists are exhausted
2795 then the CALL_EXPRs are equal. */
2796 return ! (arg0 || arg1);
2798 default:
2799 return 0;
2802 case tcc_declaration:
2803 /* Consider __builtin_sqrt equal to sqrt. */
2804 return (TREE_CODE (arg0) == FUNCTION_DECL
2805 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2806 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2807 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2809 default:
2810 return 0;
2813 #undef OP_SAME
2814 #undef OP_SAME_WITH_NULL
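/* Editorial sketch, not part of the original file: the tcc_binary arm
   above accepts either operand order for commutative codes, so "a + b"
   and "b + a" compare equal.  The function name is hypothetical.  */
static void
example_operand_equal_commutes (void)
{
  tree a = build_decl (VAR_DECL, get_identifier ("a"), integer_type_node);
  tree b = build_decl (VAR_DECL, get_identifier ("b"), integer_type_node);
  tree ab = build2 (PLUS_EXPR, integer_type_node, a, b);
  tree ba = build2 (PLUS_EXPR, integer_type_node, b, a);
  gcc_assert (operand_equal_p (ab, ba, 0));
}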
2817 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2818 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2820 When in doubt, return 0. */
2822 static int
2823 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2825 int unsignedp1, unsignedpo;
2826 tree primarg0, primarg1, primother;
2827 unsigned int correct_width;
2829 if (operand_equal_p (arg0, arg1, 0))
2830 return 1;
2832 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2833 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2834 return 0;
2836 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2837 and see if the inner values are the same. This removes any
2838 signedness comparison, which doesn't matter here. */
2839 primarg0 = arg0, primarg1 = arg1;
2840 STRIP_NOPS (primarg0);
2841 STRIP_NOPS (primarg1);
2842 if (operand_equal_p (primarg0, primarg1, 0))
2843 return 1;
2845 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2846 actual comparison operand, ARG0.
2848 First throw away any conversions to wider types
2849 already present in the operands. */
2851 primarg1 = get_narrower (arg1, &unsignedp1);
2852 primother = get_narrower (other, &unsignedpo);
2854 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2855 if (unsignedp1 == unsignedpo
2856 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2857 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2859 tree type = TREE_TYPE (arg0);
2861 /* Make sure shorter operand is extended the right way
2862 to match the longer operand. */
2863 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2864 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2866 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2867 return 1;
2870 return 0;
2873 /* See if ARG is an expression that is either a comparison or is performing
2874 arithmetic on comparisons. The comparisons must only be comparing
2875 two different values, which will be stored in *CVAL1 and *CVAL2; if
2876 they are nonzero it means that some operands have already been found.
2877 No variables may be used anywhere else in the expression except in the
2878 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2879 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2881 If this is true, return 1. Otherwise, return zero. */
2883 static int
2884 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2886 enum tree_code code = TREE_CODE (arg);
2887 enum tree_code_class class = TREE_CODE_CLASS (code);
2889 /* We can handle some of the tcc_expression cases here. */
2890 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2891 class = tcc_unary;
2892 else if (class == tcc_expression
2893 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2894 || code == COMPOUND_EXPR))
2895 class = tcc_binary;
2897 else if (class == tcc_expression && code == SAVE_EXPR
2898 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2900 /* If we've already found a CVAL1 or CVAL2, this expression is
2901 too complex to handle. */
2902 if (*cval1 || *cval2)
2903 return 0;
2905 class = tcc_unary;
2906 *save_p = 1;
2909 switch (class)
2911 case tcc_unary:
2912 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2914 case tcc_binary:
2915 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2916 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2917 cval1, cval2, save_p));
2919 case tcc_constant:
2920 return 1;
2922 case tcc_expression:
2923 if (code == COND_EXPR)
2924 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2925 cval1, cval2, save_p)
2926 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2927 cval1, cval2, save_p)
2928 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2929 cval1, cval2, save_p));
2930 return 0;
2932 case tcc_comparison:
2933 /* First see if we can handle the first operand, then the second. For
2934 the second operand, we know *CVAL1 can't be zero. It must be that
2935 one side of the comparison is each of the values; test for the
2936 case where this isn't true by failing if the two operands
2937 are the same. */
2939 if (operand_equal_p (TREE_OPERAND (arg, 0),
2940 TREE_OPERAND (arg, 1), 0))
2941 return 0;
2943 if (*cval1 == 0)
2944 *cval1 = TREE_OPERAND (arg, 0);
2945 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2946 ;
2947 else if (*cval2 == 0)
2948 *cval2 = TREE_OPERAND (arg, 0);
2949 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2950 ;
2951 else
2952 return 0;
2954 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2955 ;
2956 else if (*cval2 == 0)
2957 *cval2 = TREE_OPERAND (arg, 1);
2958 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2959 ;
2960 else
2961 return 0;
2963 return 1;
2965 default:
2966 return 0;
2970 /* ARG is a tree that is known to contain just arithmetic operations and
2971 comparisons. Evaluate the operations in the tree substituting NEW0 for
2972 any occurrence of OLD0 as an operand of a comparison and likewise for
2973 NEW1 and OLD1. */
2975 static tree
2976 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2978 tree type = TREE_TYPE (arg);
2979 enum tree_code code = TREE_CODE (arg);
2980 enum tree_code_class class = TREE_CODE_CLASS (code);
2982 /* We can handle some of the tcc_expression cases here. */
2983 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2984 class = tcc_unary;
2985 else if (class == tcc_expression
2986 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2987 class = tcc_binary;
2989 switch (class)
2991 case tcc_unary:
2992 return fold_build1 (code, type,
2993 eval_subst (TREE_OPERAND (arg, 0),
2994 old0, new0, old1, new1));
2996 case tcc_binary:
2997 return fold_build2 (code, type,
2998 eval_subst (TREE_OPERAND (arg, 0),
2999 old0, new0, old1, new1),
3000 eval_subst (TREE_OPERAND (arg, 1),
3001 old0, new0, old1, new1));
3003 case tcc_expression:
3004 switch (code)
3006 case SAVE_EXPR:
3007 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3009 case COMPOUND_EXPR:
3010 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3012 case COND_EXPR:
3013 return fold_build3 (code, type,
3014 eval_subst (TREE_OPERAND (arg, 0),
3015 old0, new0, old1, new1),
3016 eval_subst (TREE_OPERAND (arg, 1),
3017 old0, new0, old1, new1),
3018 eval_subst (TREE_OPERAND (arg, 2),
3019 old0, new0, old1, new1));
3020 default:
3021 break;
3023 /* Fall through - ??? */
3025 case tcc_comparison:
3027 tree arg0 = TREE_OPERAND (arg, 0);
3028 tree arg1 = TREE_OPERAND (arg, 1);
3030 /* We need to check both for exact equality and tree equality. The
3031 former will be true if the operand has a side-effect. In that
3032 case, we know the operand occurred exactly once. */
3034 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3035 arg0 = new0;
3036 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3037 arg0 = new1;
3039 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3040 arg1 = new0;
3041 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3042 arg1 = new1;
3044 return fold_build2 (code, type, arg0, arg1);
3047 default:
3048 return arg;
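/* Editorial sketch, not part of the original file: eval_subst plugs
   NEW0/NEW1 into every comparison operand matching OLD0/OLD1, so the
   folder can evaluate the whole expression at concrete values.  The
   function name is hypothetical.  */
static void
example_eval_subst (void)
{
  tree a = build_decl (VAR_DECL, get_identifier ("a"), integer_type_node);
  tree b = build_decl (VAR_DECL, get_identifier ("b"), integer_type_node);
  tree expr = build2 (TRUTH_ANDIF_EXPR, boolean_type_node,
		      build2 (LT_EXPR, boolean_type_node, a, b),
		      build2 (EQ_EXPR, boolean_type_node, a, b));
  tree two = build_int_cst (integer_type_node, 2);
  tree t = eval_subst (expr, a, integer_one_node, b, two);
  /* "(a < b) && (a == b)" evaluated at a = 1, b = 2: the rebuilt
     comparisons fold to "1 && 0", which the folder reduces to false.  */
  gcc_assert (integer_zerop (t));
}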
3052 /* Return a tree for the case when the result of an expression is RESULT
3053 converted to TYPE and OMITTED was previously an operand of the expression
3054 but is now not needed (e.g., we folded OMITTED * 0).
3056 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3057 the conversion of RESULT to TYPE. */
3059 tree
3060 omit_one_operand (tree type, tree result, tree omitted)
3062 tree t = fold_convert (type, result);
3064 if (TREE_SIDE_EFFECTS (omitted))
3065 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3067 return non_lvalue (t);
3070 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3072 static tree
3073 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3075 tree t = fold_convert (type, result);
3077 if (TREE_SIDE_EFFECTS (omitted))
3078 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3080 return pedantic_non_lvalue (t);
3083 /* Return a tree for the case when the result of an expression is RESULT
3084 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3085 of the expression but are now not needed.
3087 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3088 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3089 evaluated before OMITTED2. Otherwise, if neither has side effects,
3090 just do the conversion of RESULT to TYPE. */
3092 tree
3093 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3095 tree t = fold_convert (type, result);
3097 if (TREE_SIDE_EFFECTS (omitted2))
3098 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3099 if (TREE_SIDE_EFFECTS (omitted1))
3100 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3102 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
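/* Editorial sketch, not part of the original file: when folding discards
   an operand, e.g. "f () * 0", the omitted call must still be evaluated,
   so the result becomes the COMPOUND_EXPR "(f (), 0)".  For a
   side-effect-free operand the constant comes back bare, as in this
   hypothetical helper.  */
static void
example_omit_one_operand (void)
{
  tree x = build_decl (VAR_DECL, get_identifier ("x"), integer_type_node);
  tree t = omit_one_operand (integer_type_node, integer_zero_node, x);
  /* X has no side effects, so T is simply the constant zero.  */
  gcc_assert (integer_zerop (t));
}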
3106 /* Return a simplified tree node for the truth-negation of ARG. This
3107 never alters ARG itself. We assume that ARG is an operation that
3108 returns a truth value (0 or 1).
3110 FIXME: one would think we would fold the result, but it causes
3111 problems with the dominator optimizer. */
3113 tree
3114 fold_truth_not_expr (tree arg)
3116 tree type = TREE_TYPE (arg);
3117 enum tree_code code = TREE_CODE (arg);
3119 /* If this is a comparison, we can simply invert it, except for
3120 floating-point non-equality comparisons, in which case we just
3121 enclose a TRUTH_NOT_EXPR around what we have. */
3123 if (TREE_CODE_CLASS (code) == tcc_comparison)
3125 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3126 if (FLOAT_TYPE_P (op_type)
3127 && flag_trapping_math
3128 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3129 && code != NE_EXPR && code != EQ_EXPR)
3130 return NULL_TREE;
3131 else
3133 code = invert_tree_comparison (code,
3134 HONOR_NANS (TYPE_MODE (op_type)));
3135 if (code == ERROR_MARK)
3136 return NULL_TREE;
3137 else
3138 return build2 (code, type,
3139 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3143 switch (code)
3145 case INTEGER_CST:
3146 return constant_boolean_node (integer_zerop (arg), type);
3148 case TRUTH_AND_EXPR:
3149 return build2 (TRUTH_OR_EXPR, type,
3150 invert_truthvalue (TREE_OPERAND (arg, 0)),
3151 invert_truthvalue (TREE_OPERAND (arg, 1)));
3153 case TRUTH_OR_EXPR:
3154 return build2 (TRUTH_AND_EXPR, type,
3155 invert_truthvalue (TREE_OPERAND (arg, 0)),
3156 invert_truthvalue (TREE_OPERAND (arg, 1)));
3158 case TRUTH_XOR_EXPR:
3159 /* Here we can invert either operand. We invert the first operand
3160 unless the second operand is a TRUTH_NOT_EXPR in which case our
3161 result is the XOR of the first operand with the inside of the
3162 negation of the second operand. */
3164 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3165 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3166 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3167 else
3168 return build2 (TRUTH_XOR_EXPR, type,
3169 invert_truthvalue (TREE_OPERAND (arg, 0)),
3170 TREE_OPERAND (arg, 1));
3172 case TRUTH_ANDIF_EXPR:
3173 return build2 (TRUTH_ORIF_EXPR, type,
3174 invert_truthvalue (TREE_OPERAND (arg, 0)),
3175 invert_truthvalue (TREE_OPERAND (arg, 1)));
3177 case TRUTH_ORIF_EXPR:
3178 return build2 (TRUTH_ANDIF_EXPR, type,
3179 invert_truthvalue (TREE_OPERAND (arg, 0)),
3180 invert_truthvalue (TREE_OPERAND (arg, 1)));
3182 case TRUTH_NOT_EXPR:
3183 return TREE_OPERAND (arg, 0);
3185 case COND_EXPR:
3187 tree arg1 = TREE_OPERAND (arg, 1);
3188 tree arg2 = TREE_OPERAND (arg, 2);
3189 /* A COND_EXPR may have a throw as one operand, which
3190 then has void type. Just leave void operands
3191 as they are. */
3192 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3193 VOID_TYPE_P (TREE_TYPE (arg1))
3194 ? arg1 : invert_truthvalue (arg1),
3195 VOID_TYPE_P (TREE_TYPE (arg2))
3196 ? arg2 : invert_truthvalue (arg2));
3199 case COMPOUND_EXPR:
3200 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3201 invert_truthvalue (TREE_OPERAND (arg, 1)));
3203 case NON_LVALUE_EXPR:
3204 return invert_truthvalue (TREE_OPERAND (arg, 0));
3206 case NOP_EXPR:
3207 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3208 return build1 (TRUTH_NOT_EXPR, type, arg);
3210 case CONVERT_EXPR:
3211 case FLOAT_EXPR:
3212 return build1 (TREE_CODE (arg), type,
3213 invert_truthvalue (TREE_OPERAND (arg, 0)));
3215 case BIT_AND_EXPR:
3216 if (!integer_onep (TREE_OPERAND (arg, 1)))
3217 break;
3218 return build2 (EQ_EXPR, type, arg,
3219 build_int_cst (type, 0));
3221 case SAVE_EXPR:
3222 return build1 (TRUTH_NOT_EXPR, type, arg);
3224 case CLEANUP_POINT_EXPR:
3225 return build1 (CLEANUP_POINT_EXPR, type,
3226 invert_truthvalue (TREE_OPERAND (arg, 0)));
3228 default:
3229 break;
3232 return NULL_TREE;
3235 /* Return a simplified tree node for the truth-negation of ARG. This
3236 never alters ARG itself. We assume that ARG is an operation that
3237 returns a truth value (0 or 1).
3239 FIXME: one would think we would fold the result, but it causes
3240 problems with the dominator optimizer. */
3242 tree
3243 invert_truthvalue (tree arg)
3245 tree tem;
3247 if (TREE_CODE (arg) == ERROR_MARK)
3248 return arg;
3250 tem = fold_truth_not_expr (arg);
3251 if (!tem)
3252 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3254 return tem;
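/* Editorial sketch, not part of the original file: the TRUTH_AND_EXPR
   case above is De Morgan's law, so negating "a && b" produces
   "!a || !b".  The function name is hypothetical.  */
static void
example_invert_truthvalue (void)
{
  tree a = build_decl (VAR_DECL, get_identifier ("a"), boolean_type_node);
  tree b = build_decl (VAR_DECL, get_identifier ("b"), boolean_type_node);
  tree conj = build2 (TRUTH_AND_EXPR, boolean_type_node, a, b);
  tree inv = invert_truthvalue (conj);
  /* INV is TRUTH_OR_EXPR <!a, !b>.  */
  gcc_assert (TREE_CODE (inv) == TRUTH_OR_EXPR);
}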
3257 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3258 operands are another bit-wise operation with a common input. If so,
3259 distribute the bit operations to save an operation and possibly two if
3260 constants are involved. For example, convert
3261 (A | B) & (A | C) into A | (B & C)
3262 Further simplification will occur if B and C are constants.
3264 If this optimization cannot be done, 0 will be returned. */
3266 static tree
3267 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3269 tree common;
3270 tree left, right;
3272 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3273 || TREE_CODE (arg0) == code
3274 || (TREE_CODE (arg0) != BIT_AND_EXPR
3275 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3276 return 0;
3278 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3280 common = TREE_OPERAND (arg0, 0);
3281 left = TREE_OPERAND (arg0, 1);
3282 right = TREE_OPERAND (arg1, 1);
3284 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3286 common = TREE_OPERAND (arg0, 0);
3287 left = TREE_OPERAND (arg0, 1);
3288 right = TREE_OPERAND (arg1, 0);
3290 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3292 common = TREE_OPERAND (arg0, 1);
3293 left = TREE_OPERAND (arg0, 0);
3294 right = TREE_OPERAND (arg1, 1);
3296 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3298 common = TREE_OPERAND (arg0, 1);
3299 left = TREE_OPERAND (arg0, 0);
3300 right = TREE_OPERAND (arg1, 0);
3302 else
3303 return 0;
3305 return fold_build2 (TREE_CODE (arg0), type, common,
3306 fold_build2 (code, type, left, right));
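/* Editorial sketch, not part of the original file: distributing
   "(a | 0xf0) & (a | 0x0f)" factors out the common A, giving
   "a | (0xf0 & 0x0f)"; the inner fold then reduces the constant part to
   zero.  The function name is hypothetical.  */
static void
example_distribute_bit_expr (void)
{
  tree a = build_decl (VAR_DECL, get_identifier ("a"), integer_type_node);
  tree lhs = build2 (BIT_IOR_EXPR, integer_type_node, a,
		     build_int_cst (integer_type_node, 0xf0));
  tree rhs = build2 (BIT_IOR_EXPR, integer_type_node, a,
		     build_int_cst (integer_type_node, 0x0f));
  tree t = distribute_bit_expr (BIT_AND_EXPR, integer_type_node, lhs, rhs);
  /* T reduces to A itself (possibly wrapped as a non-lvalue), since
     0xf0 & 0x0f is 0 and "a | 0" folds to "a".  */
  gcc_assert (t != 0);
}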
3309 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3310 with code CODE. This optimization is unsafe. */
3311 static tree
3312 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3314 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3315 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3317 /* (A / C) +- (B / C) -> (A +- B) / C. */
3318 if (mul0 == mul1
3319 && operand_equal_p (TREE_OPERAND (arg0, 1),
3320 TREE_OPERAND (arg1, 1), 0))
3321 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3322 fold_build2 (code, type,
3323 TREE_OPERAND (arg0, 0),
3324 TREE_OPERAND (arg1, 0)),
3325 TREE_OPERAND (arg0, 1));
3327 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3328 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3329 TREE_OPERAND (arg1, 0), 0)
3330 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3331 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3333 REAL_VALUE_TYPE r0, r1;
3334 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3335 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3336 if (!mul0)
3337 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3338 if (!mul1)
3339 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3340 real_arithmetic (&r0, code, &r0, &r1);
3341 return fold_build2 (MULT_EXPR, type,
3342 TREE_OPERAND (arg0, 0),
3343 build_real (type, r0));
3346 return NULL_TREE;
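/* Editorial sketch, not part of the original file: the second pattern
   above rewrites "x / 2.0 + x / 4.0" as "x * 0.75", folding 1/2 + 1/4 at
   compile time.  Here the constants happen to be exact, but in general
   1/C1 + 1/C2 rounds, which is why the caller guards this as unsafe.
   The function name is hypothetical.  */
static void
example_distribute_real_division (void)
{
  REAL_VALUE_TYPE two, four;
  tree x, t;

  real_from_string (&two, "2.0");
  real_from_string (&four, "4.0");
  x = build_decl (VAR_DECL, get_identifier ("x"), double_type_node);
  t = distribute_real_division
    (PLUS_EXPR, double_type_node,
     build2 (RDIV_EXPR, double_type_node, x,
	     build_real (double_type_node, two)),
     build2 (RDIV_EXPR, double_type_node, x,
	     build_real (double_type_node, four)));
  /* T is the tree "x * 7.5e-1".  */
  gcc_assert (t != NULL_TREE && TREE_CODE (t) == MULT_EXPR);
}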
3349 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3350 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3352 static tree
3353 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3354 int unsignedp)
3356 tree result;
3358 if (bitpos == 0)
3360 tree size = TYPE_SIZE (TREE_TYPE (inner));
3361 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3362 || POINTER_TYPE_P (TREE_TYPE (inner)))
3363 && host_integerp (size, 0)
3364 && tree_low_cst (size, 0) == bitsize)
3365 return fold_convert (type, inner);
3368 result = build3 (BIT_FIELD_REF, type, inner,
3369 size_int (bitsize), bitsize_int (bitpos));
3371 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3373 return result;
3376 /* Optimize a bit-field compare.
3378 There are two cases: First is a compare against a constant and the
3379 second is a comparison of two items where the fields are at the same
3380 bit position relative to the start of a chunk (byte, halfword, word)
3381 large enough to contain it. In these cases we can avoid the shift
3382 implicit in bitfield extractions.
3384 For constants, we emit a compare of the shifted constant with the
3385 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3386 compared. For two fields at the same position, we do the ANDs with the
3387 similar mask and compare the result of the ANDs.
3389 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3390 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3391 are the left and right operands of the comparison, respectively.
3393 If the optimization described above can be done, we return the resulting
3394 tree. Otherwise we return zero. */
3396 static tree
3397 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3398 tree lhs, tree rhs)
3400 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3401 tree type = TREE_TYPE (lhs);
3402 tree signed_type, unsigned_type;
3403 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3404 enum machine_mode lmode, rmode, nmode;
3405 int lunsignedp, runsignedp;
3406 int lvolatilep = 0, rvolatilep = 0;
3407 tree linner, rinner = NULL_TREE;
3408 tree mask;
3409 tree offset;
3411 /* Get all the information about the extractions being done. If the bit size
3412 is the same as the size of the underlying object, we aren't doing an
3413 extraction at all and so can do nothing. We also don't want to
3414 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3415 then will no longer be able to replace it. */
3416 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3417 &lunsignedp, &lvolatilep, false);
3418 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3419 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3420 return 0;
3422 if (!const_p)
3424 /* If this is not a constant, we can only do something if bit positions,
3425 sizes, and signedness are the same. */
3426 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3427 &runsignedp, &rvolatilep, false);
3429 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3430 || lunsignedp != runsignedp || offset != 0
3431 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3432 return 0;
3435 /* See if we can find a mode to refer to this field. We should be able to,
3436 but fail if we can't. */
3437 nmode = get_best_mode (lbitsize, lbitpos,
3438 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3439 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3440 TYPE_ALIGN (TREE_TYPE (rinner))),
3441 word_mode, lvolatilep || rvolatilep);
3442 if (nmode == VOIDmode)
3443 return 0;
3445 /* Set signed and unsigned types of the precision of this mode for the
3446 shifts below. */
3447 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3448 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3450 /* Compute the bit position and size for the new reference and our offset
3451 within it. If the new reference is the same size as the original, we
3452 won't optimize anything, so return zero. */
3453 nbitsize = GET_MODE_BITSIZE (nmode);
3454 nbitpos = lbitpos & ~ (nbitsize - 1);
3455 lbitpos -= nbitpos;
3456 if (nbitsize == lbitsize)
3457 return 0;
3459 if (BYTES_BIG_ENDIAN)
3460 lbitpos = nbitsize - lbitsize - lbitpos;
3462 /* Make the mask to be used against the extracted field. */
3463 mask = build_int_cst_type (unsigned_type, -1);
3464 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3465 mask = const_binop (RSHIFT_EXPR, mask,
3466 size_int (nbitsize - lbitsize - lbitpos), 0);
3468 if (! const_p)
3469 /* If not comparing with constant, just rework the comparison
3470 and return. */
3471 return fold_build2 (code, compare_type,
3472 fold_build2 (BIT_AND_EXPR, unsigned_type,
3473 make_bit_field_ref (linner,
3474 unsigned_type,
3475 nbitsize, nbitpos,
3476 1),
3477 mask),
3478 fold_build2 (BIT_AND_EXPR, unsigned_type,
3479 make_bit_field_ref (rinner,
3480 unsigned_type,
3481 nbitsize, nbitpos,
3482 1),
3483 mask));
3485 /* Otherwise, we are handling the constant case. See if the constant is too
3486 big for the field. Warn and return a tree for 0 (false) if so. We do
3487 this not only for its own sake, but to avoid having to test for this
3488 error case below. If we didn't, we might generate wrong code.
3490 For unsigned fields, the constant shifted right by the field length should
3491 be all zero. For signed fields, the high-order bits should agree with
3492 the sign bit. */
3494 if (lunsignedp)
3496 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3497 fold_convert (unsigned_type, rhs),
3498 size_int (lbitsize), 0)))
3500 warning (0, "comparison is always %d due to width of bit-field",
3501 code == NE_EXPR);
3502 return constant_boolean_node (code == NE_EXPR, compare_type);
3505 else
3507 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3508 size_int (lbitsize - 1), 0);
3509 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3511 warning (0, "comparison is always %d due to width of bit-field",
3512 code == NE_EXPR);
3513 return constant_boolean_node (code == NE_EXPR, compare_type);
3517 /* Single-bit compares should always be against zero. */
3518 if (lbitsize == 1 && ! integer_zerop (rhs))
3520 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3521 rhs = build_int_cst (type, 0);
3524 /* Make a new bitfield reference, shift the constant over the
3525 appropriate number of bits and mask it with the computed mask
3526 (in case this was a signed field). If we changed it, make a new one. */
3527 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3528 if (lvolatilep)
3530 TREE_SIDE_EFFECTS (lhs) = 1;
3531 TREE_THIS_VOLATILE (lhs) = 1;
3534 rhs = const_binop (BIT_AND_EXPR,
3535 const_binop (LSHIFT_EXPR,
3536 fold_convert (unsigned_type, rhs),
3537 size_int (lbitpos), 0),
3538 mask, 0);
3540 return build2 (code, compare_type,
3541 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3542 rhs);
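/* Editorial illustration, not part of the original file: at the source
   level the constant case above turns

     struct s { unsigned f : 3; unsigned g : 5; };
     int test (struct s *p) { return p->f == 5; }

   into a single mask-and-compare on the unit containing the field,
   roughly "(w & mask) == ((5 << shift) & mask)", instead of a shifted
   bitfield extraction.  The exact mask and shift depend on endianness
   and on the mode chosen by get_best_mode.  */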
3545 /* Subroutine for fold_truthop: decode a field reference.
3547 If EXP is a comparison reference, we return the innermost reference.
3549 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3550 set to the starting bit number.
3552 If the innermost field can be completely contained in a mode-sized
3553 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3555 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3556 otherwise it is not changed.
3558 *PUNSIGNEDP is set to the signedness of the field.
3560 *PMASK is set to the mask used. This is either contained in a
3561 BIT_AND_EXPR or derived from the width of the field.
3563 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3565 Return 0 if this is not a component reference or is one that we can't
3566 do anything with. */
3568 static tree
3569 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3570 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3571 int *punsignedp, int *pvolatilep,
3572 tree *pmask, tree *pand_mask)
3574 tree outer_type = 0;
3575 tree and_mask = 0;
3576 tree mask, inner, offset;
3577 tree unsigned_type;
3578 unsigned int precision;
3580 /* All the optimizations using this function assume integer fields.
3581 There are problems with FP fields since the type_for_size call
3582 below can fail for, e.g., XFmode. */
3583 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3584 return 0;
3586 /* We are interested in the bare arrangement of bits, so strip everything
3587 that doesn't affect the machine mode. However, record the type of the
3588 outermost expression if it may matter below. */
3589 if (TREE_CODE (exp) == NOP_EXPR
3590 || TREE_CODE (exp) == CONVERT_EXPR
3591 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3592 outer_type = TREE_TYPE (exp);
3593 STRIP_NOPS (exp);
3595 if (TREE_CODE (exp) == BIT_AND_EXPR)
3597 and_mask = TREE_OPERAND (exp, 1);
3598 exp = TREE_OPERAND (exp, 0);
3599 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3600 if (TREE_CODE (and_mask) != INTEGER_CST)
3601 return 0;
3604 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3605 punsignedp, pvolatilep, false);
3606 if ((inner == exp && and_mask == 0)
3607 || *pbitsize < 0 || offset != 0
3608 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3609 return 0;
3611 /* If the number of bits in the reference is the same as the bitsize of
3612 the outer type, then the outer type gives the signedness. Otherwise
3613 (in case of a small bitfield) the signedness is unchanged. */
3614 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3615 *punsignedp = TYPE_UNSIGNED (outer_type);
3617 /* Compute the mask to access the bitfield. */
3618 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3619 precision = TYPE_PRECISION (unsigned_type);
3621 mask = build_int_cst_type (unsigned_type, -1);
3623 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3624 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3626 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3627 if (and_mask != 0)
3628 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3629 fold_convert (unsigned_type, and_mask), mask);
3631 *pmask = mask;
3632 *pand_mask = and_mask;
3633 return inner;
3636 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3637 bit positions. */
3639 static int
3640 all_ones_mask_p (tree mask, int size)
3642 tree type = TREE_TYPE (mask);
3643 unsigned int precision = TYPE_PRECISION (type);
3644 tree tmask;
3646 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3648 return
3649 tree_int_cst_equal (mask,
3650 const_binop (RSHIFT_EXPR,
3651 const_binop (LSHIFT_EXPR, tmask,
3652 size_int (precision - size),
3653 0),
3654 size_int (precision - size), 0));
3657 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3658 represents the sign bit of EXP's type. If EXP represents a sign
3659 or zero extension, also test VAL against the unextended type.
3660 The return value is the (sub)expression whose sign bit is VAL,
3661 or NULL_TREE otherwise. */
3663 static tree
3664 sign_bit_p (tree exp, tree val)
3666 unsigned HOST_WIDE_INT mask_lo, lo;
3667 HOST_WIDE_INT mask_hi, hi;
3668 int width;
3669 tree t;
3671 /* Tree EXP must have an integral type. */
3672 t = TREE_TYPE (exp);
3673 if (! INTEGRAL_TYPE_P (t))
3674 return NULL_TREE;
3676 /* Tree VAL must be an integer constant. */
3677 if (TREE_CODE (val) != INTEGER_CST
3678 || TREE_OVERFLOW (val))
3679 return NULL_TREE;
3681 width = TYPE_PRECISION (t);
3682 if (width > HOST_BITS_PER_WIDE_INT)
3684 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3685 lo = 0;
3687 mask_hi = ((unsigned HOST_WIDE_INT) -1
3688 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3689 mask_lo = -1;
3691 else
3693 hi = 0;
3694 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3696 mask_hi = 0;
3697 mask_lo = ((unsigned HOST_WIDE_INT) -1
3698 >> (HOST_BITS_PER_WIDE_INT - width));
3701 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3702 treat VAL as if it were unsigned. */
3703 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3704 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3705 return exp;
3707 /* Handle extension from a narrower type. */
3708 if (TREE_CODE (exp) == NOP_EXPR
3709 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3710 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3712 return NULL_TREE;
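/* Editorial sketch, not part of the original file, assuming a 32-bit
   "int": the only constant whose bit pattern is 1 << 31 is recognized as
   the sign bit of X, and X itself comes back; any other constant yields
   NULL_TREE.  The function name is hypothetical.  */
static void
example_sign_bit_p (void)
{
  tree x = build_decl (VAR_DECL, get_identifier ("x"), integer_type_node);
  tree bit = build_int_cst_type (integer_type_node,
				 (HOST_WIDE_INT) 1 << 31);
  gcc_assert (sign_bit_p (x, bit) == x);
  gcc_assert (sign_bit_p (x, integer_one_node) == NULL_TREE);
}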
3715 /* Subroutine for fold_truthop: determine if an operand is simple enough
3716 to be evaluated unconditionally. */
3718 static int
3719 simple_operand_p (tree exp)
3721 /* Strip any conversions that don't change the machine mode. */
3722 STRIP_NOPS (exp);
3724 return (CONSTANT_CLASS_P (exp)
3725 || TREE_CODE (exp) == SSA_NAME
3726 || (DECL_P (exp)
3727 && ! TREE_ADDRESSABLE (exp)
3728 && ! TREE_THIS_VOLATILE (exp)
3729 && ! DECL_NONLOCAL (exp)
3730 /* Don't regard global variables as simple. They may be
3731 allocated in ways unknown to the compiler (shared memory,
3732 #pragma weak, etc). */
3733 && ! TREE_PUBLIC (exp)
3734 && ! DECL_EXTERNAL (exp)
3735 /* Loading a static variable is unduly expensive, but global
3736 registers aren't expensive. */
3737 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3740 /* The following functions are subroutines to fold_range_test and allow it to
3741 try to change a logical combination of comparisons into a range test.
3743 For example, both
3744 X == 2 || X == 3 || X == 4 || X == 5
3745 and
3746 X >= 2 && X <= 5
3747 are converted to
3748 (unsigned) (X - 2) <= 3
3750 We describe each set of comparisons as being either inside or outside
3751 a range, using a variable named like IN_P, and then describe the
3752 range with a lower and upper bound. If one of the bounds is omitted,
3753 it represents either the highest or lowest value of the type.
3755 In the comments below, we represent a range by two numbers in brackets
3756 preceded by a "+" to designate being inside that range, or a "-" to
3757 designate being outside that range, so the condition can be inverted by
3758 flipping the prefix. An omitted bound is represented by a "-". For
3759 example, "- [-, 10]" means being outside the range starting at the lowest
3760 possible value and ending at 10, in other words, being greater than 10.
3761 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3762 always false.
3764 We set up things so that the missing bounds are handled in a consistent
3765 manner so neither a missing bound nor "true" and "false" need to be
3766 handled using a special case. */
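/* Editorial sketch, not part of the original file: the transformation
   quoted above is the plain C identity below.  Values of X below 2 wrap
   around to huge unsigned numbers, so one unsigned comparison checks
   both bounds at once.  The function is hypothetical.  */
static int
example_range_test (int x)
{
  /* Equivalent to (x >= 2 && x <= 5).  */
  return (unsigned) (x - 2) <= 3;
}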
3768 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3769 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3770 and UPPER1_P are nonzero if the respective argument is an upper bound
3771 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3772 must be specified for a comparison. ARG1 will be converted to ARG0's
3773 type if both are specified. */
3775 static tree
3776 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3777 tree arg1, int upper1_p)
3779 tree tem;
3780 int result;
3781 int sgn0, sgn1;
3783 /* If neither arg represents infinity, do the normal operation.
3784 Else, if not a comparison, return infinity. Else handle the special
3785 comparison rules. Note that most of the cases below won't occur, but
3786 are handled for consistency. */
3788 if (arg0 != 0 && arg1 != 0)
3790 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3791 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3792 STRIP_NOPS (tem);
3793 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3796 if (TREE_CODE_CLASS (code) != tcc_comparison)
3797 return 0;
3799 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
3800 omitted upper bound, and 0 if the bound is present.  In real maths,
3801 open-ended ranges cannot be assumed equal.  But this is computer
3802 arithmetic, where numbers are finite, so we can stand in a value Z
3803 for each missing bound, Z lying beyond every representable number.
3804 This permits us to treat unbounded ranges as equal. */
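/* For example, comparing an omitted lower bound (SGN == -1) against an
   omitted upper bound (SGN == 1) with LT_EXPR yields -1 < 1, i.e. true:
   -Z is less than +Z.  Two omitted lower bounds compare EQ_EXPR-equal
   (-1 == -1), which is what makes ranges such as "+ [-, x]" merge
   consistently.  */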
3805 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3806 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3807 switch (code)
3809 case EQ_EXPR:
3810 result = sgn0 == sgn1;
3811 break;
3812 case NE_EXPR:
3813 result = sgn0 != sgn1;
3814 break;
3815 case LT_EXPR:
3816 result = sgn0 < sgn1;
3817 break;
3818 case LE_EXPR:
3819 result = sgn0 <= sgn1;
3820 break;
3821 case GT_EXPR:
3822 result = sgn0 > sgn1;
3823 break;
3824 case GE_EXPR:
3825 result = sgn0 >= sgn1;
3826 break;
3827 default:
3828 gcc_unreachable ();
3831 return constant_boolean_node (result, type);
3834 /* Given EXP, a logical expression, set the range it is testing into
3835 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3836 actually being tested. *PLOW and *PHIGH will be made of the same type
3837 as the returned expression. If EXP is not a comparison, we will most
3838 likely not be returning a useful value and range. */
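/* Worked example (illustrative): for EXP = "x + 1 <= 10" with a signed int
   x and wrapping disabled, the LE_EXPR case first records + [-, 10] for
   "x + 1", then the PLUS_EXPR case moves the constant across the bounds,
   so we return x with *PIN_P = 1, *PLOW omitted (unbounded below) and
   *PHIGH = 9.  */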
3840 static tree
3841 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3843 enum tree_code code;
3844 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3845 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3846 int in_p, n_in_p;
3847 tree low, high, n_low, n_high;
3849 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3850 and see if we can refine the range. Some of the cases below may not
3851 happen, but it doesn't seem worth worrying about this. We "continue"
3852 the outer loop when we've changed something; otherwise we "break"
3853 the switch, which will "break" the while. */
3855 in_p = 0;
3856 low = high = build_int_cst (TREE_TYPE (exp), 0);
3858 while (1)
3860 code = TREE_CODE (exp);
3861 exp_type = TREE_TYPE (exp);
3863 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3865 if (TREE_CODE_LENGTH (code) > 0)
3866 arg0 = TREE_OPERAND (exp, 0);
3867 if (TREE_CODE_CLASS (code) == tcc_comparison
3868 || TREE_CODE_CLASS (code) == tcc_unary
3869 || TREE_CODE_CLASS (code) == tcc_binary)
3870 arg0_type = TREE_TYPE (arg0);
3871 if (TREE_CODE_CLASS (code) == tcc_binary
3872 || TREE_CODE_CLASS (code) == tcc_comparison
3873 || (TREE_CODE_CLASS (code) == tcc_expression
3874 && TREE_CODE_LENGTH (code) > 1))
3875 arg1 = TREE_OPERAND (exp, 1);
3878 switch (code)
3880 case TRUTH_NOT_EXPR:
3881 in_p = ! in_p, exp = arg0;
3882 continue;
3884 case EQ_EXPR: case NE_EXPR:
3885 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3886 /* We can only do something if the range is testing for zero
3887 and if the second operand is an integer constant.  Note that
3888 saying something is "in" the range we make is done by
3889 complementing IN_P, since IN_P was set up for the initial case
3890 of being not equal to zero; "out" leaves it alone. */
3891 if (low == 0 || high == 0
3892 || ! integer_zerop (low) || ! integer_zerop (high)
3893 || TREE_CODE (arg1) != INTEGER_CST)
3894 break;
3896 switch (code)
3898 case NE_EXPR: /* - [c, c] */
3899 low = high = arg1;
3900 break;
3901 case EQ_EXPR: /* + [c, c] */
3902 in_p = ! in_p, low = high = arg1;
3903 break;
3904 case GT_EXPR: /* - [-, c] */
3905 low = 0, high = arg1;
3906 break;
3907 case GE_EXPR: /* + [c, -] */
3908 in_p = ! in_p, low = arg1, high = 0;
3909 break;
3910 case LT_EXPR: /* - [c, -] */
3911 low = arg1, high = 0;
3912 break;
3913 case LE_EXPR: /* + [-, c] */
3914 in_p = ! in_p, low = 0, high = arg1;
3915 break;
3916 default:
3917 gcc_unreachable ();
3920 /* If this is an unsigned comparison, we also know that EXP is
3921 greater than or equal to zero. We base the range tests we make
3922 on that fact, so we record it here so we can parse existing
3923 range tests. We test arg0_type since often the return type
3924 of, e.g. EQ_EXPR, is boolean. */
3925 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3927 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3928 in_p, low, high, 1,
3929 build_int_cst (arg0_type, 0),
3930 NULL_TREE))
3931 break;
3933 in_p = n_in_p, low = n_low, high = n_high;
3935 /* If the high bound is missing, but we have a nonzero low
3936 bound, reverse the range so it goes from zero to the low bound
3937 minus 1. */
3938 if (high == 0 && low && ! integer_zerop (low))
3940 in_p = ! in_p;
3941 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3942 integer_one_node, 0);
3943 low = build_int_cst (arg0_type, 0);
3947 exp = arg0;
3948 continue;
3950 case NEGATE_EXPR:
3951 /* (-x) IN [a,b] -> x in [-b, -a] */
3952 n_low = range_binop (MINUS_EXPR, exp_type,
3953 build_int_cst (exp_type, 0),
3954 0, high, 1);
3955 n_high = range_binop (MINUS_EXPR, exp_type,
3956 build_int_cst (exp_type, 0),
3957 0, low, 0);
3958 low = n_low, high = n_high;
3959 exp = arg0;
3960 continue;
3962 case BIT_NOT_EXPR:
3963 /* ~ X -> -X - 1 */
3964 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3965 build_int_cst (exp_type, 1));
3966 continue;
3968 case PLUS_EXPR: case MINUS_EXPR:
3969 if (TREE_CODE (arg1) != INTEGER_CST)
3970 break;
3972 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3973 move a constant to the other side. */
3974 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3975 break;
3977 /* If EXP is signed, any overflow in the computation is undefined,
3978 so we don't worry about it so long as our computations on
3979 the bounds don't overflow. For unsigned, overflow is defined
3980 and this is exactly the right thing. */
3981 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3982 arg0_type, low, 0, arg1, 0);
3983 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3984 arg0_type, high, 1, arg1, 0);
3985 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3986 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3987 break;
3989 /* Check for an unsigned range which has wrapped around the maximum
3990 value thus making n_high < n_low, and normalize it. */
3991 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3993 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3994 integer_one_node, 0);
3995 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3996 integer_one_node, 0);
3998 /* If the range is of the form +/- [ x+1, x ], we won't
3999 be able to normalize it. But then, it represents the
4000 whole range or the empty set, so make it
4001 +/- [ -, - ]. */
4002 if (tree_int_cst_equal (n_low, low)
4003 && tree_int_cst_equal (n_high, high))
4004 low = high = 0;
4005 else
4006 in_p = ! in_p;
4008 else
4009 low = n_low, high = n_high;
4011 exp = arg0;
4012 continue;
4014 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4015 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4016 break;
4018 if (! INTEGRAL_TYPE_P (arg0_type)
4019 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4020 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4021 break;
4023 n_low = low, n_high = high;
4025 if (n_low != 0)
4026 n_low = fold_convert (arg0_type, n_low);
4028 if (n_high != 0)
4029 n_high = fold_convert (arg0_type, n_high);
4032 /* If we're converting arg0 from an unsigned type to exp's
4033 signed type, we will be doing the comparison as unsigned.
4034 The tests above have already verified that LOW and HIGH
4035 are both positive.
4037 So we have to ensure that we will handle large unsigned
4038 values the same way that the current signed bounds treat
4039 negative values. */
4041 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4043 tree high_positive;
4044 tree equiv_type = lang_hooks.types.type_for_mode
4045 (TYPE_MODE (arg0_type), 1);
4047 /* A range without an upper bound is, naturally, unbounded.
4048 Since convert would have cropped a very large value, use
4049 the max value for the destination type. */
4050 high_positive
4051 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4052 : TYPE_MAX_VALUE (arg0_type);
4054 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4055 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4056 fold_convert (arg0_type,
4057 high_positive),
4058 build_int_cst (arg0_type, 1));
4060 /* If the low bound is specified, "and" the range with the
4061 range for which the original unsigned value will be
4062 positive. */
4063 if (low != 0)
4065 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4066 1, n_low, n_high, 1,
4067 fold_convert (arg0_type,
4068 integer_zero_node),
4069 high_positive))
4070 break;
4072 in_p = (n_in_p == in_p);
4074 else
4076 /* Otherwise, "or" the range with the range of the input
4077 that will be interpreted as negative. */
4078 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4079 0, n_low, n_high, 1,
4080 fold_convert (arg0_type,
4081 integer_zero_node),
4082 high_positive))
4083 break;
4085 in_p = (in_p != n_in_p);
4089 exp = arg0;
4090 low = n_low, high = n_high;
4091 continue;
4093 default:
4094 break;
4097 break;
4100 /* If EXP is a constant, we can evaluate whether this is true or false. */
4101 if (TREE_CODE (exp) == INTEGER_CST)
4103 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4104 exp, 0, low, 0))
4105 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4106 exp, 1, high, 1)));
4107 low = high = 0;
4108 exp = 0;
4111 *pin_p = in_p, *plow = low, *phigh = high;
4112 return exp;
4115 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4116 type, TYPE, return an expression to test if EXP is in (or out of, depending
4117 on IN_P) the range. Return 0 if the test couldn't be created. */
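/* Usage sketch (illustrative): with IN_P set, a NULL LOW yields
   "EXP <= HIGH", a NULL HIGH yields "EXP >= LOW", and LOW == HIGH
   yields "EXP == LOW".  A finite range such as + [2, 5] falls through
   to the subtraction form at the end, conceptually
   (unsigned) EXP - 2 <= 3.  */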
4119 static tree
4120 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4122 tree etype = TREE_TYPE (exp);
4123 tree value;
4125 #ifdef HAVE_canonicalize_funcptr_for_compare
4126 /* Disable this optimization for function pointer expressions
4127 on targets that require function pointer canonicalization. */
4128 if (HAVE_canonicalize_funcptr_for_compare
4129 && TREE_CODE (etype) == POINTER_TYPE
4130 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4131 return NULL_TREE;
4132 #endif
4134 if (! in_p)
4136 value = build_range_check (type, exp, 1, low, high);
4137 if (value != 0)
4138 return invert_truthvalue (value);
4140 return 0;
4143 if (low == 0 && high == 0)
4144 return build_int_cst (type, 1);
4146 if (low == 0)
4147 return fold_build2 (LE_EXPR, type, exp,
4148 fold_convert (etype, high));
4150 if (high == 0)
4151 return fold_build2 (GE_EXPR, type, exp,
4152 fold_convert (etype, low));
4154 if (operand_equal_p (low, high, 0))
4155 return fold_build2 (EQ_EXPR, type, exp,
4156 fold_convert (etype, low));
4158 if (integer_zerop (low))
4160 if (! TYPE_UNSIGNED (etype))
4162 etype = lang_hooks.types.unsigned_type (etype);
4163 high = fold_convert (etype, high);
4164 exp = fold_convert (etype, exp);
4166 return build_range_check (type, exp, 1, 0, high);
4169 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4170 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4172 unsigned HOST_WIDE_INT lo;
4173 HOST_WIDE_INT hi;
4174 int prec;
4176 prec = TYPE_PRECISION (etype);
4177 if (prec <= HOST_BITS_PER_WIDE_INT)
4179 hi = 0;
4180 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4182 else
4184 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4185 lo = (unsigned HOST_WIDE_INT) -1;
4188 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4190 if (TYPE_UNSIGNED (etype))
4192 etype = lang_hooks.types.signed_type (etype);
4193 exp = fold_convert (etype, exp);
4195 return fold_build2 (GT_EXPR, type, exp,
4196 build_int_cst (etype, 0));
4200 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4201 This requires wrap-around arithmetic for the type of the expression. */
4202 switch (TREE_CODE (etype))
4204 case INTEGER_TYPE:
4205 /* There is no requirement that LOW be within the range of ETYPE
4206 if the latter is a subtype. It must, however, be within the base
4207 type of ETYPE. So be sure we do the subtraction in that type. */
4208 if (TREE_TYPE (etype))
4209 etype = TREE_TYPE (etype);
4210 break;
4212 case ENUMERAL_TYPE:
4213 case BOOLEAN_TYPE:
4214 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4215 TYPE_UNSIGNED (etype));
4216 break;
4218 default:
4219 break;
4222 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4223 if (TREE_CODE (etype) == INTEGER_TYPE
4224 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4226 tree utype, minv, maxv;
4228 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4229 for the type in question, as we rely on this here. */
4230 utype = lang_hooks.types.unsigned_type (etype);
4231 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4232 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4233 integer_one_node, 1);
4234 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4236 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4237 minv, 1, maxv, 1)))
4238 etype = utype;
4239 else
4240 return 0;
4243 high = fold_convert (etype, high);
4244 low = fold_convert (etype, low);
4245 exp = fold_convert (etype, exp);
4247 value = const_binop (MINUS_EXPR, high, low, 0);
4249 if (value != 0 && !TREE_OVERFLOW (value))
4250 return build_range_check (type,
4251 fold_build2 (MINUS_EXPR, etype, exp, low),
4252 1, build_int_cst (etype, 0), value);
4254 return 0;
4257 /* Return the predecessor of VAL in its type, handling the infinite case. */
4259 static tree
4260 range_predecessor (tree val)
4262 tree type = TREE_TYPE (val);
4264 if (INTEGRAL_TYPE_P (type)
4265 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4266 return 0;
4267 else
4268 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4271 /* Return the successor of VAL in its type, handling the infinite case. */
4273 static tree
4274 range_successor (tree val)
4276 tree type = TREE_TYPE (val);
4278 if (INTEGRAL_TYPE_P (type)
4279 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4280 return 0;
4281 else
4282 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4285 /* Given two ranges, see if we can merge them into one. Return 1 if we
4286 can, 0 if we can't. Set the output range into the specified parameters. */
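/* Examples (illustrative): merging + [2, 5] and + [4, 9] with both IN0_P
   and IN1_P set (the "and" case) gives + [4, 5]; merging - [2, 5] with
   - [4, 9] gives - [2, 9].  An "and" of the disjoint + [2, 3] and + [6, 7]
   gives the always-false - [-, -], while excluding both halves of
   - [-, 3] and - [6, -] gives back + [4, 5].  */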
4288 static int
4289 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4290 tree high0, int in1_p, tree low1, tree high1)
4292 int no_overlap;
4293 int subset;
4294 int temp;
4295 tree tem;
4296 int in_p;
4297 tree low, high;
4298 int lowequal = ((low0 == 0 && low1 == 0)
4299 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4300 low0, 0, low1, 0)));
4301 int highequal = ((high0 == 0 && high1 == 0)
4302 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4303 high0, 1, high1, 1)));
4305 /* Make range 0 be the range that starts first, or ends last if they
4306 start at the same value. Swap them if it isn't. */
4307 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4308 low0, 0, low1, 0))
4309 || (lowequal
4310 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4311 high1, 1, high0, 1))))
4313 temp = in0_p, in0_p = in1_p, in1_p = temp;
4314 tem = low0, low0 = low1, low1 = tem;
4315 tem = high0, high0 = high1, high1 = tem;
4318 /* Now flag two cases, whether the ranges are disjoint or whether the
4319 second range is totally subsumed in the first. Note that the tests
4320 below are simplified by the ones above. */
4321 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4322 high0, 1, low1, 0));
4323 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4324 high1, 1, high0, 1));
4326 /* We now have four cases, depending on whether we are including or
4327 excluding the two ranges. */
4328 if (in0_p && in1_p)
4330 /* If they don't overlap, the result is false. If the second range
4331 is a subset it is the result. Otherwise, the range is from the start
4332 of the second to the end of the first. */
4333 if (no_overlap)
4334 in_p = 0, low = high = 0;
4335 else if (subset)
4336 in_p = 1, low = low1, high = high1;
4337 else
4338 in_p = 1, low = low1, high = high0;
4341 else if (in0_p && ! in1_p)
4343 /* If they don't overlap, the result is the first range. If they are
4344 equal, the result is false. If the second range is a subset of the
4345 first, and the ranges begin at the same place, we go from just after
4346 the end of the second range to the end of the first. If the second
4347 range is not a subset of the first, or if it is a subset and both
4348 ranges end at the same place, the range starts at the start of the
4349 first range and ends just before the second range.
4350 Otherwise, we can't describe this as a single range. */
4351 if (no_overlap)
4352 in_p = 1, low = low0, high = high0;
4353 else if (lowequal && highequal)
4354 in_p = 0, low = high = 0;
4355 else if (subset && lowequal)
4357 low = range_successor (high1);
4358 high = high0;
4359 in_p = (low != 0);
4361 else if (! subset || highequal)
4363 low = low0;
4364 high = range_predecessor (low1);
4365 in_p = (high != 0);
4367 else
4368 return 0;
4371 else if (! in0_p && in1_p)
4373 /* If they don't overlap, the result is the second range. If the second
4374 is a subset of the first, the result is false. Otherwise,
4375 the range starts just after the first range and ends at the
4376 end of the second. */
4377 if (no_overlap)
4378 in_p = 1, low = low1, high = high1;
4379 else if (subset || highequal)
4380 in_p = 0, low = high = 0;
4381 else
4383 low = range_successor (high0);
4384 high = high1;
4385 in_p = (low != 0);
4389 else
4391 /* The case where we are excluding both ranges. Here the complex case
4392 is if they don't overlap. In that case, the only time we have a
4393 range is if they are adjacent. If the second is a subset of the
4394 first, the result is the first. Otherwise, the range to exclude
4395 starts at the beginning of the first range and ends at the end of the
4396 second. */
4397 if (no_overlap)
4399 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4400 range_successor (high0),
4401 1, low1, 0)))
4402 in_p = 0, low = low0, high = high1;
4403 else
4405 /* Canonicalize - [min, x] into - [-, x]. */
4406 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4407 switch (TREE_CODE (TREE_TYPE (low0)))
4409 case ENUMERAL_TYPE:
4410 if (TYPE_PRECISION (TREE_TYPE (low0))
4411 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4412 break;
4413 /* FALLTHROUGH */
4414 case INTEGER_TYPE:
4415 if (tree_int_cst_equal (low0,
4416 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4417 low0 = 0;
4418 break;
4419 case POINTER_TYPE:
4420 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4421 && integer_zerop (low0))
4422 low0 = 0;
4423 break;
4424 default:
4425 break;
4428 /* Canonicalize - [x, max] into - [x, -]. */
4429 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4430 switch (TREE_CODE (TREE_TYPE (high1)))
4432 case ENUMERAL_TYPE:
4433 if (TYPE_PRECISION (TREE_TYPE (high1))
4434 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4435 break;
4436 /* FALLTHROUGH */
4437 case INTEGER_TYPE:
4438 if (tree_int_cst_equal (high1,
4439 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4440 high1 = 0;
4441 break;
4442 case POINTER_TYPE:
4443 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4444 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4445 high1, 1,
4446 integer_one_node, 1)))
4447 high1 = 0;
4448 break;
4449 default:
4450 break;
4453 /* The ranges might also be adjacent between the maximum and
4454 minimum values of the given type. For
4455 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4456 return + [x + 1, y - 1]. */
4457 if (low0 == 0 && high1 == 0)
4459 low = range_successor (high0);
4460 high = range_predecessor (low1);
4461 if (low == 0 || high == 0)
4462 return 0;
4464 in_p = 1;
4466 else
4467 return 0;
4470 else if (subset)
4471 in_p = 0, low = low0, high = high0;
4472 else
4473 in_p = 0, low = low0, high = high1;
4476 *pin_p = in_p, *plow = low, *phigh = high;
4477 return 1;
4481 /* Subroutine of fold, looking inside expressions of the form
4482 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4483 of the COND_EXPR. This function is being used also to optimize
4484 A op B ? C : A, by reversing the comparison first.
4486 Return a folded expression whose code is not a COND_EXPR
4487 anymore, or NULL_TREE if no folding opportunity is found. */
4489 static tree
4490 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4492 enum tree_code comp_code = TREE_CODE (arg0);
4493 tree arg00 = TREE_OPERAND (arg0, 0);
4494 tree arg01 = TREE_OPERAND (arg0, 1);
4495 tree arg1_type = TREE_TYPE (arg1);
4496 tree tem;
4498 STRIP_NOPS (arg1);
4499 STRIP_NOPS (arg2);
4501 /* If we have A op 0 ? A : -A, consider applying the following
4502 transformations:
4504 A == 0? A : -A same as -A
4505 A != 0? A : -A same as A
4506 A >= 0? A : -A same as abs (A)
4507 A > 0? A : -A same as abs (A)
4508 A <= 0? A : -A same as -abs (A)
4509 A < 0? A : -A same as -abs (A)
4511 None of these transformations work for modes with signed
4512 zeros. If A is +/-0, the first two transformations will
4513 change the sign of the result (from +0 to -0, or vice
4514 versa). The last four will fix the sign of the result,
4515 even though the original expressions could be positive or
4516 negative, depending on the sign of A.
4518 Note that all these transformations are correct if A is
4519 NaN, since the two alternatives (A and -A) are also NaNs. */
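/* Illustrative folds (assuming signed zeros need not be honored for
   the mode): "a > 0 ? a : -a" matches the GT_EXPR case below and
   becomes ABS_EXPR <a>, while "a <= 0 ? a : -a" becomes
   -ABS_EXPR <a> via the LE_EXPR case.  */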
4520 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4521 ? real_zerop (arg01)
4522 : integer_zerop (arg01))
4523 && ((TREE_CODE (arg2) == NEGATE_EXPR
4524 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4525 /* In the case that A is of the form X-Y, '-A' (arg2) may
4526 have already been folded to Y-X, check for that. */
4527 || (TREE_CODE (arg1) == MINUS_EXPR
4528 && TREE_CODE (arg2) == MINUS_EXPR
4529 && operand_equal_p (TREE_OPERAND (arg1, 0),
4530 TREE_OPERAND (arg2, 1), 0)
4531 && operand_equal_p (TREE_OPERAND (arg1, 1),
4532 TREE_OPERAND (arg2, 0), 0))))
4533 switch (comp_code)
4535 case EQ_EXPR:
4536 case UNEQ_EXPR:
4537 tem = fold_convert (arg1_type, arg1);
4538 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4539 case NE_EXPR:
4540 case LTGT_EXPR:
4541 return pedantic_non_lvalue (fold_convert (type, arg1));
4542 case UNGE_EXPR:
4543 case UNGT_EXPR:
4544 if (flag_trapping_math)
4545 break;
4546 /* Fall through. */
4547 case GE_EXPR:
4548 case GT_EXPR:
4549 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4550 arg1 = fold_convert (lang_hooks.types.signed_type
4551 (TREE_TYPE (arg1)), arg1);
4552 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4553 return pedantic_non_lvalue (fold_convert (type, tem));
4554 case UNLE_EXPR:
4555 case UNLT_EXPR:
4556 if (flag_trapping_math)
4557 break;
4558 case LE_EXPR:
4559 case LT_EXPR:
4560 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4561 arg1 = fold_convert (lang_hooks.types.signed_type
4562 (TREE_TYPE (arg1)), arg1);
4563 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4564 return negate_expr (fold_convert (type, tem));
4565 default:
4566 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4567 break;
4570 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4571 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4572 both transformations are correct when A is NaN: A != 0
4573 is then true, and A == 0 is false. */
4575 if (integer_zerop (arg01) && integer_zerop (arg2))
4577 if (comp_code == NE_EXPR)
4578 return pedantic_non_lvalue (fold_convert (type, arg1));
4579 else if (comp_code == EQ_EXPR)
4580 return build_int_cst (type, 0);
4583 /* Try some transformations of A op B ? A : B.
4585 A == B? A : B same as B
4586 A != B? A : B same as A
4587 A >= B? A : B same as max (A, B)
4588 A > B? A : B same as max (B, A)
4589 A <= B? A : B same as min (A, B)
4590 A < B? A : B same as min (B, A)
4592 As above, these transformations don't work in the presence
4593 of signed zeros. For example, if A and B are zeros of
4594 opposite sign, the first two transformations will change
4595 the sign of the result. In the last four, the original
4596 expressions give different results for (A=+0, B=-0) and
4597 (A=-0, B=+0), but the transformed expressions do not.
4599 The first two transformations are correct if either A or B
4600 is a NaN. In the first transformation, the condition will
4601 be false, and B will indeed be chosen. In the case of the
4602 second transformation, the condition A != B will be true,
4603 and A will be chosen.
4605 The conversions to max() and min() are not correct if B is
4606 a number and A is not. The conditions in the original
4607 expressions will be false, so all four give B. The min()
4608 and max() versions would give a NaN instead. */
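/* Illustrative folds (assuming NaNs need not be honored, as the cases
   below check): "a < b ? a : b" becomes MIN_EXPR <b, a> and
   "a >= b ? a : b" becomes MAX_EXPR <a, b>.  The operand placed first
   is the one returned when a == b, so the COND_EXPR can be
   reconstructed for C++ lvalues.  */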
4609 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4610 /* Avoid these transformations if the COND_EXPR may be used
4611 as an lvalue in the C++ front-end. PR c++/19199. */
4612 && (in_gimple_form
4613 || (strcmp (lang_hooks.name, "GNU C++") != 0
4614 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4615 || ! maybe_lvalue_p (arg1)
4616 || ! maybe_lvalue_p (arg2)))
4618 tree comp_op0 = arg00;
4619 tree comp_op1 = arg01;
4620 tree comp_type = TREE_TYPE (comp_op0);
4622 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4623 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4625 comp_type = type;
4626 comp_op0 = arg1;
4627 comp_op1 = arg2;
4630 switch (comp_code)
4632 case EQ_EXPR:
4633 return pedantic_non_lvalue (fold_convert (type, arg2));
4634 case NE_EXPR:
4635 return pedantic_non_lvalue (fold_convert (type, arg1));
4636 case LE_EXPR:
4637 case LT_EXPR:
4638 case UNLE_EXPR:
4639 case UNLT_EXPR:
4640 /* In C++ a ?: expression can be an lvalue, so put the
4641 operand which will be used if they are equal first
4642 so that we can convert this back to the
4643 corresponding COND_EXPR. */
4644 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4646 comp_op0 = fold_convert (comp_type, comp_op0);
4647 comp_op1 = fold_convert (comp_type, comp_op1);
4648 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4649 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4650 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4651 return pedantic_non_lvalue (fold_convert (type, tem));
4653 break;
4654 case GE_EXPR:
4655 case GT_EXPR:
4656 case UNGE_EXPR:
4657 case UNGT_EXPR:
4658 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4660 comp_op0 = fold_convert (comp_type, comp_op0);
4661 comp_op1 = fold_convert (comp_type, comp_op1);
4662 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4663 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4664 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4665 return pedantic_non_lvalue (fold_convert (type, tem));
4667 break;
4668 case UNEQ_EXPR:
4669 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4670 return pedantic_non_lvalue (fold_convert (type, arg2));
4671 break;
4672 case LTGT_EXPR:
4673 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4674 return pedantic_non_lvalue (fold_convert (type, arg1));
4675 break;
4676 default:
4677 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4678 break;
4682 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4683 we might still be able to simplify this. For example,
4684 if C1 is one less or one more than C2, this might have started
4685 out as a MIN or MAX and been transformed by this function.
4686 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
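/* Worked example (illustrative): "x < 6 ? x : 5" has C1 == 6 == C2 + 1, so
   the LT_EXPR case below rebuilds it as MIN_EXPR <x, 5>; likewise
   "x > 4 ? x : 5" (C1 == C2 - 1) becomes MAX_EXPR <x, 5>.  */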
4688 if (INTEGRAL_TYPE_P (type)
4689 && TREE_CODE (arg01) == INTEGER_CST
4690 && TREE_CODE (arg2) == INTEGER_CST)
4691 switch (comp_code)
4693 case EQ_EXPR:
4694 /* We can replace A with C1 in this case. */
4695 arg1 = fold_convert (type, arg01);
4696 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4698 case LT_EXPR:
4699 /* If C1 is C2 + 1, this is min(A, C2). */
4700 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4701 OEP_ONLY_CONST)
4702 && operand_equal_p (arg01,
4703 const_binop (PLUS_EXPR, arg2,
4704 build_int_cst (type, 1), 0),
4705 OEP_ONLY_CONST))
4706 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4707 type, arg1, arg2));
4708 break;
4710 case LE_EXPR:
4711 /* If C1 is C2 - 1, this is min(A, C2). */
4712 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4713 OEP_ONLY_CONST)
4714 && operand_equal_p (arg01,
4715 const_binop (MINUS_EXPR, arg2,
4716 build_int_cst (type, 1), 0),
4717 OEP_ONLY_CONST))
4718 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4719 type, arg1, arg2));
4720 break;
4722 case GT_EXPR:
4723 /* If C1 is C2 - 1, this is max(A, C2). */
4724 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4725 OEP_ONLY_CONST)
4726 && operand_equal_p (arg01,
4727 const_binop (MINUS_EXPR, arg2,
4728 build_int_cst (type, 1), 0),
4729 OEP_ONLY_CONST))
4730 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4731 type, arg1, arg2));
4732 break;
4734 case GE_EXPR:
4735 /* If C1 is C2 + 1, this is max(A, C2). */
4736 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4737 OEP_ONLY_CONST)
4738 && operand_equal_p (arg01,
4739 const_binop (PLUS_EXPR, arg2,
4740 build_int_cst (type, 1), 0),
4741 OEP_ONLY_CONST))
4742 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4743 type, arg1, arg2));
4744 break;
4745 case NE_EXPR:
4746 break;
4747 default:
4748 gcc_unreachable ();
4751 return NULL_TREE;
4756 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4757 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4758 #endif
4760 /* EXP is some logical combination of boolean tests. See if we can
4761 merge it into some range test. Return the new tree if so. */
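/* Example (illustrative): in "x < 2 || x > 5" both operands test x, the
   two inverted ranges merge to + [2, 5], and the merged check is returned
   inverted: (unsigned) x - 2 > 3.  */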
4763 static tree
4764 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4766 int or_op = (code == TRUTH_ORIF_EXPR
4767 || code == TRUTH_OR_EXPR);
4768 int in0_p, in1_p, in_p;
4769 tree low0, low1, low, high0, high1, high;
4770 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4771 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4772 tree tem;
4774 /* If this is an OR operation, invert both sides; we will invert
4775 again at the end. */
4776 if (or_op)
4777 in0_p = ! in0_p, in1_p = ! in1_p;
4779 /* If both expressions are the same, if we can merge the ranges, and we
4780 can build the range test, return it or it inverted. If one of the
4781 ranges is always true or always false, consider it to be the same
4782 expression as the other. */
4783 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4784 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4785 in1_p, low1, high1)
4786 && 0 != (tem = (build_range_check (type,
4787 lhs != 0 ? lhs
4788 : rhs != 0 ? rhs : integer_zero_node,
4789 in_p, low, high))))
4790 return or_op ? invert_truthvalue (tem) : tem;
4792 /* On machines where the branch cost is expensive, if this is a
4793 short-circuited branch and the underlying object on both sides
4794 is the same, make a non-short-circuit operation. */
4795 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4796 && lhs != 0 && rhs != 0
4797 && (code == TRUTH_ANDIF_EXPR
4798 || code == TRUTH_ORIF_EXPR)
4799 && operand_equal_p (lhs, rhs, 0))
4801 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4802 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4803 which cases we can't do this. */
4804 if (simple_operand_p (lhs))
4805 return build2 (code == TRUTH_ANDIF_EXPR
4806 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4807 type, op0, op1);
4809 else if (lang_hooks.decls.global_bindings_p () == 0
4810 && ! CONTAINS_PLACEHOLDER_P (lhs))
4812 tree common = save_expr (lhs);
4814 if (0 != (lhs = build_range_check (type, common,
4815 or_op ? ! in0_p : in0_p,
4816 low0, high0))
4817 && (0 != (rhs = build_range_check (type, common,
4818 or_op ? ! in1_p : in1_p,
4819 low1, high1))))
4820 return build2 (code == TRUTH_ANDIF_EXPR
4821 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4822 type, lhs, rhs);
4826 return 0;
4829 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4830 bit value. Arrange things so the extra bits will be set to zero if and
4831 only if C is sign-extended to its full width.  If MASK is nonzero,
4832 it is an INTEGER_CST that should be AND'ed with the extra bits. */
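/* Worked example (illustrative, 8-bit mode, P == 4): the sign bit of the
   field in C == 0b00001010 is bit 3, so TEMP becomes 0b11110000 and
   C ^ TEMP == 0b11111010.  The extra bits come out zero exactly when C
   arrived sign-extended: 0b11111010 ^ 0b11110000 == 0b00001010.  */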
4834 static tree
4835 unextend (tree c, int p, int unsignedp, tree mask)
4837 tree type = TREE_TYPE (c);
4838 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4839 tree temp;
4841 if (p == modesize || unsignedp)
4842 return c;
4844 /* We work by getting just the sign bit into the low-order bit, then
4845 into the high-order bit, then sign-extend. We then XOR that value
4846 with C. */
4847 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4848 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4850 /* We must use a signed type in order to get an arithmetic right shift.
4851 However, we must also avoid introducing accidental overflows, so that
4852 a subsequent call to integer_zerop will work. Hence we must
4853 do the type conversion here. At this point, the constant is either
4854 zero or one, and the conversion to a signed type can never overflow.
4855 We could get an overflow if this conversion is done anywhere else. */
4856 if (TYPE_UNSIGNED (type))
4857 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4859 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4860 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4861 if (mask != 0)
4862 temp = const_binop (BIT_AND_EXPR, temp,
4863 fold_convert (TREE_TYPE (c), mask), 0);
4864 /* If necessary, convert the type back to match the type of C. */
4865 if (TYPE_UNSIGNED (type))
4866 temp = fold_convert (type, temp);
4868 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4871 /* Find ways of folding logical expressions of LHS and RHS:
4872 Try to merge two comparisons to the same innermost item.
4873 Look for range tests like "ch >= '0' && ch <= '9'".
4874 Look for combinations of simple terms on machines with expensive branches
4875 and evaluate the RHS unconditionally.
4877 For example, if we have p->a == 2 && p->b == 4 and we can make an
4878 object large enough to span both A and B, we can do this with a comparison
4879 against the object ANDed with a mask.
4881 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4882 operations to do this with one comparison.
4884 We check for both normal comparisons and the BIT_AND_EXPRs made by
4885 this function and the one above.
4887 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4888 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4890 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4891 two operands.
4893 We return the simplified tree or 0 if no optimization is possible. */
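/* Illustrative input/output (hypothetical layout):

     struct s { unsigned int a : 8; unsigned int b : 8; } *p;

   "p->a == 2 && p->b == 4" can be merged into one 16-bit load, mask and
   compare; on a little-endian target this is conceptually
   *(unsigned short *) p == 0x0402, with the mask dropped because it
   covers every bit of the combined field.  */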
4895 static tree
4896 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4898 /* If this is the "or" of two comparisons, we can do something if
4899 the comparisons are NE_EXPR. If this is the "and", we can do something
4900 if the comparisons are EQ_EXPR. I.e.,
4901 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4903 WANTED_CODE is this operation code. For single bit fields, we can
4904 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4905 comparison for one-bit fields. */
4907 enum tree_code wanted_code;
4908 enum tree_code lcode, rcode;
4909 tree ll_arg, lr_arg, rl_arg, rr_arg;
4910 tree ll_inner, lr_inner, rl_inner, rr_inner;
4911 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4912 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4913 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4914 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4915 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4916 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4917 enum machine_mode lnmode, rnmode;
4918 tree ll_mask, lr_mask, rl_mask, rr_mask;
4919 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4920 tree l_const, r_const;
4921 tree lntype, rntype, result;
4922 int first_bit, end_bit;
4923 int volatilep;
4924 tree orig_lhs = lhs, orig_rhs = rhs;
4925 enum tree_code orig_code = code;
4927 /* Start by getting the comparison codes. Fail if anything is volatile.
4928 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4929 it were surrounded with a NE_EXPR. */
4931 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4932 return 0;
4934 lcode = TREE_CODE (lhs);
4935 rcode = TREE_CODE (rhs);
4937 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4939 lhs = build2 (NE_EXPR, truth_type, lhs,
4940 build_int_cst (TREE_TYPE (lhs), 0));
4941 lcode = NE_EXPR;
4944 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4946 rhs = build2 (NE_EXPR, truth_type, rhs,
4947 build_int_cst (TREE_TYPE (rhs), 0));
4948 rcode = NE_EXPR;
4951 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4952 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4953 return 0;
4955 ll_arg = TREE_OPERAND (lhs, 0);
4956 lr_arg = TREE_OPERAND (lhs, 1);
4957 rl_arg = TREE_OPERAND (rhs, 0);
4958 rr_arg = TREE_OPERAND (rhs, 1);
4960 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4961 if (simple_operand_p (ll_arg)
4962 && simple_operand_p (lr_arg))
4964 tree result;
4965 if (operand_equal_p (ll_arg, rl_arg, 0)
4966 && operand_equal_p (lr_arg, rr_arg, 0))
4968 result = combine_comparisons (code, lcode, rcode,
4969 truth_type, ll_arg, lr_arg);
4970 if (result)
4971 return result;
4973 else if (operand_equal_p (ll_arg, rr_arg, 0)
4974 && operand_equal_p (lr_arg, rl_arg, 0))
4976 result = combine_comparisons (code, lcode,
4977 swap_tree_comparison (rcode),
4978 truth_type, ll_arg, lr_arg);
4979 if (result)
4980 return result;
4984 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4985 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4987 /* If the RHS can be evaluated unconditionally and its operands are
4988 simple, it wins to evaluate the RHS unconditionally on machines
4989 with expensive branches. In this case, this isn't a comparison
4990 that can be merged. Avoid doing this if the RHS is a floating-point
4991 comparison since those can trap. */
4993 if (BRANCH_COST >= 2
4994 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4995 && simple_operand_p (rl_arg)
4996 && simple_operand_p (rr_arg))
4998 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4999 if (code == TRUTH_OR_EXPR
5000 && lcode == NE_EXPR && integer_zerop (lr_arg)
5001 && rcode == NE_EXPR && integer_zerop (rr_arg)
5002 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5003 return build2 (NE_EXPR, truth_type,
5004 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5005 ll_arg, rl_arg),
5006 build_int_cst (TREE_TYPE (ll_arg), 0));
5008 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5009 if (code == TRUTH_AND_EXPR
5010 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5011 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5012 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5013 return build2 (EQ_EXPR, truth_type,
5014 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5015 ll_arg, rl_arg),
5016 build_int_cst (TREE_TYPE (ll_arg), 0));
5018 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5020 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5021 return build2 (code, truth_type, lhs, rhs);
5022 return NULL_TREE;
5026 /* See if the comparisons can be merged. Then get all the parameters for
5027 each side. */
5029 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5030 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5031 return 0;
5033 volatilep = 0;
5034 ll_inner = decode_field_reference (ll_arg,
5035 &ll_bitsize, &ll_bitpos, &ll_mode,
5036 &ll_unsignedp, &volatilep, &ll_mask,
5037 &ll_and_mask);
5038 lr_inner = decode_field_reference (lr_arg,
5039 &lr_bitsize, &lr_bitpos, &lr_mode,
5040 &lr_unsignedp, &volatilep, &lr_mask,
5041 &lr_and_mask);
5042 rl_inner = decode_field_reference (rl_arg,
5043 &rl_bitsize, &rl_bitpos, &rl_mode,
5044 &rl_unsignedp, &volatilep, &rl_mask,
5045 &rl_and_mask);
5046 rr_inner = decode_field_reference (rr_arg,
5047 &rr_bitsize, &rr_bitpos, &rr_mode,
5048 &rr_unsignedp, &volatilep, &rr_mask,
5049 &rr_and_mask);
5051 /* The inner operation on the lhs of each comparison must be the
5052 same if we are to be able to do anything.
5053 Then see if we have constants. If not, the same must be true for
5054 the rhs's. */
5055 if (volatilep || ll_inner == 0 || rl_inner == 0
5056 || ! operand_equal_p (ll_inner, rl_inner, 0))
5057 return 0;
5059 if (TREE_CODE (lr_arg) == INTEGER_CST
5060 && TREE_CODE (rr_arg) == INTEGER_CST)
5061 l_const = lr_arg, r_const = rr_arg;
5062 else if (lr_inner == 0 || rr_inner == 0
5063 || ! operand_equal_p (lr_inner, rr_inner, 0))
5064 return 0;
5065 else
5066 l_const = r_const = 0;
5068 /* If either comparison code is not correct for our logical operation,
5069 fail. However, we can convert a one-bit comparison against zero into
5070 the opposite comparison against that bit being set in the field. */
5072 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5073 if (lcode != wanted_code)
5075 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5077 /* Make the left operand unsigned, since we are only interested
5078 in the value of one bit. Otherwise we are doing the wrong
5079 thing below. */
5080 ll_unsignedp = 1;
5081 l_const = ll_mask;
5083 else
5084 return 0;
5087 /* This is analogous to the code for l_const above. */
5088 if (rcode != wanted_code)
5090 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5092 rl_unsignedp = 1;
5093 r_const = rl_mask;
5095 else
5096 return 0;
5099 /* See if we can find a mode that contains both fields being compared on
5100 the left. If we can't, fail. Otherwise, update all constants and masks
5101 to be relative to a field of that size. */
5102 first_bit = MIN (ll_bitpos, rl_bitpos);
5103 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5104 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5105 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5106 volatilep);
5107 if (lnmode == VOIDmode)
5108 return 0;
5110 lnbitsize = GET_MODE_BITSIZE (lnmode);
5111 lnbitpos = first_bit & ~ (lnbitsize - 1);
5112 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5113 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5115 if (BYTES_BIG_ENDIAN)
5117 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5118 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5121 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5122 size_int (xll_bitpos), 0);
5123 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5124 size_int (xrl_bitpos), 0);
5126 if (l_const)
5128 l_const = fold_convert (lntype, l_const);
5129 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5130 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5131 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5132 fold_build1 (BIT_NOT_EXPR,
5133 lntype, ll_mask),
5134 0)))
5136 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5138 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5141 if (r_const)
5143 r_const = fold_convert (lntype, r_const);
5144 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5145 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5146 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5147 fold_build1 (BIT_NOT_EXPR,
5148 lntype, rl_mask),
5149 0)))
5151 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5153 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5157 /* If the right sides are not constant, do the same for them.  Also,
5158 disallow this optimization if a size or signedness mismatch occurs
5159 between the left and right sides. */
5160 if (l_const == 0)
5162 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5163 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5164 /* Make sure the two fields on the right
5165 correspond to the left without being swapped. */
5166 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5167 return 0;
5169 first_bit = MIN (lr_bitpos, rr_bitpos);
5170 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5171 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5172 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5173 volatilep);
5174 if (rnmode == VOIDmode)
5175 return 0;
5177 rnbitsize = GET_MODE_BITSIZE (rnmode);
5178 rnbitpos = first_bit & ~ (rnbitsize - 1);
5179 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5180 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5182 if (BYTES_BIG_ENDIAN)
5184 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5185 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5188 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5189 size_int (xlr_bitpos), 0);
5190 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5191 size_int (xrr_bitpos), 0);
5193 /* Make a mask that corresponds to both fields being compared.
5194 Do this for both items being compared. If the operands are the
5195 same size and the bits being compared are in the same position
5196 then we can do this by masking both and comparing the masked
5197 results. */
5198 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5199 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5200 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5202 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5203 ll_unsignedp || rl_unsignedp);
5204 if (! all_ones_mask_p (ll_mask, lnbitsize))
5205 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5207 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5208 lr_unsignedp || rr_unsignedp);
5209 if (! all_ones_mask_p (lr_mask, rnbitsize))
5210 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5212 return build2 (wanted_code, truth_type, lhs, rhs);
5215 /* There is still another way we can do something: If both pairs of
5216 fields being compared are adjacent, we may be able to make a wider
5217 field containing them both.
5219 Note that we still must mask the lhs/rhs expressions. Furthermore,
5220 the mask must be shifted to account for the shift done by
5221 make_bit_field_ref. */
5222 if ((ll_bitsize + ll_bitpos == rl_bitpos
5223 && lr_bitsize + lr_bitpos == rr_bitpos)
5224 || (ll_bitpos == rl_bitpos + rl_bitsize
5225 && lr_bitpos == rr_bitpos + rr_bitsize))
5227 tree type;
5229 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5230 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5231 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5232 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5234 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5235 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5236 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5237 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5239 /* Convert to the smaller type before masking out unwanted bits. */
5240 type = lntype;
5241 if (lntype != rntype)
5243 if (lnbitsize > rnbitsize)
5245 lhs = fold_convert (rntype, lhs);
5246 ll_mask = fold_convert (rntype, ll_mask);
5247 type = rntype;
5249 else if (lnbitsize < rnbitsize)
5251 rhs = fold_convert (lntype, rhs);
5252 lr_mask = fold_convert (lntype, lr_mask);
5253 type = lntype;
5257 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5258 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5260 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5261 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5263 return build2 (wanted_code, truth_type, lhs, rhs);
5266 return 0;
5269 /* Handle the case of comparisons with constants. If there is something in
5270 common between the masks, those bits of the constants must be the same.
5271 If not, the condition is always false. Test for this to avoid generating
5272 incorrect code below. */
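/* For instance (illustrative), "(x & 3) == 1 && (x & 3) == 2" shares the
   mask 3 but disagrees on the masked constant bits, so it folds to
   constant false with the "mutually exclusive" warning below; the
   NE_EXPR/OR form of the same tests folds to constant true.  */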
5273 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5274 if (! integer_zerop (result)
5275 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5276 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5278 if (wanted_code == NE_EXPR)
5280 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5281 return constant_boolean_node (true, truth_type);
5283 else
5285 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5286 return constant_boolean_node (false, truth_type);
5290 /* Construct the expression we will return. First get the component
5291 reference we will make. Unless the mask is all ones the width of
5292 that field, perform the mask operation. Then compare with the
5293 merged constant. */
5294 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5295 ll_unsignedp || rl_unsignedp);
5297 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5298 if (! all_ones_mask_p (ll_mask, lnbitsize))
5299 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5301 return build2 (wanted_code, truth_type, result,
5302 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5305 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5306 constant. */
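/* Worked examples (illustrative): "MAX_EXPR <x, 10> > 5" folds to constant
   true, since the max bound already exceeds the comparison constant, and
   "MIN_EXPR <x, 10> == 12" folds to constant false, since the minimum can
   never reach 12.  */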
5308 static tree
5309 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5311 tree arg0 = op0;
5312 enum tree_code op_code;
5313 tree comp_const = op1;
5314 tree minmax_const;
5315 int consts_equal, consts_lt;
5316 tree inner;
5318 STRIP_SIGN_NOPS (arg0);
5320 op_code = TREE_CODE (arg0);
5321 minmax_const = TREE_OPERAND (arg0, 1);
5322 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5323 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5324 inner = TREE_OPERAND (arg0, 0);
5326 /* If something does not permit us to optimize, return the original tree. */
5327 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5328 || TREE_CODE (comp_const) != INTEGER_CST
5329 || TREE_OVERFLOW (comp_const)
5330 || TREE_CODE (minmax_const) != INTEGER_CST
5331 || TREE_OVERFLOW (minmax_const))
5332 return NULL_TREE;
5334 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5335 and GT_EXPR, doing the rest with recursive calls using logical
5336 simplifications. */
5337 switch (code)
5339 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5341 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5342 type, op0, op1);
5343 if (tem)
5344 return invert_truthvalue (tem);
5345 return NULL_TREE;
5348 case GE_EXPR:
5349 return
5350 fold_build2 (TRUTH_ORIF_EXPR, type,
5351 optimize_minmax_comparison
5352 (EQ_EXPR, type, arg0, comp_const),
5353 optimize_minmax_comparison
5354 (GT_EXPR, type, arg0, comp_const));
5356 case EQ_EXPR:
5357 if (op_code == MAX_EXPR && consts_equal)
5358 /* MAX (X, 0) == 0 -> X <= 0 */
5359 return fold_build2 (LE_EXPR, type, inner, comp_const);
5361 else if (op_code == MAX_EXPR && consts_lt)
5362 /* MAX (X, 0) == 5 -> X == 5 */
5363 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5365 else if (op_code == MAX_EXPR)
5366 /* MAX (X, 0) == -1 -> false */
5367 return omit_one_operand (type, integer_zero_node, inner);
5369 else if (consts_equal)
5370 /* MIN (X, 0) == 0 -> X >= 0 */
5371 return fold_build2 (GE_EXPR, type, inner, comp_const);
5373 else if (consts_lt)
5374 /* MIN (X, 0) == 5 -> false */
5375 return omit_one_operand (type, integer_zero_node, inner);
5377 else
5378 /* MIN (X, 0) == -1 -> X == -1 */
5379 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5381 case GT_EXPR:
5382 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5383 /* MAX (X, 0) > 0 -> X > 0
5384 MAX (X, 0) > 5 -> X > 5 */
5385 return fold_build2 (GT_EXPR, type, inner, comp_const);
5387 else if (op_code == MAX_EXPR)
5388 /* MAX (X, 0) > -1 -> true */
5389 return omit_one_operand (type, integer_one_node, inner);
5391 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5392 /* MIN (X, 0) > 0 -> false
5393 MIN (X, 0) > 5 -> false */
5394 return omit_one_operand (type, integer_zero_node, inner);
5396 else
5397 /* MIN (X, 0) > -1 -> X > -1 */
5398 return fold_build2 (GT_EXPR, type, inner, comp_const);
5400 default:
5401 return NULL_TREE;
5405 /* T is an integer expression that is being multiplied, divided, or taken a
5406 modulus (CODE says which and what kind of divide or modulus) by a
5407 constant C. See if we can eliminate that operation by folding it with
5408 other operations already in T. WIDE_TYPE, if non-null, is a type that
5409 should be used for the computation if wider than our type.
5411 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5412 (X * 2) + (Y * 4). We must, however, be assured that either the original
5413 expression would not overflow or that overflow is undefined for the type
5414 in the language in question.
5416 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5417 the machine has a multiply-accumulate insn or that this is part of an
5418 addressing calculation.
5420 If we return a non-null expression, it is an equivalent form of the
5421 original computation, but need not be in the original type. */
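/* Usage sketch (illustrative): for T = (X * 8) + (Y * 16), C = 4 and
   CODE = TRUNC_DIV_EXPR, the PLUS_EXPR case succeeds on both operands
   and returns (X * 2) + (Y * 4); the same call with C = 3 returns
   NULL_TREE, since neither multiplier is divisible by 3.  */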
5423 static tree
5424 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5426 /* To avoid exponential search depth, refuse to allow recursion past
5427 three levels. Beyond that (1) it's highly unlikely that we'll find
5428 something interesting and (2) we've probably processed it before
5429 when we built the inner expression. */
5431 static int depth;
5432 tree ret;
5434 if (depth > 3)
5435 return NULL;
5437 depth++;
5438 ret = extract_muldiv_1 (t, c, code, wide_type);
5439 depth--;
5441 return ret;
5444 static tree
5445 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5447 tree type = TREE_TYPE (t);
5448 enum tree_code tcode = TREE_CODE (t);
5449 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5450 > GET_MODE_SIZE (TYPE_MODE (type)))
5451 ? wide_type : type);
5452 tree t1, t2;
5453 int same_p = tcode == code;
5454 tree op0 = NULL_TREE, op1 = NULL_TREE;
5456 /* Don't deal with constants of zero here; they confuse the code below. */
5457 if (integer_zerop (c))
5458 return NULL_TREE;
5460 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5461 op0 = TREE_OPERAND (t, 0);
5463 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5464 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5466 /* Note that we need not handle conditional operations here since fold
5467 already handles those cases. So just do arithmetic here. */
5468 switch (tcode)
5470 case INTEGER_CST:
5471 /* For a constant, we can always simplify if we are a multiply
5472 or (for divide and modulus) if it is a multiple of our constant. */
5473 if (code == MULT_EXPR
5474 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5475 return const_binop (code, fold_convert (ctype, t),
5476 fold_convert (ctype, c), 0);
5477 break;
5479 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5480 /* If op0 is an expression ... */
5481 if ((COMPARISON_CLASS_P (op0)
5482 || UNARY_CLASS_P (op0)
5483 || BINARY_CLASS_P (op0)
5484 || EXPRESSION_CLASS_P (op0))
5485 /* ... and is unsigned, and its type is smaller than ctype,
5486 then we cannot pass through as widening. */
5487 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5488 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5489 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5490 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5491 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5492 /* ... or this is a truncation (t is narrower than op0),
5493 then we cannot pass through this narrowing. */
5494 || (GET_MODE_SIZE (TYPE_MODE (type))
5495 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5496 /* ... or signedness changes for division or modulus,
5497 then we cannot pass through this conversion. */
5498 || (code != MULT_EXPR
5499 && (TYPE_UNSIGNED (ctype)
5500 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5501 break;
5503 /* Pass the constant down and see if we can make a simplification. If
5504 we can, replace this expression with the inner simplification for
5505 possible later conversion to our or some other type. */
5506 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5507 && TREE_CODE (t2) == INTEGER_CST
5508 && !TREE_OVERFLOW (t2)
5509 && (0 != (t1 = extract_muldiv (op0, t2, code,
5510 code == MULT_EXPR
5511 ? ctype : NULL_TREE))))
5512 return t1;
5513 break;
5515 case ABS_EXPR:
5516 /* If widening the type changes it from signed to unsigned, then we
5517 must avoid building ABS_EXPR itself as unsigned. */
5518 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5520 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5521 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5523 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5524 return fold_convert (ctype, t1);
5526 break;
5528 /* FALLTHROUGH */
5529 case NEGATE_EXPR:
5530 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5531 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5532 break;
5534 case MIN_EXPR: case MAX_EXPR:
5535 /* If widening the type changes the signedness, then we can't perform
5536 this optimization as that changes the result. */
5537 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5538 break;
5540 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5541 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5542 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5544 if (tree_int_cst_sgn (c) < 0)
5545 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5547 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5548 fold_convert (ctype, t2));
5550 break;
5552 case LSHIFT_EXPR: case RSHIFT_EXPR:
5553 /* If the second operand is constant, this is a multiplication
5554 or floor division by a power of two, so we can treat it that
5555 way unless the multiplier or divisor overflows. Signed
5556 left-shift overflow is implementation-defined rather than
5557 undefined in C90, so do not convert signed left shift into
5558 multiplication. */
5559 if (TREE_CODE (op1) == INTEGER_CST
5560 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5561 /* const_binop may not detect overflow correctly,
5562 so check for it explicitly here. */
5563 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5564 && TREE_INT_CST_HIGH (op1) == 0
5565 && 0 != (t1 = fold_convert (ctype,
5566 const_binop (LSHIFT_EXPR,
5567 size_one_node,
5568 op1, 0)))
5569 && !TREE_OVERFLOW (t1))
5570 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5571 ? MULT_EXPR : FLOOR_DIV_EXPR,
5572 ctype, fold_convert (ctype, op0), t1),
5573 c, code, wide_type);
5574 break;
5576 case PLUS_EXPR: case MINUS_EXPR:
5577 /* See if we can eliminate the operation on both sides. If we can, we
5578 can return a new PLUS or MINUS. If we can't, the only remaining
5579 cases where we can do anything are if the second operand is a
5580 constant. */
5581 t1 = extract_muldiv (op0, c, code, wide_type);
5582 t2 = extract_muldiv (op1, c, code, wide_type);
5583 if (t1 != 0 && t2 != 0
5584 && (code == MULT_EXPR
5585 /* If not multiplication, we can only do this if both operands
5586 are divisible by c. */
5587 || (multiple_of_p (ctype, op0, c)
5588 && multiple_of_p (ctype, op1, c))))
5589 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5590 fold_convert (ctype, t2));
5592 /* If this was a subtraction, negate OP1 and set it to be an addition.
5593 This simplifies the logic below. */
5594 if (tcode == MINUS_EXPR)
5595 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5597 if (TREE_CODE (op1) != INTEGER_CST)
5598 break;
5600 /* If either OP1 or C are negative, this optimization is not safe for
5601 some of the division and remainder types while for others we need
5602 to change the code. */
5603 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5605 if (code == CEIL_DIV_EXPR)
5606 code = FLOOR_DIV_EXPR;
5607 else if (code == FLOOR_DIV_EXPR)
5608 code = CEIL_DIV_EXPR;
5609 else if (code != MULT_EXPR
5610 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5611 break;
5614 /* If it's a multiply, or if (for division and modulus) OP1 is a multiple
5615 of our constant, apply the operation to OP1 and verify it doesn't overflow. */
5616 if (code == MULT_EXPR
5617 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5619 op1 = const_binop (code, fold_convert (ctype, op1),
5620 fold_convert (ctype, c), 0);
5621 /* We allow the constant to overflow with wrapping semantics. */
5622 if (op1 == 0
5623 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5624 break;
5626 else
5627 break;
5629 /* If we have an unsigned type that is not a sizetype, we cannot widen
5630 the operation since it will change the result if the original
5631 computation overflowed. */
5632 if (TYPE_UNSIGNED (ctype)
5633 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5634 && ctype != type)
5635 break;
5637 /* If we were able to eliminate our operation from the first side,
5638 apply our operation to the second side and reform the PLUS. */
5639 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5640 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5642 /* The last case is if we are a multiply. In that case, we can
5643 apply the distributive law to commute the multiply and addition
5644 if the multiplication of the constants doesn't overflow. */
5645 if (code == MULT_EXPR)
5646 return fold_build2 (tcode, ctype,
5647 fold_build2 (code, ctype,
5648 fold_convert (ctype, op0),
5649 fold_convert (ctype, c)),
5650 op1);
5652 break;
5654 case MULT_EXPR:
5655 /* We have a special case here if we are doing something like
5656 (C * 8) % 4 since we know that's zero. */
5657 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5658 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5659 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5660 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5661 return omit_one_operand (type, integer_zero_node, op0);
5663 /* ... fall through ... */
5665 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5666 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5667 /* If we can extract our operation from the LHS, do so and return a
5668 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5669 do something only if the second operand is a constant. */
5670 if (same_p
5671 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5672 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5673 fold_convert (ctype, op1));
5674 else if (tcode == MULT_EXPR && code == MULT_EXPR
5675 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5676 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5677 fold_convert (ctype, t1));
5678 else if (TREE_CODE (op1) != INTEGER_CST)
5679 return 0;
5681 /* If these are the same operation types, we can associate them
5682 assuming no overflow. */
5683 if (tcode == code
5684 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5685 fold_convert (ctype, c), 0))
5686 && !TREE_OVERFLOW (t1))
5687 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5689 /* If these operations "cancel" each other, we have the main
5690 optimizations of this pass, which occur when either constant is a
5691 multiple of the other, in which case we replace this with either an
5692 operation of CODE or TCODE.
5694 If we have an unsigned type that is not a sizetype, we cannot do
5695 this since it will change the result if the original computation
5696 overflowed. */
5697 if ((! TYPE_UNSIGNED (ctype)
5698 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5699 && ! flag_wrapv
5700 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5701 || (tcode == MULT_EXPR
5702 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5703 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5705 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5706 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5707 fold_convert (ctype,
5708 const_binop (TRUNC_DIV_EXPR,
5709 op1, c, 0)));
5710 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5711 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5712 fold_convert (ctype,
5713 const_binop (TRUNC_DIV_EXPR,
5714 c, op1, 0)));
5716 break;
5718 default:
5719 break;
5722 return 0;
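/* A sketch of the source-level rewrites that extract_muldiv enables
   (illustrative, not exhaustive; the signed cases assume -fwrapv is not
   in effect and that the overflow checks above succeed):

     (a + 4) * 2             ->  a * 2 + 8        distributed over PLUS_EXPR
     (a * 8) / 4             ->  a * 2            MULT and DIV cancel
     an inner a >> 2         is treated as the floor division a / 4
     MIN (a * 5, b * 10) / 5 ->  MIN (a, b * 2), with MIN and MAX
                                 swapped when the constant is negative.  */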
5725 /* Return a node which has the indicated constant VALUE (either 0 or
5726 1), and is of the indicated TYPE. */
5728 tree
5729 constant_boolean_node (int value, tree type)
5731 if (type == integer_type_node)
5732 return value ? integer_one_node : integer_zero_node;
5733 else if (type == boolean_type_node)
5734 return value ? boolean_true_node : boolean_false_node;
5735 else
5736 return build_int_cst (type, value);
5740 /* Return true if expr looks like an ARRAY_REF and set base and
5741 offset to the appropriate trees. If there is no offset,
5742 offset is set to NULL_TREE. Base will be canonicalized to
5743 something you can get the element type from using
5744 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5745 in bytes to the base. */
5747 static bool
5748 extract_array_ref (tree expr, tree *base, tree *offset)
5750 /* One canonical form is a PLUS_EXPR with the first
5751 argument being an ADDR_EXPR with a possible NOP_EXPR
5752 attached. */
5753 if (TREE_CODE (expr) == PLUS_EXPR)
5755 tree op0 = TREE_OPERAND (expr, 0);
5756 tree inner_base, dummy1;
5757 /* Strip NOP_EXPRs here because the C frontends and/or
5758 folders present us (int *)&x.a + 4B possibly. */
5759 STRIP_NOPS (op0);
5760 if (extract_array_ref (op0, &inner_base, &dummy1))
5762 *base = inner_base;
5763 if (dummy1 == NULL_TREE)
5764 *offset = TREE_OPERAND (expr, 1);
5765 else
5766 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5767 dummy1, TREE_OPERAND (expr, 1));
5768 return true;
5771 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5772 which we transform into an ADDR_EXPR with appropriate
5773 offset. For other arguments to the ADDR_EXPR we assume
5774 zero offset and as such do not care about the ADDR_EXPR
5775 type and strip possible nops from it. */
5776 else if (TREE_CODE (expr) == ADDR_EXPR)
5778 tree op0 = TREE_OPERAND (expr, 0);
5779 if (TREE_CODE (op0) == ARRAY_REF)
5781 tree idx = TREE_OPERAND (op0, 1);
5782 *base = TREE_OPERAND (op0, 0);
5783 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5784 array_ref_element_size (op0));
5786 else
5788 /* Handle array-to-pointer decay as &a. */
5789 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5790 *base = TREE_OPERAND (expr, 0);
5791 else
5792 *base = expr;
5793 *offset = NULL_TREE;
5795 return true;
5797 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5798 else if (SSA_VAR_P (expr)
5799 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5801 *base = expr;
5802 *offset = NULL_TREE;
5803 return true;
5806 return false;
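/* Illustrative examples of the canonical forms recognized above
   (a sketch, not exhaustive):

     &a[i]              base = a, offset = i * sizeof (a[0])
     (int *) &x.a + 4   handled recursively, adding 4 to the inner offset
     p  (a pointer)     base = p, offset = NULL_TREE  */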
5810 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5811 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5812 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5813 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5814 COND is the first argument to CODE; otherwise (as in the example
5815 given here), it is the second argument. TYPE is the type of the
5816 original expression. Return NULL_TREE if no simplification is
5817 possible. */
5819 static tree
5820 fold_binary_op_with_conditional_arg (enum tree_code code,
5821 tree type, tree op0, tree op1,
5822 tree cond, tree arg, int cond_first_p)
5824 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5825 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5826 tree test, true_value, false_value;
5827 tree lhs = NULL_TREE;
5828 tree rhs = NULL_TREE;
5830 /* This transformation is only worthwhile if we don't have to wrap
5831 arg in a SAVE_EXPR, and the operation can be simplified on at least
5832 one of the branches once it is pushed inside the COND_EXPR. */
5833 if (!TREE_CONSTANT (arg))
5834 return NULL_TREE;
5836 if (TREE_CODE (cond) == COND_EXPR)
5838 test = TREE_OPERAND (cond, 0);
5839 true_value = TREE_OPERAND (cond, 1);
5840 false_value = TREE_OPERAND (cond, 2);
5841 /* If this operand is an expression that throws (and hence has
5842 void type), it does not make sense to try to perform a logical
5843 or arithmetic operation involving it. */
5844 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5845 lhs = true_value;
5846 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5847 rhs = false_value;
5849 else
5851 tree testtype = TREE_TYPE (cond);
5852 test = cond;
5853 true_value = constant_boolean_node (true, testtype);
5854 false_value = constant_boolean_node (false, testtype);
5857 arg = fold_convert (arg_type, arg);
5858 if (lhs == 0)
5860 true_value = fold_convert (cond_type, true_value);
5861 if (cond_first_p)
5862 lhs = fold_build2 (code, type, true_value, arg);
5863 else
5864 lhs = fold_build2 (code, type, arg, true_value);
5866 if (rhs == 0)
5868 false_value = fold_convert (cond_type, false_value);
5869 if (cond_first_p)
5870 rhs = fold_build2 (code, type, false_value, arg);
5871 else
5872 rhs = fold_build2 (code, type, arg, false_value);
5875 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5876 return fold_convert (type, test);
5880 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5882 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5883 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5884 ADDEND is the same as X.
5886 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5887 and finite. The problematic cases are when X is zero, and its mode
5888 has signed zeros. In the case of rounding towards -infinity,
5889 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5890 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5892 static bool
5893 fold_real_zero_addition_p (tree type, tree addend, int negate)
5895 if (!real_zerop (addend))
5896 return false;
5898 /* Don't allow the fold with -fsignaling-nans. */
5899 if (HONOR_SNANS (TYPE_MODE (type)))
5900 return false;
5902 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5903 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5904 return true;
5906 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5907 if (TREE_CODE (addend) == REAL_CST
5908 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5909 negate = !negate;
5911 /* The mode has signed zeros, and we have to honor their sign.
5912 In this situation, there is only one case we can return true for.
5913 X - 0 is the same as X unless rounding towards -infinity is
5914 supported. */
5915 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
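/* Concrete cases behind the rules above (a sketch, assuming IEEE
   arithmetic): under round-to-nearest, (-0.0) + 0.0 is +0.0, so x + 0.0
   must not be folded to x when signed zeros are honored; under rounding
   towards -infinity, (+0.0) - 0.0 is -0.0, so x - 0.0 may be folded only
   when that rounding mode need not be honored.  A minimal host-side
   demonstration:

     #include <stdio.h>
     int main (void)
     {
       double nz = -0.0;
       printf ("%g %g\n", nz, nz + 0.0);
       return 0;
     }

   prints "-0 0" on an IEEE host, showing that x + 0.0 is not x
   when x is -0.0.  */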
5918 /* Subroutine of fold() that checks comparisons of built-in math
5919 functions against real constants.
5921 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5922 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5923 is the type of the result and ARG0 and ARG1 are the operands of the
5924 comparison. ARG1 must be a TREE_REAL_CST.
5926 The function returns the constant folded tree if a simplification
5927 can be made, and NULL_TREE otherwise. */
5929 static tree
5930 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5931 tree type, tree arg0, tree arg1)
5933 REAL_VALUE_TYPE c;
5935 if (BUILTIN_SQRT_P (fcode))
5937 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5938 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5940 c = TREE_REAL_CST (arg1);
5941 if (REAL_VALUE_NEGATIVE (c))
5943 /* sqrt(x) < y is always false, if y is negative. */
5944 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5945 return omit_one_operand (type, integer_zero_node, arg);
5947 /* sqrt(x) > y is always true, if y is negative and we
5948 don't care about NaNs, i.e. negative values of x. */
5949 if (code == NE_EXPR || !HONOR_NANS (mode))
5950 return omit_one_operand (type, integer_one_node, arg);
5952 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5953 return fold_build2 (GE_EXPR, type, arg,
5954 build_real (TREE_TYPE (arg), dconst0));
5956 else if (code == GT_EXPR || code == GE_EXPR)
5958 REAL_VALUE_TYPE c2;
5960 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5961 real_convert (&c2, mode, &c2);
5963 if (REAL_VALUE_ISINF (c2))
5965 /* sqrt(x) > y is x == +Inf, when y is very large. */
5966 if (HONOR_INFINITIES (mode))
5967 return fold_build2 (EQ_EXPR, type, arg,
5968 build_real (TREE_TYPE (arg), c2));
5970 /* sqrt(x) > y is always false, when y is very large
5971 and we don't care about infinities. */
5972 return omit_one_operand (type, integer_zero_node, arg);
5975 /* sqrt(x) > c is the same as x > c*c. */
5976 return fold_build2 (code, type, arg,
5977 build_real (TREE_TYPE (arg), c2));
5979 else if (code == LT_EXPR || code == LE_EXPR)
5981 REAL_VALUE_TYPE c2;
5983 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5984 real_convert (&c2, mode, &c2);
5986 if (REAL_VALUE_ISINF (c2))
5988 /* sqrt(x) < y is always true, when y is a very large
5989 value and we don't care about NaNs or Infinities. */
5990 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5991 return omit_one_operand (type, integer_one_node, arg);
5993 /* sqrt(x) < y is x != +Inf when y is very large and we
5994 don't care about NaNs. */
5995 if (! HONOR_NANS (mode))
5996 return fold_build2 (NE_EXPR, type, arg,
5997 build_real (TREE_TYPE (arg), c2));
5999 /* sqrt(x) < y is x >= 0 when y is very large and we
6000 don't care about Infinities. */
6001 if (! HONOR_INFINITIES (mode))
6002 return fold_build2 (GE_EXPR, type, arg,
6003 build_real (TREE_TYPE (arg), dconst0));
6005 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6006 if (lang_hooks.decls.global_bindings_p () != 0
6007 || CONTAINS_PLACEHOLDER_P (arg))
6008 return NULL_TREE;
6010 arg = save_expr (arg);
6011 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6012 fold_build2 (GE_EXPR, type, arg,
6013 build_real (TREE_TYPE (arg),
6014 dconst0)),
6015 fold_build2 (NE_EXPR, type, arg,
6016 build_real (TREE_TYPE (arg),
6017 c2)));
6020 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6021 if (! HONOR_NANS (mode))
6022 return fold_build2 (code, type, arg,
6023 build_real (TREE_TYPE (arg), c2));
6025 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6026 if (lang_hooks.decls.global_bindings_p () == 0
6027 && ! CONTAINS_PLACEHOLDER_P (arg))
6029 arg = save_expr (arg);
6030 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6031 fold_build2 (GE_EXPR, type, arg,
6032 build_real (TREE_TYPE (arg),
6033 dconst0)),
6034 fold_build2 (code, type, arg,
6035 build_real (TREE_TYPE (arg),
6036 c2)));
6041 return NULL_TREE;
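/* Illustrative results of the sqrt comparisons above (a sketch;
   "NaNs honored" means the default, non-finite-math semantics):

     sqrt (x) < -1.0  ->  0                       always false
     sqrt (x) > -1.0  ->  x >= 0.0                when NaNs are honored
     sqrt (x) > 3.0   ->  x > 9.0
     sqrt (x) < 3.0   ->  x >= 0.0 && x < 9.0     when NaNs are honored  */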
6044 /* Subroutine of fold() that optimizes comparisons against Infinities,
6045 either +Inf or -Inf.
6047 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6048 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6049 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6051 The function returns the constant folded tree if a simplification
6052 can be made, and NULL_TREE otherwise. */
6054 static tree
6055 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6057 enum machine_mode mode;
6058 REAL_VALUE_TYPE max;
6059 tree temp;
6060 bool neg;
6062 mode = TYPE_MODE (TREE_TYPE (arg0));
6064 /* For negative infinity swap the sense of the comparison. */
6065 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6066 if (neg)
6067 code = swap_tree_comparison (code);
6069 switch (code)
6071 case GT_EXPR:
6072 /* x > +Inf is always false, if we ignore sNaNs. */
6073 if (HONOR_SNANS (mode))
6074 return NULL_TREE;
6075 return omit_one_operand (type, integer_zero_node, arg0);
6077 case LE_EXPR:
6078 /* x <= +Inf is always true, if we don't care about NaNs. */
6079 if (! HONOR_NANS (mode))
6080 return omit_one_operand (type, integer_one_node, arg0);
6082 /* x <= +Inf is the same as x == x, i.e. !isnan (x). */
6083 if (lang_hooks.decls.global_bindings_p () == 0
6084 && ! CONTAINS_PLACEHOLDER_P (arg0))
6086 arg0 = save_expr (arg0);
6087 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6089 break;
6091 case EQ_EXPR:
6092 case GE_EXPR:
6093 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6094 real_maxval (&max, neg, mode);
6095 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6096 arg0, build_real (TREE_TYPE (arg0), max));
6098 case LT_EXPR:
6099 /* x < +Inf is always equal to x <= DBL_MAX. */
6100 real_maxval (&max, neg, mode);
6101 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6102 arg0, build_real (TREE_TYPE (arg0), max));
6104 case NE_EXPR:
6105 /* x != +Inf is always equal to !(x > DBL_MAX). */
6106 real_maxval (&max, neg, mode);
6107 if (! HONOR_NANS (mode))
6108 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6109 arg0, build_real (TREE_TYPE (arg0), max));
6111 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6112 arg0, build_real (TREE_TYPE (arg0), max));
6113 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6115 default:
6116 break;
6119 return NULL_TREE;
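/* Illustrative results for double (a sketch):

     x > +Inf   ->  0                       unless sNaNs are honored
     x <= +Inf  ->  1, or x == x            when NaNs are honored
     x == +Inf  ->  x > DBL_MAX
     x < +Inf   ->  x <= DBL_MAX
     x != +Inf  ->  x <= DBL_MAX, or !(x > DBL_MAX) when NaNs are honored

   For -Inf the comparison sense is swapped first, so e.g. x < -Inf
   becomes always false.  */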
6122 /* Subroutine of fold() that optimizes comparisons of a division by
6123 a nonzero integer constant against an integer constant, i.e.
6124 X/C1 op C2.
6126 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6127 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6128 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6130 The function returns the constant folded tree if a simplification
6131 can be made, and NULL_TREE otherwise. */
6133 static tree
6134 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6136 tree prod, tmp, hi, lo;
6137 tree arg00 = TREE_OPERAND (arg0, 0);
6138 tree arg01 = TREE_OPERAND (arg0, 1);
6139 unsigned HOST_WIDE_INT lpart;
6140 HOST_WIDE_INT hpart;
6141 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6142 bool neg_overflow;
6143 int overflow;
6145 /* We have to do this the hard way to detect unsigned overflow.
6146 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6147 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6148 TREE_INT_CST_HIGH (arg01),
6149 TREE_INT_CST_LOW (arg1),
6150 TREE_INT_CST_HIGH (arg1),
6151 &lpart, &hpart, unsigned_p);
6152 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6153 -1, overflow);
6154 neg_overflow = false;
6156 if (unsigned_p)
6158 tmp = int_const_binop (MINUS_EXPR, arg01,
6159 build_int_cst (TREE_TYPE (arg01), 1), 0);
6160 lo = prod;
6162 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6163 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6164 TREE_INT_CST_HIGH (prod),
6165 TREE_INT_CST_LOW (tmp),
6166 TREE_INT_CST_HIGH (tmp),
6167 &lpart, &hpart, unsigned_p);
6168 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6169 -1, overflow | TREE_OVERFLOW (prod));
6171 else if (tree_int_cst_sgn (arg01) >= 0)
6173 tmp = int_const_binop (MINUS_EXPR, arg01,
6174 build_int_cst (TREE_TYPE (arg01), 1), 0);
6175 switch (tree_int_cst_sgn (arg1))
6177 case -1:
6178 neg_overflow = true;
6179 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6180 hi = prod;
6181 break;
6183 case 0:
6184 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6185 hi = tmp;
6186 break;
6188 case 1:
6189 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6190 lo = prod;
6191 break;
6193 default:
6194 gcc_unreachable ();
6197 else
6199 /* A negative divisor reverses the relational operators. */
6200 code = swap_tree_comparison (code);
6202 tmp = int_const_binop (PLUS_EXPR, arg01,
6203 build_int_cst (TREE_TYPE (arg01), 1), 0);
6204 switch (tree_int_cst_sgn (arg1))
6206 case -1:
6207 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6208 lo = prod;
6209 break;
6211 case 0:
6212 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6213 lo = tmp;
6214 break;
6216 case 1:
6217 neg_overflow = true;
6218 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6219 hi = prod;
6220 break;
6222 default:
6223 gcc_unreachable ();
6227 switch (code)
6229 case EQ_EXPR:
6230 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6231 return omit_one_operand (type, integer_zero_node, arg00);
6232 if (TREE_OVERFLOW (hi))
6233 return fold_build2 (GE_EXPR, type, arg00, lo);
6234 if (TREE_OVERFLOW (lo))
6235 return fold_build2 (LE_EXPR, type, arg00, hi);
6236 return build_range_check (type, arg00, 1, lo, hi);
6238 case NE_EXPR:
6239 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6240 return omit_one_operand (type, integer_one_node, arg00);
6241 if (TREE_OVERFLOW (hi))
6242 return fold_build2 (LT_EXPR, type, arg00, lo);
6243 if (TREE_OVERFLOW (lo))
6244 return fold_build2 (GT_EXPR, type, arg00, hi);
6245 return build_range_check (type, arg00, 0, lo, hi);
6247 case LT_EXPR:
6248 if (TREE_OVERFLOW (lo))
6250 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6251 return omit_one_operand (type, tmp, arg00);
6253 return fold_build2 (LT_EXPR, type, arg00, lo);
6255 case LE_EXPR:
6256 if (TREE_OVERFLOW (hi))
6258 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6259 return omit_one_operand (type, tmp, arg00);
6261 return fold_build2 (LE_EXPR, type, arg00, hi);
6263 case GT_EXPR:
6264 if (TREE_OVERFLOW (hi))
6266 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6267 return omit_one_operand (type, tmp, arg00);
6269 return fold_build2 (GT_EXPR, type, arg00, hi);
6271 case GE_EXPR:
6272 if (TREE_OVERFLOW (lo))
6274 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6275 return omit_one_operand (type, tmp, arg00);
6277 return fold_build2 (GE_EXPR, type, arg00, lo);
6279 default:
6280 break;
6283 return NULL_TREE;
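/* Worked example (a sketch): for signed x, "x / 4 == 3" gives
   prod = 12, tmp = 3, lo = 12, hi = 15, so the comparison folds to the
   range check 12 <= x && x <= 15; likewise "x / 4 < 3" folds to
   x < 12.  */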
6287 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6288 equality/inequality test, then return a simplified form of the test
6289 using a sign test. Otherwise return NULL. TYPE is the desired
6290 result type. */
6292 static tree
6293 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6294 tree result_type)
6296 /* If this is testing a single bit, we can optimize the test. */
6297 if ((code == NE_EXPR || code == EQ_EXPR)
6298 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6299 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6301 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6302 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6303 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6305 if (arg00 != NULL_TREE
6306 /* This is only a win if casting to a signed type is cheap,
6307 i.e. when arg00's type is not a partial mode. */
6308 && TYPE_PRECISION (TREE_TYPE (arg00))
6309 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6311 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6312 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6313 result_type, fold_convert (stype, arg00),
6314 build_int_cst (stype, 0));
6318 return NULL_TREE;
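/* Illustrative example (a sketch, with x a 32-bit int):

     (x & 0x80000000) != 0  ->  x < 0
     (x & 0x80000000) == 0  ->  x >= 0

   valid because 0x80000000 is exactly the sign bit of x's type.  */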
6321 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6322 equality/inequality test, then return a simplified form of
6323 the test using shifts and logical operations. Otherwise return
6324 NULL. TYPE is the desired result type. */
6326 tree
6327 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6328 tree result_type)
6330 /* If this is testing a single bit, we can optimize the test. */
6331 if ((code == NE_EXPR || code == EQ_EXPR)
6332 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6333 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6335 tree inner = TREE_OPERAND (arg0, 0);
6336 tree type = TREE_TYPE (arg0);
6337 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6338 enum machine_mode operand_mode = TYPE_MODE (type);
6339 int ops_unsigned;
6340 tree signed_type, unsigned_type, intermediate_type;
6341 tree tem, one;
6343 /* First, see if we can fold the single bit test into a sign-bit
6344 test. */
6345 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6346 result_type);
6347 if (tem)
6348 return tem;
6350 /* Otherwise we have (A & C) != 0 where C is a single bit,
6351 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6352 Similarly for (A & C) == 0. */
6354 /* If INNER is a right shift of a constant and it plus BITNUM does
6355 not overflow, adjust BITNUM and INNER. */
6356 if (TREE_CODE (inner) == RSHIFT_EXPR
6357 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6358 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6359 && bitnum < TYPE_PRECISION (type)
6360 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6361 bitnum - TYPE_PRECISION (type)))
6363 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6364 inner = TREE_OPERAND (inner, 0);
6367 /* If we are going to be able to omit the AND below, we must do our
6368 operations as unsigned. If we must use the AND, we have a choice.
6369 Normally unsigned is faster, but for some machines signed is. */
6370 #ifdef LOAD_EXTEND_OP
6371 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6372 && !flag_syntax_only) ? 0 : 1;
6373 #else
6374 ops_unsigned = 1;
6375 #endif
6377 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6378 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6379 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6380 inner = fold_convert (intermediate_type, inner);
6382 if (bitnum != 0)
6383 inner = build2 (RSHIFT_EXPR, intermediate_type,
6384 inner, size_int (bitnum));
6386 one = build_int_cst (intermediate_type, 1);
6388 if (code == EQ_EXPR)
6389 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6391 /* Put the AND last so it can combine with more things. */
6392 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6394 /* Make sure to return the proper type. */
6395 inner = fold_convert (result_type, inner);
6397 return inner;
6399 return NULL_TREE;
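/* Illustrative example (a sketch): testing bit 3,

     (x & 8) != 0  ->  (x >> 3) & 1
     (x & 8) == 0  ->  ((x >> 3) ^ 1) & 1

   computed in an unsigned type when that allows the final AND to
   combine with surrounding code.  */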
6402 /* Check whether we are allowed to reorder operands arg0 and arg1,
6403 such that the evaluation of arg1 occurs before arg0. */
6405 static bool
6406 reorder_operands_p (tree arg0, tree arg1)
6408 if (! flag_evaluation_order)
6409 return true;
6410 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6411 return true;
6412 return ! TREE_SIDE_EFFECTS (arg0)
6413 && ! TREE_SIDE_EFFECTS (arg1);
6416 /* Test whether it is preferable to swap two operands, ARG0 and
6417 ARG1, for example because ARG0 is an integer constant and ARG1
6418 isn't. If REORDER is true, only recommend swapping if we can
6419 evaluate the operands in reverse order. */
6421 bool
6422 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6424 STRIP_SIGN_NOPS (arg0);
6425 STRIP_SIGN_NOPS (arg1);
6427 if (TREE_CODE (arg1) == INTEGER_CST)
6428 return 0;
6429 if (TREE_CODE (arg0) == INTEGER_CST)
6430 return 1;
6432 if (TREE_CODE (arg1) == REAL_CST)
6433 return 0;
6434 if (TREE_CODE (arg0) == REAL_CST)
6435 return 1;
6437 if (TREE_CODE (arg1) == COMPLEX_CST)
6438 return 0;
6439 if (TREE_CODE (arg0) == COMPLEX_CST)
6440 return 1;
6442 if (TREE_CONSTANT (arg1))
6443 return 0;
6444 if (TREE_CONSTANT (arg0))
6445 return 1;
6447 if (optimize_size)
6448 return 0;
6450 if (reorder && flag_evaluation_order
6451 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6452 return 0;
6454 if (DECL_P (arg1))
6455 return 0;
6456 if (DECL_P (arg0))
6457 return 1;
6459 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6460 for commutative and comparison operators. Ensuring a canonical
6461 form allows the optimizers to find additional redundancies without
6462 having to explicitly check for both orderings. */
6463 if (TREE_CODE (arg0) == SSA_NAME
6464 && TREE_CODE (arg1) == SSA_NAME
6465 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6466 return 1;
6468 return 0;
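/* Example (a sketch): for commutative operators fold uses this
   predicate to canonicalize operand order, e.g. 5 + a becomes a + 5,
   so later patterns only need to look for a constant in the second
   operand.  */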
6471 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6472 ARG0 is extended to a wider type. */
6474 static tree
6475 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6477 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6478 tree arg1_unw;
6479 tree shorter_type, outer_type;
6480 tree min, max;
6481 bool above, below;
6483 if (arg0_unw == arg0)
6484 return NULL_TREE;
6485 shorter_type = TREE_TYPE (arg0_unw);
6487 #ifdef HAVE_canonicalize_funcptr_for_compare
6488 /* Disable this optimization if we're casting a function pointer
6489 type on targets that require function pointer canonicalization. */
6490 if (HAVE_canonicalize_funcptr_for_compare
6491 && TREE_CODE (shorter_type) == POINTER_TYPE
6492 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6493 return NULL_TREE;
6494 #endif
6496 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6497 return NULL_TREE;
6499 arg1_unw = get_unwidened (arg1, shorter_type);
6501 /* If possible, express the comparison in the shorter mode. */
6502 if ((code == EQ_EXPR || code == NE_EXPR
6503 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6504 && (TREE_TYPE (arg1_unw) == shorter_type
6505 || (TREE_CODE (arg1_unw) == INTEGER_CST
6506 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6507 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6508 && int_fits_type_p (arg1_unw, shorter_type))))
6509 return fold_build2 (code, type, arg0_unw,
6510 fold_convert (shorter_type, arg1_unw));
6512 if (TREE_CODE (arg1_unw) != INTEGER_CST
6513 || TREE_CODE (shorter_type) != INTEGER_TYPE
6514 || !int_fits_type_p (arg1_unw, shorter_type))
6515 return NULL_TREE;
6517 /* If we are comparing with an integer that does not fit in the range
6518 of the shorter type, the result is known. */
6519 outer_type = TREE_TYPE (arg1_unw);
6520 min = lower_bound_in_type (outer_type, shorter_type);
6521 max = upper_bound_in_type (outer_type, shorter_type);
6523 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6524 max, arg1_unw));
6525 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6526 arg1_unw, min));
6528 switch (code)
6530 case EQ_EXPR:
6531 if (above || below)
6532 return omit_one_operand (type, integer_zero_node, arg0);
6533 break;
6535 case NE_EXPR:
6536 if (above || below)
6537 return omit_one_operand (type, integer_one_node, arg0);
6538 break;
6540 case LT_EXPR:
6541 case LE_EXPR:
6542 if (above)
6543 return omit_one_operand (type, integer_one_node, arg0);
6544 else if (below)
6545 return omit_one_operand (type, integer_zero_node, arg0);
6547 case GT_EXPR:
6548 case GE_EXPR:
6549 if (above)
6550 return omit_one_operand (type, integer_zero_node, arg0);
6551 else if (below)
6552 return omit_one_operand (type, integer_one_node, arg0);
6554 default:
6555 break;
6558 return NULL_TREE;
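/* Illustrative examples (a sketch), with x a signed short widened
   to int:

     (int) x == 70000  ->  0        70000 exceeds SHRT_MAX
     (int) x <= 70000  ->  1
     (int) x == 12     ->  x == 12, performed in the shorter type  */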
6561 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6562 ARG0 just the signedness is changed. */
6564 static tree
6565 fold_sign_changed_comparison (enum tree_code code, tree type,
6566 tree arg0, tree arg1)
6568 tree arg0_inner;
6569 tree inner_type, outer_type;
6571 if (TREE_CODE (arg0) != NOP_EXPR
6572 && TREE_CODE (arg0) != CONVERT_EXPR)
6573 return NULL_TREE;
6575 outer_type = TREE_TYPE (arg0);
6576 arg0_inner = TREE_OPERAND (arg0, 0);
6577 inner_type = TREE_TYPE (arg0_inner);
6579 #ifdef HAVE_canonicalize_funcptr_for_compare
6580 /* Disable this optimization if we're casting a function pointer
6581 type on targets that require function pointer canonicalization. */
6582 if (HAVE_canonicalize_funcptr_for_compare
6583 && TREE_CODE (inner_type) == POINTER_TYPE
6584 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6585 return NULL_TREE;
6586 #endif
6588 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6589 return NULL_TREE;
6591 if (TREE_CODE (arg1) != INTEGER_CST
6592 && !((TREE_CODE (arg1) == NOP_EXPR
6593 || TREE_CODE (arg1) == CONVERT_EXPR)
6594 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6595 return NULL_TREE;
6597 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6598 && code != NE_EXPR
6599 && code != EQ_EXPR)
6600 return NULL_TREE;
6602 if (TREE_CODE (arg1) == INTEGER_CST)
6603 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6604 TREE_INT_CST_HIGH (arg1), 0,
6605 TREE_OVERFLOW (arg1));
6606 else
6607 arg1 = fold_convert (inner_type, arg1);
6609 return fold_build2 (code, type, arg0_inner, arg1);
6612 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6613 the step of the array. Reconstructs s and delta in the case of s * delta
6614 being an integer constant (and thus already folded).
6615 ADDR is the address. OP1 is the multiplicative expression.
6616 If the function succeeds, the new address expression is returned. Otherwise
6617 NULL_TREE is returned. */
6619 static tree
6620 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6622 tree s, delta, step;
6623 tree ref = TREE_OPERAND (addr, 0), pref;
6624 tree ret, pos;
6625 tree itype;
6627 /* Canonicalize op1 into a possibly non-constant delta
6628 and an INTEGER_CST s. */
6629 if (TREE_CODE (op1) == MULT_EXPR)
6631 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6633 STRIP_NOPS (arg0);
6634 STRIP_NOPS (arg1);
6636 if (TREE_CODE (arg0) == INTEGER_CST)
6638 s = arg0;
6639 delta = arg1;
6641 else if (TREE_CODE (arg1) == INTEGER_CST)
6643 s = arg1;
6644 delta = arg0;
6646 else
6647 return NULL_TREE;
6649 else if (TREE_CODE (op1) == INTEGER_CST)
6651 delta = op1;
6652 s = NULL_TREE;
6654 else
6656 /* Treat op1 as delta * 1. */
6657 delta = op1;
6658 s = integer_one_node;
6661 for (;; ref = TREE_OPERAND (ref, 0))
6663 if (TREE_CODE (ref) == ARRAY_REF)
6665 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6666 if (! itype)
6667 continue;
6669 step = array_ref_element_size (ref);
6670 if (TREE_CODE (step) != INTEGER_CST)
6671 continue;
6673 if (s)
6675 if (! tree_int_cst_equal (step, s))
6676 continue;
6678 else
6680 /* Check whether delta is a multiple of step. */
6681 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6682 if (! tmp)
6683 continue;
6684 delta = tmp;
6687 break;
6690 if (!handled_component_p (ref))
6691 return NULL_TREE;
6694 /* We found a suitable array reference. Copy everything up to it,
6695 and replace the index. */
6697 pref = TREE_OPERAND (addr, 0);
6698 ret = copy_node (pref);
6699 pos = ret;
6701 while (pref != ref)
6703 pref = TREE_OPERAND (pref, 0);
6704 TREE_OPERAND (pos, 0) = copy_node (pref);
6705 pos = TREE_OPERAND (pos, 0);
6708 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6709 fold_convert (itype,
6710 TREE_OPERAND (pos, 1)),
6711 fold_convert (itype, delta));
6713 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
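/* Illustrative example (a sketch): for int a[] (element size 4),

     &a[i] + 4 * d  ->  &a[i + d]
     &a[i] + 8      ->  &a[i + 2]      8 is a multiple of the step  */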
6717 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6718 means A >= Y && A != MAX, but in this case we know that
6719 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6721 static tree
6722 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6724 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6726 if (TREE_CODE (bound) == LT_EXPR)
6727 a = TREE_OPERAND (bound, 0);
6728 else if (TREE_CODE (bound) == GT_EXPR)
6729 a = TREE_OPERAND (bound, 1);
6730 else
6731 return NULL_TREE;
6733 typea = TREE_TYPE (a);
6734 if (!INTEGRAL_TYPE_P (typea)
6735 && !POINTER_TYPE_P (typea))
6736 return NULL_TREE;
6738 if (TREE_CODE (ineq) == LT_EXPR)
6740 a1 = TREE_OPERAND (ineq, 1);
6741 y = TREE_OPERAND (ineq, 0);
6743 else if (TREE_CODE (ineq) == GT_EXPR)
6745 a1 = TREE_OPERAND (ineq, 0);
6746 y = TREE_OPERAND (ineq, 1);
6748 else
6749 return NULL_TREE;
6751 if (TREE_TYPE (a1) != typea)
6752 return NULL_TREE;
6754 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6755 if (!integer_onep (diff))
6756 return NULL_TREE;
6758 return fold_build2 (GE_EXPR, type, a, y);
6761 /* Fold a sum or difference of at least one multiplication.
6762 Returns the folded tree or NULL if no simplification could be made. */
6764 static tree
6765 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6767 tree arg00, arg01, arg10, arg11;
6768 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6770 /* (A * C) +- (B * C) -> (A+-B) * C.
6771 (A * C) +- A -> A * (C+-1).
6772 We are most concerned about the case where C is a constant,
6773 but other combinations show up during loop reduction. Since
6774 it is not difficult, try all four possibilities. */
6776 if (TREE_CODE (arg0) == MULT_EXPR)
6778 arg00 = TREE_OPERAND (arg0, 0);
6779 arg01 = TREE_OPERAND (arg0, 1);
6781 else
6783 arg00 = arg0;
6784 arg01 = build_one_cst (type);
6786 if (TREE_CODE (arg1) == MULT_EXPR)
6788 arg10 = TREE_OPERAND (arg1, 0);
6789 arg11 = TREE_OPERAND (arg1, 1);
6791 else
6793 arg10 = arg1;
6794 arg11 = build_one_cst (type);
6796 same = NULL_TREE;
6798 if (operand_equal_p (arg01, arg11, 0))
6799 same = arg01, alt0 = arg00, alt1 = arg10;
6800 else if (operand_equal_p (arg00, arg10, 0))
6801 same = arg00, alt0 = arg01, alt1 = arg11;
6802 else if (operand_equal_p (arg00, arg11, 0))
6803 same = arg00, alt0 = arg01, alt1 = arg10;
6804 else if (operand_equal_p (arg01, arg10, 0))
6805 same = arg01, alt0 = arg00, alt1 = arg11;
6807 /* No identical multiplicands; see if we can find a common
6808 power-of-two factor in non-power-of-two multiplies. This
6809 can help in multi-dimensional array access. */
6810 else if (host_integerp (arg01, 0)
6811 && host_integerp (arg11, 0))
6813 HOST_WIDE_INT int01, int11, tmp;
6814 bool swap = false;
6815 tree maybe_same;
6816 int01 = TREE_INT_CST_LOW (arg01);
6817 int11 = TREE_INT_CST_LOW (arg11);
6819 /* Move min of absolute values to int11. */
6820 if ((int01 >= 0 ? int01 : -int01)
6821 < (int11 >= 0 ? int11 : -int11))
6823 tmp = int01, int01 = int11, int11 = tmp;
6824 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6825 maybe_same = arg01;
6826 swap = true;
6828 else
6829 maybe_same = arg11;
6831 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6833 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6834 build_int_cst (TREE_TYPE (arg00),
6835 int01 / int11));
6836 alt1 = arg10;
6837 same = maybe_same;
6838 if (swap)
6839 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6843 if (same)
6844 return fold_build2 (MULT_EXPR, type,
6845 fold_build2 (code, type,
6846 fold_convert (type, alt0),
6847 fold_convert (type, alt1)),
6848 fold_convert (type, same));
6850 return NULL_TREE;
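/* Illustrative examples (a sketch):

     a * c + b * c  ->  (a + b) * c
     a * 4 + a      ->  a * 5              the bare a counts as a * 1
     a * 8 + b * 4  ->  (a * 2 + b) * 4    common power-of-two factor  */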
6853 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6854 specified by EXPR into the buffer PTR of length LEN bytes.
6855 Return the number of bytes placed in the buffer, or zero
6856 upon failure. */
6858 static int
6859 native_encode_int (tree expr, unsigned char *ptr, int len)
6861 tree type = TREE_TYPE (expr);
6862 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6863 int byte, offset, word, words;
6864 unsigned char value;
6866 if (total_bytes > len)
6867 return 0;
6868 words = total_bytes / UNITS_PER_WORD;
6870 for (byte = 0; byte < total_bytes; byte++)
6872 int bitpos = byte * BITS_PER_UNIT;
6873 if (bitpos < HOST_BITS_PER_WIDE_INT)
6874 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6875 else
6876 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6877 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6879 if (total_bytes > UNITS_PER_WORD)
6881 word = byte / UNITS_PER_WORD;
6882 if (WORDS_BIG_ENDIAN)
6883 word = (words - 1) - word;
6884 offset = word * UNITS_PER_WORD;
6885 if (BYTES_BIG_ENDIAN)
6886 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6887 else
6888 offset += byte % UNITS_PER_WORD;
6890 else
6891 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6892 ptr[offset] = value;
6894 return total_bytes;
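/* Example (a sketch): encoding the 32-bit INTEGER_CST 0x11223344
   stores bytes 44 33 22 11 on a little-endian target and
   11 22 33 44 on a big-endian one.  */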
6898 /* Subroutine of native_encode_expr. Encode the REAL_CST
6899 specified by EXPR into the buffer PTR of length LEN bytes.
6900 Return the number of bytes placed in the buffer, or zero
6901 upon failure. */
6903 static int
6904 native_encode_real (tree expr, unsigned char *ptr, int len)
6906 tree type = TREE_TYPE (expr);
6907 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6908 int byte, offset, word, words;
6909 unsigned char value;
6911 /* Each long carries exactly 32 bits of the representation, no
6912 matter the size of the host's long. We handle floating point
6913 representations with up to 192 bits. */
6914 long tmp[6];
6916 if (total_bytes > len)
6917 return 0;
6918 words = total_bytes / UNITS_PER_WORD;
6920 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6922 for (byte = 0; byte < total_bytes; byte++)
6924 int bitpos = byte * BITS_PER_UNIT;
6925 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6927 if (total_bytes > UNITS_PER_WORD)
6929 word = byte / UNITS_PER_WORD;
6930 if (FLOAT_WORDS_BIG_ENDIAN)
6931 word = (words - 1) - word;
6932 offset = word * UNITS_PER_WORD;
6933 if (BYTES_BIG_ENDIAN)
6934 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6935 else
6936 offset += byte % UNITS_PER_WORD;
6938 else
6939 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6940 ptr[offset] = value;
6942 return total_bytes;
6945 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6946 specified by EXPR into the buffer PTR of length LEN bytes.
6947 Return the number of bytes placed in the buffer, or zero
6948 upon failure. */
6950 static int
6951 native_encode_complex (tree expr, unsigned char *ptr, int len)
6953 int rsize, isize;
6954 tree part;
6956 part = TREE_REALPART (expr);
6957 rsize = native_encode_expr (part, ptr, len);
6958 if (rsize == 0)
6959 return 0;
6960 part = TREE_IMAGPART (expr);
6961 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6962 if (isize != rsize)
6963 return 0;
6964 return rsize + isize;
6968 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6969 specified by EXPR into the buffer PTR of length LEN bytes.
6970 Return the number of bytes placed in the buffer, or zero
6971 upon failure. */
6973 static int
6974 native_encode_vector (tree expr, unsigned char *ptr, int len)
6976 int i, size, offset, count;
6977 tree itype, elem, elements;
6979 offset = 0;
6980 elements = TREE_VECTOR_CST_ELTS (expr);
6981 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6982 itype = TREE_TYPE (TREE_TYPE (expr));
6983 size = GET_MODE_SIZE (TYPE_MODE (itype));
6984 for (i = 0; i < count; i++)
6986 if (elements)
6988 elem = TREE_VALUE (elements);
6989 elements = TREE_CHAIN (elements);
6991 else
6992 elem = NULL_TREE;
6994 if (elem)
6996 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6997 return 0;
6999 else
7001 if (offset + size > len)
7002 return 0;
7003 memset (ptr+offset, 0, size);
7005 offset += size;
7007 return offset;
7011 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7012 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7013 buffer PTR of length LEN bytes. Return the number of bytes
7014 placed in the buffer, or zero upon failure. */
7016 static int
7017 native_encode_expr (tree expr, unsigned char *ptr, int len)
7019 switch (TREE_CODE (expr))
7021 case INTEGER_CST:
7022 return native_encode_int (expr, ptr, len);
7024 case REAL_CST:
7025 return native_encode_real (expr, ptr, len);
7027 case COMPLEX_CST:
7028 return native_encode_complex (expr, ptr, len);
7030 case VECTOR_CST:
7031 return native_encode_vector (expr, ptr, len);
7033 default:
7034 return 0;
7039 /* Subroutine of native_interpret_expr. Interpret the contents of
7040 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7041 If the buffer cannot be interpreted, return NULL_TREE. */
7043 static tree
7044 native_interpret_int (tree type, unsigned char *ptr, int len)
7046 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7047 int byte, offset, word, words;
7048 unsigned char value;
7049 unsigned HOST_WIDE_INT lo = 0;
7050 HOST_WIDE_INT hi = 0;
7052 if (total_bytes > len)
7053 return NULL_TREE;
7054 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7055 return NULL_TREE;
7056 words = total_bytes / UNITS_PER_WORD;
7058 for (byte = 0; byte < total_bytes; byte++)
7060 int bitpos = byte * BITS_PER_UNIT;
7061 if (total_bytes > UNITS_PER_WORD)
7063 word = byte / UNITS_PER_WORD;
7064 if (WORDS_BIG_ENDIAN)
7065 word = (words - 1) - word;
7066 offset = word * UNITS_PER_WORD;
7067 if (BYTES_BIG_ENDIAN)
7068 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7069 else
7070 offset += byte % UNITS_PER_WORD;
7072 else
7073 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7074 value = ptr[offset];
7076 if (bitpos < HOST_BITS_PER_WIDE_INT)
7077 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7078 else
7079 hi |= (unsigned HOST_WIDE_INT) value
7080 << (bitpos - HOST_BITS_PER_WIDE_INT);
7083 return build_int_cst_wide_type (type, lo, hi);
7087 /* Subroutine of native_interpret_expr. Interpret the contents of
7088 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7089 If the buffer cannot be interpreted, return NULL_TREE. */
7091 static tree
7092 native_interpret_real (tree type, unsigned char *ptr, int len)
7094 enum machine_mode mode = TYPE_MODE (type);
7095 int total_bytes = GET_MODE_SIZE (mode);
7096 int byte, offset, word, words;
7097 unsigned char value;
7098 /* Each long carries exactly 32 bits of the representation, no
7099 matter the size of the host's long. We handle floating point
7100 representations with up to 192 bits. */
7101 REAL_VALUE_TYPE r;
7102 long tmp[6];
7104 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7105 if (total_bytes > len || total_bytes > 24)
7106 return NULL_TREE;
7107 words = total_bytes / UNITS_PER_WORD;
7109 memset (tmp, 0, sizeof (tmp));
7110 for (byte = 0; byte < total_bytes; byte++)
7112 int bitpos = byte * BITS_PER_UNIT;
7113 if (total_bytes > UNITS_PER_WORD)
7115 word = byte / UNITS_PER_WORD;
7116 if (FLOAT_WORDS_BIG_ENDIAN)
7117 word = (words - 1) - word;
7118 offset = word * UNITS_PER_WORD;
7119 if (BYTES_BIG_ENDIAN)
7120 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7121 else
7122 offset += byte % UNITS_PER_WORD;
7124 else
7125 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7126 value = ptr[offset];
7128 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7131 real_from_target (&r, tmp, mode);
7132 return build_real (type, r);
7136 /* Subroutine of native_interpret_expr. Interpret the contents of
7137 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7138 If the buffer cannot be interpreted, return NULL_TREE. */
7140 static tree
7141 native_interpret_complex (tree type, unsigned char *ptr, int len)
7143 tree etype, rpart, ipart;
7144 int size;
7146 etype = TREE_TYPE (type);
7147 size = GET_MODE_SIZE (TYPE_MODE (etype));
7148 if (size * 2 > len)
7149 return NULL_TREE;
7150 rpart = native_interpret_expr (etype, ptr, size);
7151 if (!rpart)
7152 return NULL_TREE;
7153 ipart = native_interpret_expr (etype, ptr+size, size);
7154 if (!ipart)
7155 return NULL_TREE;
7156 return build_complex (type, rpart, ipart);
7160 /* Subroutine of native_interpret_expr. Interpret the contents of
7161 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7162 If the buffer cannot be interpreted, return NULL_TREE. */
7164 static tree
7165 native_interpret_vector (tree type, unsigned char *ptr, int len)
7167 tree etype, elem, elements;
7168 int i, size, count;
7170 etype = TREE_TYPE (type);
7171 size = GET_MODE_SIZE (TYPE_MODE (etype));
7172 count = TYPE_VECTOR_SUBPARTS (type);
7173 if (size * count > len)
7174 return NULL_TREE;
7176 elements = NULL_TREE;
7177 for (i = count - 1; i >= 0; i--)
7179 elem = native_interpret_expr (etype, ptr+(i*size), size);
7180 if (!elem)
7181 return NULL_TREE;
7182 elements = tree_cons (NULL_TREE, elem, elements);
7184 return build_vector (type, elements);
7188 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7189 the buffer PTR of length LEN as a constant of type TYPE. For
7190 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7191 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7192 return NULL_TREE. */
7194 static tree
7195 native_interpret_expr (tree type, unsigned char *ptr, int len)
7197 switch (TREE_CODE (type))
7199 case INTEGER_TYPE:
7200 case ENUMERAL_TYPE:
7201 case BOOLEAN_TYPE:
7202 return native_interpret_int (type, ptr, len);
7204 case REAL_TYPE:
7205 return native_interpret_real (type, ptr, len);
7207 case COMPLEX_TYPE:
7208 return native_interpret_complex (type, ptr, len);
7210 case VECTOR_TYPE:
7211 return native_interpret_vector (type, ptr, len);
7213 default:
7214 return NULL_TREE;
7219 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7220 TYPE at compile-time. If we're unable to perform the conversion
7221 return NULL_TREE. */
7223 static tree
7224 fold_view_convert_expr (tree type, tree expr)
7226 /* We support up to 512-bit values (for V8DFmode). */
7227 unsigned char buffer[64];
7228 int len;
7230 /* Check that the host and target are sane. */
7231 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7232 return NULL_TREE;
7234 len = native_encode_expr (expr, buffer, sizeof (buffer));
7235 if (len == 0)
7236 return NULL_TREE;
7238 return native_interpret_expr (type, buffer, len);
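/* Illustrative example (a sketch): on a little-endian target with IEEE
   single precision, VIEW_CONVERT_EXPR<int>(1.0f) encodes 1.0f as the
   bytes 00 00 80 3f and re-interprets them as an int, yielding
   0x3f800000.  */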
7242 /* Fold a unary expression of code CODE and type TYPE with operand
7243 OP0. Return the folded expression if folding is successful.
7244 Otherwise, return NULL_TREE. */
7246 tree
7247 fold_unary (enum tree_code code, tree type, tree op0)
7249 tree tem;
7250 tree arg0;
7251 enum tree_code_class kind = TREE_CODE_CLASS (code);
7253 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7254 && TREE_CODE_LENGTH (code) == 1);
7256 arg0 = op0;
7257 if (arg0)
7259 if (code == NOP_EXPR || code == CONVERT_EXPR
7260 || code == FLOAT_EXPR || code == ABS_EXPR)
7262 /* Don't use STRIP_NOPS, because signedness of argument type
7263 matters. */
7264 STRIP_SIGN_NOPS (arg0);
7266 else
7268 /* Strip any conversions that don't change the mode. This
7269 is safe for every expression, except for a comparison
7270 expression because its signedness is derived from its
7271 operands.
7273 Note that this is done as an internal manipulation within
7274 the constant folder, in order to find the simplest
7275 representation of the arguments so that their form can be
7276 studied. In any case, the appropriate type conversions
7277 should be put back in the tree that will get out of the
7278 constant folder. */
7279 STRIP_NOPS (arg0);
7283 if (TREE_CODE_CLASS (code) == tcc_unary)
7285 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7286 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7287 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7288 else if (TREE_CODE (arg0) == COND_EXPR)
7290 tree arg01 = TREE_OPERAND (arg0, 1);
7291 tree arg02 = TREE_OPERAND (arg0, 2);
7292 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7293 arg01 = fold_build1 (code, type, arg01);
7294 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7295 arg02 = fold_build1 (code, type, arg02);
7296 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7297 arg01, arg02);
7299 /* If this was a conversion, and all we did was to move it
7300 inside the COND_EXPR, bring it back out. But leave it if
7301 it is a conversion from integer to integer and the
7302 result precision is no wider than a word since such a
7303 conversion is cheap and may be optimized away by combine,
7304 while it couldn't if it were outside the COND_EXPR. Then return
7305 so we don't get into an infinite recursion loop taking the
7306 conversion out and then back in. */
7308 if ((code == NOP_EXPR || code == CONVERT_EXPR
7309 || code == NON_LVALUE_EXPR)
7310 && TREE_CODE (tem) == COND_EXPR
7311 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7312 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7313 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7314 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7315 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7316 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7317 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7318 && (INTEGRAL_TYPE_P
7319 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7320 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7321 || flag_syntax_only))
7322 tem = build1 (code, type,
7323 build3 (COND_EXPR,
7324 TREE_TYPE (TREE_OPERAND
7325 (TREE_OPERAND (tem, 1), 0)),
7326 TREE_OPERAND (tem, 0),
7327 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7328 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7329 return tem;
7331 else if (COMPARISON_CLASS_P (arg0))
7333 if (TREE_CODE (type) == BOOLEAN_TYPE)
7335 arg0 = copy_node (arg0);
7336 TREE_TYPE (arg0) = type;
7337 return arg0;
7339 else if (TREE_CODE (type) != INTEGER_TYPE)
7340 return fold_build3 (COND_EXPR, type, arg0,
7341 fold_build1 (code, type,
7342 integer_one_node),
7343 fold_build1 (code, type,
7344 integer_zero_node));
7348 switch (code)
7350 case NOP_EXPR:
7351 case FLOAT_EXPR:
7352 case CONVERT_EXPR:
7353 case FIX_TRUNC_EXPR:
7354 if (TREE_TYPE (op0) == type)
7355 return op0;
7357 /* If we have (type) (a CMP b) and type is an integral type, return
7358 new expression involving the new type. */
7359 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7360 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7361 TREE_OPERAND (op0, 1));
7363 /* Handle cases of two conversions in a row. */
7364 if (TREE_CODE (op0) == NOP_EXPR
7365 || TREE_CODE (op0) == CONVERT_EXPR)
7367 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7368 tree inter_type = TREE_TYPE (op0);
7369 int inside_int = INTEGRAL_TYPE_P (inside_type);
7370 int inside_ptr = POINTER_TYPE_P (inside_type);
7371 int inside_float = FLOAT_TYPE_P (inside_type);
7372 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7373 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7374 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7375 int inter_int = INTEGRAL_TYPE_P (inter_type);
7376 int inter_ptr = POINTER_TYPE_P (inter_type);
7377 int inter_float = FLOAT_TYPE_P (inter_type);
7378 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7379 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7380 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7381 int final_int = INTEGRAL_TYPE_P (type);
7382 int final_ptr = POINTER_TYPE_P (type);
7383 int final_float = FLOAT_TYPE_P (type);
7384 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7385 unsigned int final_prec = TYPE_PRECISION (type);
7386 int final_unsignedp = TYPE_UNSIGNED (type);
7388 /* In addition to the cases of two conversions in a row
7389 handled below, if we are converting something to its own
7390 type via an object of identical or wider precision, neither
7391 conversion is needed. */
7392 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7393 && (((inter_int || inter_ptr) && final_int)
7394 || (inter_float && final_float))
7395 && inter_prec >= final_prec)
7396 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7398 /* Likewise, if the intermediate and final types are either both
7399 float or both integer, we don't need the middle conversion if
7400 it is wider than the final type and doesn't change the signedness
7401 (for integers). Avoid this if the final type is a pointer
7402 since then we sometimes need the inner conversion. Likewise if
7403 the outer has a precision not equal to the size of its mode. */
7404 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7405 || (inter_float && inside_float)
7406 || (inter_vec && inside_vec))
7407 && inter_prec >= inside_prec
7408 && (inter_float || inter_vec
7409 || inter_unsignedp == inside_unsignedp)
7410 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7411 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7412 && ! final_ptr
7413 && (! final_vec || inter_prec == inside_prec))
7414 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7416 /* If we have a sign-extension of a zero-extended value, we can
7417 replace that by a single zero-extension. */
7418 if (inside_int && inter_int && final_int
7419 && inside_prec < inter_prec && inter_prec < final_prec
7420 && inside_unsignedp && !inter_unsignedp)
7421 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
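/* Illustration of the sign-extension-of-zero-extension fold above
   (hypothetical widths: 8-bit char, 16-bit short, 32-bit int):

     (int) (short) (unsigned char) c

   zero-extends C to 16 bits and then sign-extends to 32.  The
   intermediate value is at most 255, so its sign bit is never set
   and the chain is equivalent to the single zero-extension
   (int) (unsigned char) c.  */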
7423 /* Two conversions in a row are not needed unless:
7424 - some conversion is floating-point (overstrict for now), or
7425 - some conversion is a vector (overstrict for now), or
7426 - the intermediate type is narrower than both initial and
7427 final, or
7428 - the intermediate type and innermost type differ in signedness,
7429 and the outermost type is wider than the intermediate, or
7430 - the initial type is a pointer type and the precisions of the
7431 intermediate and final types differ, or
7432 - the final type is a pointer type and the precisions of the
7433 initial and intermediate types differ, or
7434 - the final type is a pointer type and the initial type is not, or
7435 - the initial type is a pointer to an array and the final type
7436 is not. */
7437 if (! inside_float && ! inter_float && ! final_float
7438 && ! inside_vec && ! inter_vec && ! final_vec
7439 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7440 && ! (inside_int && inter_int
7441 && inter_unsignedp != inside_unsignedp
7442 && inter_prec < final_prec)
7443 && ((inter_unsignedp && inter_prec > inside_prec)
7444 == (final_unsignedp && final_prec > inter_prec))
7445 && ! (inside_ptr && inter_prec != final_prec)
7446 && ! (final_ptr && inside_prec != inter_prec)
7447 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7448 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7449 && final_ptr == inside_ptr
7450 && ! (inside_ptr
7451 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7452 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7453 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7456 /* Handle (T *)&A.B.C for A being of type T and B and C
7457 living at offset zero. This occurs frequently in
7458 C++ upcasting and then accessing the base. */
7459 if (TREE_CODE (op0) == ADDR_EXPR
7460 && POINTER_TYPE_P (type)
7461 && handled_component_p (TREE_OPERAND (op0, 0)))
7463 HOST_WIDE_INT bitsize, bitpos;
7464 tree offset;
7465 enum machine_mode mode;
7466 int unsignedp, volatilep;
7467 tree base = TREE_OPERAND (op0, 0);
7468 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7469 &mode, &unsignedp, &volatilep, false);
7470 /* If the reference was to a (constant) zero offset, we can use
7471 the address of the base if it has the same base type
7472 as the result type. */
7473 if (! offset && bitpos == 0
7474 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7475 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7476 return fold_convert (type, build_fold_addr_expr (base));
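/* Illustrative source for the fold above (hypothetical names, usual
   layout rules assumed):

     struct B { int i; };
     struct A { struct B b; };

   B lives at offset zero inside A, so (struct A *) &a.b folds to &a:
   get_inner_reference reports a constant bit position of zero and the
   base object has the pointed-to type.  */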
7479 if ((TREE_CODE (op0) == MODIFY_EXPR
7480 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7481 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7482 /* Detect assigning a bitfield. */
7483 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7484 && DECL_BIT_FIELD
7485 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7487 /* Don't leave an assignment inside a conversion
7488 unless assigning a bitfield. */
7489 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7490 /* First do the assignment, then return converted constant. */
7491 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7492 TREE_NO_WARNING (tem) = 1;
7493 TREE_USED (tem) = 1;
7494 return tem;
7497 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7498 constant (if x has signed type, the sign bit cannot be set
7499 in c). This folds extension into the BIT_AND_EXPR. */
7500 if (INTEGRAL_TYPE_P (type)
7501 && TREE_CODE (type) != BOOLEAN_TYPE
7502 && TREE_CODE (op0) == BIT_AND_EXPR
7503 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7505 tree and = op0;
7506 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7507 int change = 0;
7509 if (TYPE_UNSIGNED (TREE_TYPE (and))
7510 || (TYPE_PRECISION (type)
7511 <= TYPE_PRECISION (TREE_TYPE (and))))
7512 change = 1;
7513 else if (TYPE_PRECISION (TREE_TYPE (and1))
7514 <= HOST_BITS_PER_WIDE_INT
7515 && host_integerp (and1, 1))
7517 unsigned HOST_WIDE_INT cst;
7519 cst = tree_low_cst (and1, 1);
7520 cst &= (HOST_WIDE_INT) -1
7521 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7522 change = (cst == 0);
7523 #ifdef LOAD_EXTEND_OP
7524 if (change
7525 && !flag_syntax_only
7526 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7527 == ZERO_EXTEND))
7529 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7530 and0 = fold_convert (uns, and0);
7531 and1 = fold_convert (uns, and1);
7533 #endif
7535 if (change)
7537 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7538 TREE_INT_CST_HIGH (and1), 0,
7539 TREE_OVERFLOW (and1));
7540 return fold_build2 (BIT_AND_EXPR, type,
7541 fold_convert (type, and0), tem);
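/* Example of the BIT_AND narrowing above (hypothetical widths): for a
   16-bit signed short s,

     (int) (s & 0x7f)   ==>   (int) s & 0x7f

   is safe because 0x7f leaves the sign bit of the narrower type clear,
   so sign- and zero-extension of the masked value agree.  */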
7545 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7546 T2 being pointers to types of the same size. */
7547 if (POINTER_TYPE_P (type)
7548 && BINARY_CLASS_P (arg0)
7549 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7550 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7552 tree arg00 = TREE_OPERAND (arg0, 0);
7553 tree t0 = type;
7554 tree t1 = TREE_TYPE (arg00);
7555 tree tt0 = TREE_TYPE (t0);
7556 tree tt1 = TREE_TYPE (t1);
7557 tree s0 = TYPE_SIZE (tt0);
7558 tree s1 = TYPE_SIZE (tt1);
7560 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7561 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7562 TREE_OPERAND (arg0, 1));
7565 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7566 of the same precision, and X is an integer type not narrower than
7567 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7568 if (INTEGRAL_TYPE_P (type)
7569 && TREE_CODE (op0) == BIT_NOT_EXPR
7570 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7571 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7572 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7573 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7575 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7576 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7577 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7578 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7581 tem = fold_convert_const (code, type, arg0);
7582 return tem ? tem : NULL_TREE;
7584 case VIEW_CONVERT_EXPR:
7585 if (TREE_TYPE (op0) == type)
7586 return op0;
7587 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7588 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7589 return fold_view_convert_expr (type, op0);
7591 case NEGATE_EXPR:
7592 tem = fold_negate_expr (arg0);
7593 if (tem)
7594 return fold_convert (type, tem);
7595 return NULL_TREE;
7597 case ABS_EXPR:
7598 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7599 return fold_abs_const (arg0, type);
7600 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7601 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7602 /* Convert fabs((double)float) into (double)fabsf(float). */
7603 else if (TREE_CODE (arg0) == NOP_EXPR
7604 && TREE_CODE (type) == REAL_TYPE)
7606 tree targ0 = strip_float_extensions (arg0);
7607 if (targ0 != arg0)
7608 return fold_convert (type, fold_build1 (ABS_EXPR,
7609 TREE_TYPE (targ0),
7610 targ0));
7612 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7613 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7614 return arg0;
7616 /* Strip sign ops from argument. */
7617 if (TREE_CODE (type) == REAL_TYPE)
7619 tem = fold_strip_sign_ops (arg0);
7620 if (tem)
7621 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7623 return NULL_TREE;
7625 case CONJ_EXPR:
7626 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7627 return fold_convert (type, arg0);
7628 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7630 tree itype = TREE_TYPE (type);
7631 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7632 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7633 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7635 if (TREE_CODE (arg0) == COMPLEX_CST)
7637 tree itype = TREE_TYPE (type);
7638 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7639 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7640 return build_complex (type, rpart, negate_expr (ipart));
7642 if (TREE_CODE (arg0) == CONJ_EXPR)
7643 return fold_convert (type, TREE_OPERAND (arg0, 0));
7644 return NULL_TREE;
7646 case BIT_NOT_EXPR:
7647 if (TREE_CODE (arg0) == INTEGER_CST)
7648 return fold_not_const (arg0, type);
7649 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7650 return TREE_OPERAND (arg0, 0);
7651 /* Convert ~ (-A) to A - 1. */
7652 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7653 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7654 build_int_cst (type, 1));
7655 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7656 else if (INTEGRAL_TYPE_P (type)
7657 && ((TREE_CODE (arg0) == MINUS_EXPR
7658 && integer_onep (TREE_OPERAND (arg0, 1)))
7659 || (TREE_CODE (arg0) == PLUS_EXPR
7660 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7661 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
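/* Two's-complement identities behind the preceding folds (a sketch;
   values are hypothetical):  ~x == -x - 1, hence ~(-a) == a - 1 and
   ~(a - 1) == -(a - 1) - 1 == -a, the latter covering ~(a + -1)
   as well.  */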
7662 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7663 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7664 && (tem = fold_unary (BIT_NOT_EXPR, type,
7665 fold_convert (type,
7666 TREE_OPERAND (arg0, 0)))))
7667 return fold_build2 (BIT_XOR_EXPR, type, tem,
7668 fold_convert (type, TREE_OPERAND (arg0, 1)));
7669 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7670 && (tem = fold_unary (BIT_NOT_EXPR, type,
7671 fold_convert (type,
7672 TREE_OPERAND (arg0, 1)))))
7673 return fold_build2 (BIT_XOR_EXPR, type,
7674 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7676 return NULL_TREE;
7678 case TRUTH_NOT_EXPR:
7679 /* The argument to invert_truthvalue must have Boolean type. */
7680 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7681 arg0 = fold_convert (boolean_type_node, arg0);
7683 /* Note that the operand of this must be an int
7684 and its values must be 0 or 1.
7685 ("true" is a fixed value perhaps depending on the language,
7686 but we don't handle values other than 1 correctly yet.) */
7687 tem = fold_truth_not_expr (arg0);
7688 if (!tem)
7689 return NULL_TREE;
7690 return fold_convert (type, tem);
7692 case REALPART_EXPR:
7693 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7694 return fold_convert (type, arg0);
7695 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7696 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7697 TREE_OPERAND (arg0, 1));
7698 if (TREE_CODE (arg0) == COMPLEX_CST)
7699 return fold_convert (type, TREE_REALPART (arg0));
7700 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7702 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7703 tem = fold_build2 (TREE_CODE (arg0), itype,
7704 fold_build1 (REALPART_EXPR, itype,
7705 TREE_OPERAND (arg0, 0)),
7706 fold_build1 (REALPART_EXPR, itype,
7707 TREE_OPERAND (arg0, 1)));
7708 return fold_convert (type, tem);
7710 if (TREE_CODE (arg0) == CONJ_EXPR)
7712 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7713 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7714 return fold_convert (type, tem);
7716 if (TREE_CODE (arg0) == CALL_EXPR)
7718 tree fn = get_callee_fndecl (arg0);
7719 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7720 switch (DECL_FUNCTION_CODE (fn))
7722 CASE_FLT_FN (BUILT_IN_CEXPI):
7723 fn = mathfn_built_in (type, BUILT_IN_COS);
7724 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7726 default:;
7729 return NULL_TREE;
7731 case IMAGPART_EXPR:
7732 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7733 return fold_convert (type, integer_zero_node);
7734 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7735 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7736 TREE_OPERAND (arg0, 0));
7737 if (TREE_CODE (arg0) == COMPLEX_CST)
7738 return fold_convert (type, TREE_IMAGPART (arg0));
7739 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7741 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7742 tem = fold_build2 (TREE_CODE (arg0), itype,
7743 fold_build1 (IMAGPART_EXPR, itype,
7744 TREE_OPERAND (arg0, 0)),
7745 fold_build1 (IMAGPART_EXPR, itype,
7746 TREE_OPERAND (arg0, 1)));
7747 return fold_convert (type, tem);
7749 if (TREE_CODE (arg0) == CONJ_EXPR)
7751 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7752 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7753 return fold_convert (type, negate_expr (tem));
7755 if (TREE_CODE (arg0) == CALL_EXPR)
7757 tree fn = get_callee_fndecl (arg0);
7758 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7759 switch (DECL_FUNCTION_CODE (fn))
7761 CASE_FLT_FN (BUILT_IN_CEXPI):
7762 fn = mathfn_built_in (type, BUILT_IN_SIN);
7763 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7765 default:;
7768 return NULL_TREE;
7770 default:
7771 return NULL_TREE;
7772 } /* switch (code) */
7775 /* Fold a binary expression of code CODE and type TYPE with operands
7776 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7777 Return the folded expression if folding is successful. Otherwise,
7778 return NULL_TREE. */
7780 static tree
7781 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7783 enum tree_code compl_code;
7785 if (code == MIN_EXPR)
7786 compl_code = MAX_EXPR;
7787 else if (code == MAX_EXPR)
7788 compl_code = MIN_EXPR;
7789 else
7790 gcc_unreachable ();
7792 /* MIN (MAX (a, b), b) == b. */
7793 if (TREE_CODE (op0) == compl_code
7794 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7795 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7797 /* MIN (MAX (b, a), b) == b. */
7798 if (TREE_CODE (op0) == compl_code
7799 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7800 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7801 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7803 /* MIN (a, MAX (a, b)) == a. */
7804 if (TREE_CODE (op1) == compl_code
7805 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7806 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7807 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7809 /* MIN (a, MAX (b, a)) == a. */
7810 if (TREE_CODE (op1) == compl_code
7811 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7812 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7813 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7815 return NULL_TREE;
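/* Worked example for the MIN/MAX identities above (hypothetical
   values): with a = 3, b = 7, MIN (MAX (a, b), b) is MIN (7, 7) == 7,
   and with a = 9 it is MIN (9, 7) == 7; in both cases the result is b,
   so omit_one_operand drops the MAX while preserving any side effects
   of the discarded operand.  */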
7818 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7819 by changing CODE to reduce the magnitude of constants involved in
7820 ARG0 of the comparison.
7821 Returns a canonicalized comparison tree if a simplification was
7822 possible, otherwise returns NULL_TREE. */
7824 static tree
7825 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7826 tree arg0, tree arg1)
7828 enum tree_code code0 = TREE_CODE (arg0);
7829 tree t, cst0 = NULL_TREE;
7830 int sgn0;
7831 bool swap = false;
7833 /* Match A +- CST code arg1 and CST code arg1. */
7834 if (!(((code0 == MINUS_EXPR
7835 || code0 == PLUS_EXPR)
7836 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7837 || code0 == INTEGER_CST))
7838 return NULL_TREE;
7840 /* Identify the constant in arg0 and its sign. */
7841 if (code0 == INTEGER_CST)
7842 cst0 = arg0;
7843 else
7844 cst0 = TREE_OPERAND (arg0, 1);
7845 sgn0 = tree_int_cst_sgn (cst0);
7847 /* Overflowed constants and zero will cause problems. */
7848 if (integer_zerop (cst0)
7849 || TREE_OVERFLOW (cst0))
7850 return NULL_TREE;
7852 /* See if we can reduce the magnitude of the constant in
7853 arg0 by changing the comparison code. */
7854 if (code0 == INTEGER_CST)
7856 /* CST <= arg1 -> CST-1 < arg1. */
7857 if (code == LE_EXPR && sgn0 == 1)
7858 code = LT_EXPR;
7859 /* -CST < arg1 -> -CST-1 <= arg1. */
7860 else if (code == LT_EXPR && sgn0 == -1)
7861 code = LE_EXPR;
7862 /* CST > arg1 -> CST-1 >= arg1. */
7863 else if (code == GT_EXPR && sgn0 == 1)
7864 code = GE_EXPR;
7865 /* -CST >= arg1 -> -CST-1 > arg1. */
7866 else if (code == GE_EXPR && sgn0 == -1)
7867 code = GT_EXPR;
7868 else
7869 return NULL_TREE;
7870 /* arg1 code' CST' might be more canonical. */
7871 swap = true;
7873 else
7875 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7876 if (code == LT_EXPR
7877 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7878 code = LE_EXPR;
7879 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7880 else if (code == GT_EXPR
7881 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7882 code = GE_EXPR;
7883 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7884 else if (code == LE_EXPR
7885 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7886 code = LT_EXPR;
7887 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7888 else if (code == GE_EXPR
7889 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7890 code = GT_EXPR;
7891 else
7892 return NULL_TREE;
7895 /* Now build the constant reduced in magnitude. */
7896 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7897 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7898 if (code0 != INTEGER_CST)
7899 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7901 /* If swapping might yield a more canonical form, do so. */
7902 if (swap)
7903 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7904 else
7905 return fold_build2 (code, type, t, arg1);
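/* Examples of the canonicalization above (hypothetical operands):
   5 <= x first becomes 4 < x (CST <= arg1 -> CST-1 < arg1) and is then
   swapped to the more canonical x > 4; likewise a - 2 < x becomes
   a - 1 <= x, reducing the magnitude of the constant by one.  */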
7908 /* Further canonicalize the comparison ARG0 CODE ARG1, whose type TYPE
7909 has undefined overflow. Try to decrease the magnitude of constants involved
7910 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7911 and put sole constants at the second argument position.
7912 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7914 static tree
7915 maybe_canonicalize_comparison (enum tree_code code, tree type,
7916 tree arg0, tree arg1)
7918 tree t;
7920 /* In principle pointers also have undefined overflow behavior,
7921 but that causes problems elsewhere. */
7922 if ((flag_wrapv || flag_trapv)
7923 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7924 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7925 return NULL_TREE;
7927 /* Try canonicalization by simplifying arg0. */
7928 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7929 if (t)
7930 return t;
7932 /* Try canonicalization by simplifying arg1 using the swapped
7933 comparison. */
7934 code = swap_tree_comparison (code);
7935 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7938 /* Subroutine of fold_binary. This routine performs all of the
7939 transformations that are common to the equality/inequality
7940 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7941 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7942 fold_binary itself should go through fold_binary. Fold a comparison with
7943 tree code CODE and type TYPE with operands OP0 and OP1. Return
7944 the folded comparison or NULL_TREE. */
7946 static tree
7947 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7949 tree arg0, arg1, tem;
7951 arg0 = op0;
7952 arg1 = op1;
7954 STRIP_SIGN_NOPS (arg0);
7955 STRIP_SIGN_NOPS (arg1);
7957 tem = fold_relational_const (code, type, arg0, arg1);
7958 if (tem != NULL_TREE)
7959 return tem;
7961 /* If one arg is a real or integer constant, put it last. */
7962 if (tree_swap_operands_p (arg0, arg1, true))
7963 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7965 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7966 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7967 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7968 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7969 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7970 && !(flag_wrapv || flag_trapv))
7971 && (TREE_CODE (arg1) == INTEGER_CST
7972 && !TREE_OVERFLOW (arg1)))
7974 tree const1 = TREE_OPERAND (arg0, 1);
7975 tree const2 = arg1;
7976 tree variable = TREE_OPERAND (arg0, 0);
7977 tree lhs;
7978 int lhs_add;
7979 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7981 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7982 TREE_TYPE (arg1), const2, const1);
7983 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7984 && (TREE_CODE (lhs) != INTEGER_CST
7985 || !TREE_OVERFLOW (lhs)))
7986 return fold_build2 (code, type, variable, lhs);
7989 /* For comparisons of pointers we can decompose them to a compile-time
7990 comparison of the base objects and the offsets into the object.
7991 This requires at least one operand being an ADDR_EXPR to do more
7992 than the operand_equal_p test below. */
7993 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7994 && (TREE_CODE (arg0) == ADDR_EXPR
7995 || TREE_CODE (arg1) == ADDR_EXPR))
7997 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
7998 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
7999 enum machine_mode mode;
8000 int volatilep, unsignedp;
8001 bool indirect_base0 = false;
8003 /* Get base and offset for the access. Strip ADDR_EXPR for
8004 get_inner_reference, but put it back by stripping INDIRECT_REF
8005 off the base object if possible. */
8006 base0 = arg0;
8007 if (TREE_CODE (arg0) == ADDR_EXPR)
8009 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8010 &bitsize, &bitpos0, &offset0, &mode,
8011 &unsignedp, &volatilep, false);
8012 if (TREE_CODE (base0) == INDIRECT_REF)
8013 base0 = TREE_OPERAND (base0, 0);
8014 else
8015 indirect_base0 = true;
8018 base1 = arg1;
8019 if (TREE_CODE (arg1) == ADDR_EXPR)
8021 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8022 &bitsize, &bitpos1, &offset1, &mode,
8023 &unsignedp, &volatilep, false);
8024 /* We have to make sure to have an indirect/non-indirect base1
8025 just the same as we did for base0. */
8026 if (TREE_CODE (base1) == INDIRECT_REF
8027 && !indirect_base0)
8028 base1 = TREE_OPERAND (base1, 0);
8029 else if (!indirect_base0)
8030 base1 = NULL_TREE;
8032 else if (indirect_base0)
8033 base1 = NULL_TREE;
8035 /* If we have equivalent bases we might be able to simplify. */
8036 if (base0 && base1
8037 && operand_equal_p (base0, base1, 0))
8039 /* We can fold this expression to a constant if the non-constant
8040 offset parts are equal. */
8041 if (offset0 == offset1
8042 || (offset0 && offset1
8043 && operand_equal_p (offset0, offset1, 0)))
8045 switch (code)
8047 case EQ_EXPR:
8048 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8049 case NE_EXPR:
8050 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8051 case LT_EXPR:
8052 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8053 case LE_EXPR:
8054 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8055 case GE_EXPR:
8056 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8057 case GT_EXPR:
8058 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8059 default:;
8062 /* We can simplify the comparison to a comparison of the variable
8063 offset parts if the constant offset parts are equal.
8064 Be careful to use signed size type here because otherwise we
8065 mess with array offsets in the wrong way. This is possible
8066 because pointer arithmetic is restricted to remain within an
8067 object and overflow on pointer differences is undefined as of
8068 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8069 else if (bitpos0 == bitpos1)
8071 tree signed_size_type_node;
8072 signed_size_type_node = signed_type_for (size_type_node);
8074 /* By converting to signed size type we cover middle-end pointer
8075 arithmetic which operates on unsigned pointer types of size
8076 type size and ARRAY_REF offsets which are properly sign or
8077 zero extended from their type in case it is narrower than
8078 size type. */
8079 if (offset0 == NULL_TREE)
8080 offset0 = build_int_cst (signed_size_type_node, 0);
8081 else
8082 offset0 = fold_convert (signed_size_type_node, offset0);
8083 if (offset1 == NULL_TREE)
8084 offset1 = build_int_cst (signed_size_type_node, 0);
8085 else
8086 offset1 = fold_convert (signed_size_type_node, offset1);
8088 return fold_build2 (code, type, offset0, offset1);
8093 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8094 same object, then we can fold this to a comparison of the two offsets in
8095 signed size type. This is possible because pointer arithmetic is
8096 restricted to remain within an object and overflow on pointer differences
8097 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8098 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8099 && !flag_wrapv && !flag_trapv)
8101 tree base0, offset0, base1, offset1;
8103 if (extract_array_ref (arg0, &base0, &offset0)
8104 && extract_array_ref (arg1, &base1, &offset1)
8105 && operand_equal_p (base0, base1, 0))
8107 tree signed_size_type_node;
8108 signed_size_type_node = signed_type_for (size_type_node);
8110 /* By converting to signed size type we cover middle-end pointer
8111 arithmetic which operates on unsigned pointer types of size
8112 type size and ARRAY_REF offsets which are properly sign or
8113 zero extended from their type in case it is narrower than
8114 size type. */
8115 if (offset0 == NULL_TREE)
8116 offset0 = build_int_cst (signed_size_type_node, 0);
8117 else
8118 offset0 = fold_convert (signed_size_type_node, offset0);
8119 if (offset1 == NULL_TREE)
8120 offset1 = build_int_cst (signed_size_type_node, 0);
8121 else
8122 offset1 = fold_convert (signed_size_type_node, offset1);
8124 return fold_build2 (code, type, offset0, offset1);
8128 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8129 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8130 the resulting offset is smaller in absolute value than the
8131 original one. */
8132 if (!(flag_wrapv || flag_trapv)
8133 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8134 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8135 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8136 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8137 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8138 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8139 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8141 tree const1 = TREE_OPERAND (arg0, 1);
8142 tree const2 = TREE_OPERAND (arg1, 1);
8143 tree variable1 = TREE_OPERAND (arg0, 0);
8144 tree variable2 = TREE_OPERAND (arg1, 0);
8145 tree cst;
8147 /* Put the constant on the side where it doesn't overflow and is
8148 of lower absolute value than before. */
8149 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8150 ? MINUS_EXPR : PLUS_EXPR,
8151 const2, const1, 0);
8152 if (!TREE_OVERFLOW (cst)
8153 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8154 return fold_build2 (code, type,
8155 variable1,
8156 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8157 variable2, cst));
8159 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8160 ? MINUS_EXPR : PLUS_EXPR,
8161 const1, const2, 0);
8162 if (!TREE_OVERFLOW (cst)
8163 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8164 return fold_build2 (code, type,
8165 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8166 variable1, cst),
8167 variable2);
8170 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8171 signed arithmetic case. That form is created by the compiler
8172 often enough for folding it to be of value. One example is in
8173 computing loop trip counts after Operator Strength Reduction. */
8174 if (!(flag_wrapv || flag_trapv)
8175 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8176 && TREE_CODE (arg0) == MULT_EXPR
8177 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8178 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8179 && integer_zerop (arg1))
8181 tree const1 = TREE_OPERAND (arg0, 1);
8182 tree const2 = arg1; /* zero */
8183 tree variable1 = TREE_OPERAND (arg0, 0);
8184 enum tree_code cmp_code = code;
8186 gcc_assert (!integer_zerop (const1));
8188 /* If const1 is negative we swap the sense of the comparison. */
8189 if (tree_int_cst_sgn (const1) < 0)
8190 cmp_code = swap_tree_comparison (cmp_code);
8192 return fold_build2 (cmp_code, type, variable1, const2);
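	  /* Example (hypothetical): for signed x, x * 4 < 0 folds to
	     x < 0 and x * -4 < 0 to x > 0.  This relies on signed
	     overflow being undefined, so the sign of the product is the
	     product of the signs.  */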
8195 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8196 if (tem)
8197 return tem;
8199 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8201 tree targ0 = strip_float_extensions (arg0);
8202 tree targ1 = strip_float_extensions (arg1);
8203 tree newtype = TREE_TYPE (targ0);
8205 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8206 newtype = TREE_TYPE (targ1);
8208 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8209 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8210 return fold_build2 (code, type, fold_convert (newtype, targ0),
8211 fold_convert (newtype, targ1));
8213 /* (-a) CMP (-b) -> b CMP a */
8214 if (TREE_CODE (arg0) == NEGATE_EXPR
8215 && TREE_CODE (arg1) == NEGATE_EXPR)
8216 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8217 TREE_OPERAND (arg0, 0));
8219 if (TREE_CODE (arg1) == REAL_CST)
8221 REAL_VALUE_TYPE cst;
8222 cst = TREE_REAL_CST (arg1);
8224 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8225 if (TREE_CODE (arg0) == NEGATE_EXPR)
8226 return fold_build2 (swap_tree_comparison (code), type,
8227 TREE_OPERAND (arg0, 0),
8228 build_real (TREE_TYPE (arg1),
8229 REAL_VALUE_NEGATE (cst)));
8231 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8232 /* a CMP (-0) -> a CMP 0 */
8233 if (REAL_VALUE_MINUS_ZERO (cst))
8234 return fold_build2 (code, type, arg0,
8235 build_real (TREE_TYPE (arg1), dconst0));
8237 /* x != NaN is always true, other ops are always false. */
8238 if (REAL_VALUE_ISNAN (cst)
8239 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8241 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8242 return omit_one_operand (type, tem, arg0);
8245 /* Fold comparisons against infinity. */
8246 if (REAL_VALUE_ISINF (cst))
8248 tem = fold_inf_compare (code, type, arg0, arg1);
8249 if (tem != NULL_TREE)
8250 return tem;
8254 /* If this is a comparison of a real constant with a PLUS_EXPR
8255 or a MINUS_EXPR of a real constant, we can convert it into a
8256 comparison with a revised real constant as long as no overflow
8257 occurs when unsafe_math_optimizations are enabled. */
8258 if (flag_unsafe_math_optimizations
8259 && TREE_CODE (arg1) == REAL_CST
8260 && (TREE_CODE (arg0) == PLUS_EXPR
8261 || TREE_CODE (arg0) == MINUS_EXPR)
8262 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8263 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8264 ? MINUS_EXPR : PLUS_EXPR,
8265 arg1, TREE_OPERAND (arg0, 1), 0))
8266 && !TREE_OVERFLOW (tem))
8267 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8269 /* Likewise, we can simplify a comparison of a real constant with
8270 a MINUS_EXPR whose first operand is also a real constant, i.e.
8271 (c1 - x) < c2 becomes x > c1-c2. */
8272 if (flag_unsafe_math_optimizations
8273 && TREE_CODE (arg1) == REAL_CST
8274 && TREE_CODE (arg0) == MINUS_EXPR
8275 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8276 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8277 arg1, 0))
8278 && !TREE_OVERFLOW (tem))
8279 return fold_build2 (swap_tree_comparison (code), type,
8280 TREE_OPERAND (arg0, 1), tem);
8282 /* Fold comparisons against built-in math functions. */
8283 if (TREE_CODE (arg1) == REAL_CST
8284 && flag_unsafe_math_optimizations
8285 && ! flag_errno_math)
8287 enum built_in_function fcode = builtin_mathfn_code (arg0);
8289 if (fcode != END_BUILTINS)
8291 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8292 if (tem != NULL_TREE)
8293 return tem;
8298 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8299 if (TREE_CONSTANT (arg1)
8300 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8301 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8302 /* This optimization is invalid for ordered comparisons
8303 if CONST+INCR overflows or if foo+incr might overflow.
8304 This optimization is invalid for floating point due to rounding.
8305 For pointer types we assume overflow doesn't happen. */
8306 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8307 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8308 && (code == EQ_EXPR || code == NE_EXPR))))
8310 tree varop, newconst;
8312 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8314 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8315 arg1, TREE_OPERAND (arg0, 1));
8316 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8317 TREE_OPERAND (arg0, 0),
8318 TREE_OPERAND (arg0, 1));
8320 else
8322 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8323 arg1, TREE_OPERAND (arg0, 1));
8324 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8325 TREE_OPERAND (arg0, 0),
8326 TREE_OPERAND (arg0, 1));
8330 /* If VAROP is a reference to a bitfield, we must mask
8331 the constant by the width of the field. */
8332 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8333 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8334 && host_integerp (DECL_SIZE (TREE_OPERAND
8335 (TREE_OPERAND (varop, 0), 1)), 1))
8337 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8338 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8339 tree folded_compare, shift;
8341 /* First check whether the comparison would come out
8342 always the same. If we don't do that we would
8343 change the meaning with the masking. */
8344 folded_compare = fold_build2 (code, type,
8345 TREE_OPERAND (varop, 0), arg1);
8346 if (TREE_CODE (folded_compare) == INTEGER_CST)
8347 return omit_one_operand (type, folded_compare, varop);
8349 shift = build_int_cst (NULL_TREE,
8350 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8351 shift = fold_convert (TREE_TYPE (varop), shift);
8352 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8353 newconst, shift);
8354 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8355 newconst, shift);
8358 return fold_build2 (code, type, varop, newconst);
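      /* Example of the rewrite above (hypothetical): for integral i,
	 i++ == 5 becomes ++i == 6.  The new form compares i + 1 against
	 5 + 1, which holds exactly when the old value equalled 5, so the
	 EQ/NE result and the side effect are unchanged.  */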
8361 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8362 && (TREE_CODE (arg0) == NOP_EXPR
8363 || TREE_CODE (arg0) == CONVERT_EXPR))
8365 /* If we are widening one operand of an integer comparison,
8366 see if the other operand is similarly being widened. Perhaps we
8367 can do the comparison in the narrower type. */
8368 tem = fold_widened_comparison (code, type, arg0, arg1);
8369 if (tem)
8370 return tem;
8372 /* Or if we are changing signedness. */
8373 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8374 if (tem)
8375 return tem;
8378 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8379 constant, we can simplify it. */
8380 if (TREE_CODE (arg1) == INTEGER_CST
8381 && (TREE_CODE (arg0) == MIN_EXPR
8382 || TREE_CODE (arg0) == MAX_EXPR)
8383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8385 tem = optimize_minmax_comparison (code, type, op0, op1);
8386 if (tem)
8387 return tem;
8390 /* Simplify comparison of something with itself. (For IEEE
8391 floating-point, we can only do some of these simplifications.) */
8392 if (operand_equal_p (arg0, arg1, 0))
8394 switch (code)
8396 case EQ_EXPR:
8397 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8398 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8399 return constant_boolean_node (1, type);
8400 break;
8402 case GE_EXPR:
8403 case LE_EXPR:
8404 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8405 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8406 return constant_boolean_node (1, type);
8407 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8409 case NE_EXPR:
8410 /* For NE, we can only do this simplification if integer
8411 or we don't honor IEEE floating point NaNs. */
8412 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8413 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8414 break;
8415 /* ... fall through ... */
8416 case GT_EXPR:
8417 case LT_EXPR:
8418 return constant_boolean_node (0, type);
8419 default:
8420 gcc_unreachable ();
8424 /* If we are comparing an expression that just has comparisons
8425 of two integer values, arithmetic expressions of those comparisons,
8426 and constants, we can simplify it. There are only three cases
8427 to check: the two values can either be equal, the first can be
8428 greater, or the second can be greater. Fold the expression for
8429 those three values. Since each value must be 0 or 1, we have
8430 eight possibilities, each of which corresponds to the constant 0
8431 or 1 or one of the six possible comparisons.
8433 This handles common cases like (a > b) == 0 but also handles
8434 expressions like ((x > y) - (y > x)) > 0, which supposedly
8435 occur in macroized code. */
8437 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8439 tree cval1 = 0, cval2 = 0;
8440 int save_p = 0;
8442 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8443 /* Don't handle degenerate cases here; they should already
8444 have been handled anyway. */
8445 && cval1 != 0 && cval2 != 0
8446 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8447 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8448 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8449 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8450 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8451 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8452 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8454 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8455 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8457 /* We can't just pass T to eval_subst in case cval1 or cval2
8458 was the same as ARG1. */
8460 tree high_result
8461 = fold_build2 (code, type,
8462 eval_subst (arg0, cval1, maxval,
8463 cval2, minval),
8464 arg1);
8465 tree equal_result
8466 = fold_build2 (code, type,
8467 eval_subst (arg0, cval1, maxval,
8468 cval2, maxval),
8469 arg1);
8470 tree low_result
8471 = fold_build2 (code, type,
8472 eval_subst (arg0, cval1, minval,
8473 cval2, maxval),
8474 arg1);
8476 /* All three of these results should be 0 or 1. Confirm they are.
8477 Then use those values to select the proper code to use. */
8479 if (TREE_CODE (high_result) == INTEGER_CST
8480 && TREE_CODE (equal_result) == INTEGER_CST
8481 && TREE_CODE (low_result) == INTEGER_CST)
8483 /* Make a 3-bit mask with the high-order bit being the
8484 value for `>', the next for '=', and the low for '<'. */
8485 switch ((integer_onep (high_result) * 4)
8486 + (integer_onep (equal_result) * 2)
8487 + integer_onep (low_result))
8489 case 0:
8490 /* Always false. */
8491 return omit_one_operand (type, integer_zero_node, arg0);
8492 case 1:
8493 code = LT_EXPR;
8494 break;
8495 case 2:
8496 code = EQ_EXPR;
8497 break;
8498 case 3:
8499 code = LE_EXPR;
8500 break;
8501 case 4:
8502 code = GT_EXPR;
8503 break;
8504 case 5:
8505 code = NE_EXPR;
8506 break;
8507 case 6:
8508 code = GE_EXPR;
8509 break;
8510 case 7:
8511 /* Always true. */
8512 return omit_one_operand (type, integer_one_node, arg0);
8515 if (save_p)
8516 return save_expr (build2 (code, type, cval1, cval2));
8517 return fold_build2 (code, type, cval1, cval2);
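	  /* Example (hypothetical): for (a > b) == 0 the three
	     substitutions give high_result = 0, equal_result = 1 and
	     low_result = 1, i.e. mask 3, so the expression folds to
	     a <= b.  */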
8522 /* Fold a comparison of the address of COMPONENT_REFs with the same
8523 type and component to a comparison of the address of the base
8524 object. In short, &x->a OP &y->a to x OP y and
8525 &x->a OP &y.a to x OP &y */
8526 if (TREE_CODE (arg0) == ADDR_EXPR
8527 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8528 && TREE_CODE (arg1) == ADDR_EXPR
8529 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8531 tree cref0 = TREE_OPERAND (arg0, 0);
8532 tree cref1 = TREE_OPERAND (arg1, 0);
8533 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8535 tree op0 = TREE_OPERAND (cref0, 0);
8536 tree op1 = TREE_OPERAND (cref1, 0);
8537 return fold_build2 (code, type,
8538 build_fold_addr_expr (op0),
8539 build_fold_addr_expr (op1));
8543 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8544 into a single range test. */
8545 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8546 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8547 && TREE_CODE (arg1) == INTEGER_CST
8548 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8549 && !integer_zerop (TREE_OPERAND (arg0, 1))
8550 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8551 && !TREE_OVERFLOW (arg1))
8553 tem = fold_div_compare (code, type, arg0, arg1);
8554 if (tem != NULL_TREE)
8555 return tem;
8558 /* Fold ~X op ~Y as Y op X. */
8559 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8560 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8561 return fold_build2 (code, type,
8562 TREE_OPERAND (arg1, 0),
8563 TREE_OPERAND (arg0, 0));
8565 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8566 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8567 && TREE_CODE (arg1) == INTEGER_CST)
8568 return fold_build2 (swap_tree_comparison (code), type,
8569 TREE_OPERAND (arg0, 0),
8570 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8572 return NULL_TREE;
8576 /* Subroutine of fold_binary. Optimize complex multiplications of the
8577 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8578 argument EXPR represents the expression "z" of type TYPE. */
8580 static tree
8581 fold_mult_zconjz (tree type, tree expr)
8583 tree itype = TREE_TYPE (type);
8584 tree rpart, ipart, tem;
8586 if (TREE_CODE (expr) == COMPLEX_EXPR)
8588 rpart = TREE_OPERAND (expr, 0);
8589 ipart = TREE_OPERAND (expr, 1);
8591 else if (TREE_CODE (expr) == COMPLEX_CST)
8593 rpart = TREE_REALPART (expr);
8594 ipart = TREE_IMAGPART (expr);
8596 else
8598 expr = save_expr (expr);
8599 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8600 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8603 rpart = save_expr (rpart);
8604 ipart = save_expr (ipart);
8605 tem = fold_build2 (PLUS_EXPR, itype,
8606 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8607 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8608 return fold_build2 (COMPLEX_EXPR, type, tem,
8609 fold_convert (itype, integer_zero_node));
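/* The algebra behind fold_mult_zconjz: for z = a + bi,
   z * conj(z) == (a + bi)(a - bi) == a*a + b*b + 0i, so the fold emits
   two real multiplies, one add and a zero imaginary part instead of a
   full complex multiplication.  */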
8613 /* Fold a binary expression of code CODE and type TYPE with operands
8614 OP0 and OP1. Return the folded expression if folding is
8615 successful. Otherwise, return NULL_TREE. */
8617 tree
8618 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8620 enum tree_code_class kind = TREE_CODE_CLASS (code);
8621 tree arg0, arg1, tem;
8622 tree t1 = NULL_TREE;
8624 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8625 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8626 && TREE_CODE_LENGTH (code) == 2
8627 && op0 != NULL_TREE
8628 && op1 != NULL_TREE);
8630 arg0 = op0;
8631 arg1 = op1;
8633 /* Strip any conversions that don't change the mode. This is
8634 safe for every expression, except for a comparison expression
8635 because its signedness is derived from its operands. So, in
8636 the latter case, only strip conversions that don't change the
8637 signedness.
8639 Note that this is done as an internal manipulation within the
8640 constant folder, in order to find the simplest representation
8641 of the arguments so that their form can be studied. In any
8642 cases, the appropriate type conversions should be put back in
8643 the tree that will get out of the constant folder. */
8645 if (kind == tcc_comparison)
8647 STRIP_SIGN_NOPS (arg0);
8648 STRIP_SIGN_NOPS (arg1);
8650 else
8652 STRIP_NOPS (arg0);
8653 STRIP_NOPS (arg1);
8656 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8657 constant but we can't do arithmetic on them. */
8658 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8659 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8660 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8661 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8663 if (kind == tcc_binary)
8664 tem = const_binop (code, arg0, arg1, 0);
8665 else if (kind == tcc_comparison)
8666 tem = fold_relational_const (code, type, arg0, arg1);
8667 else
8668 tem = NULL_TREE;
8670 if (tem != NULL_TREE)
8672 if (TREE_TYPE (tem) != type)
8673 tem = fold_convert (type, tem);
8674 return tem;
8678 /* If this is a commutative operation, and ARG0 is a constant, move it
8679 to ARG1 to reduce the number of tests below. */
8680 if (commutative_tree_code (code)
8681 && tree_swap_operands_p (arg0, arg1, true))
8682 return fold_build2 (code, type, op1, op0);
8684 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8686 First check for cases where an arithmetic operation is applied to a
8687 compound, conditional, or comparison operation. Push the arithmetic
8688 operation inside the compound or conditional to see if any folding
8689 can then be done. Convert comparison to conditional for this purpose.
8690 This also optimizes non-constant cases that used to be done in
8691 expand_expr.
8693 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8694 one of the operands is a comparison and the other is a comparison, a
8695 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8696 code below would make the expression more complex. Change it to a
8697 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8698 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8700 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8701 || code == EQ_EXPR || code == NE_EXPR)
8702 && ((truth_value_p (TREE_CODE (arg0))
8703 && (truth_value_p (TREE_CODE (arg1))
8704 || (TREE_CODE (arg1) == BIT_AND_EXPR
8705 && integer_onep (TREE_OPERAND (arg1, 1)))))
8706 || (truth_value_p (TREE_CODE (arg1))
8707 && (truth_value_p (TREE_CODE (arg0))
8708 || (TREE_CODE (arg0) == BIT_AND_EXPR
8709 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8711 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8712 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8713 : TRUTH_XOR_EXPR,
8714 boolean_type_node,
8715 fold_convert (boolean_type_node, arg0),
8716 fold_convert (boolean_type_node, arg1));
8718 if (code == EQ_EXPR)
8719 tem = invert_truthvalue (tem);
8721 return fold_convert (type, tem);
8724 if (TREE_CODE_CLASS (code) == tcc_binary
8725 || TREE_CODE_CLASS (code) == tcc_comparison)
8727 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8728 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8729 fold_build2 (code, type,
8730 TREE_OPERAND (arg0, 1), op1));
8731 if (TREE_CODE (arg1) == COMPOUND_EXPR
8732 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8733 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8734 fold_build2 (code, type,
8735 op0, TREE_OPERAND (arg1, 1)));
8737 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8739 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8740 arg0, arg1,
8741 /*cond_first_p=*/1);
8742 if (tem != NULL_TREE)
8743 return tem;
8746 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8748 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8749 arg1, arg0,
8750 /*cond_first_p=*/0);
8751 if (tem != NULL_TREE)
8752 return tem;
8756 switch (code)
8758 case PLUS_EXPR:
8759 /* A + (-B) -> A - B */
8760 if (TREE_CODE (arg1) == NEGATE_EXPR)
8761 return fold_build2 (MINUS_EXPR, type,
8762 fold_convert (type, arg0),
8763 fold_convert (type, TREE_OPERAND (arg1, 0)));
8764 /* (-A) + B -> B - A */
8765 if (TREE_CODE (arg0) == NEGATE_EXPR
8766 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8767 return fold_build2 (MINUS_EXPR, type,
8768 fold_convert (type, arg1),
8769 fold_convert (type, TREE_OPERAND (arg0, 0)));
8770 /* Convert ~A + 1 to -A. */
8771 if (INTEGRAL_TYPE_P (type)
8772 && TREE_CODE (arg0) == BIT_NOT_EXPR
8773 && integer_onep (arg1))
8774 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8776 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8777 same or one. */
8778 if ((TREE_CODE (arg0) == MULT_EXPR
8779 || TREE_CODE (arg1) == MULT_EXPR)
8780 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8782 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8783 if (tem)
8784 return tem;
8787 if (! FLOAT_TYPE_P (type))
8789 if (integer_zerop (arg1))
8790 return non_lvalue (fold_convert (type, arg0));
8792 /* ~X + X is -1. */
8793 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8794 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8795 && !TYPE_TRAP_SIGNED (type))
8797 t1 = build_int_cst_type (type, -1);
8798 return omit_one_operand (type, t1, arg1);
8801 /* X + ~X is -1. */
8802 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8803 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8804 && !TYPE_TRAP_SIGNED (type))
8806 t1 = build_int_cst_type (type, -1);
8807 return omit_one_operand (type, t1, arg0);
8810 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8811 with a constant, and the two constants have no bits in common,
8812 we should treat this as a BIT_IOR_EXPR since this may produce more
8813 simplifications. */
8814 if (TREE_CODE (arg0) == BIT_AND_EXPR
8815 && TREE_CODE (arg1) == BIT_AND_EXPR
8816 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8817 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8818 && integer_zerop (const_binop (BIT_AND_EXPR,
8819 TREE_OPERAND (arg0, 1),
8820 TREE_OPERAND (arg1, 1), 0)))
8822 code = BIT_IOR_EXPR;
8823 goto bit_ior;
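	  /* Example (hypothetical constants): in (x & 0xF0) + (y & 0x0F)
	     the masks share no bits, so no bit position can carry; the
	     sum equals (x & 0xF0) | (y & 0x0F) and the BIT_IOR_EXPR code
	     can simplify it further.  */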
8826 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8827 (plus (plus (mult) (mult)) (foo)) so that we can
8828 take advantage of the factoring cases below. */
8829 if (((TREE_CODE (arg0) == PLUS_EXPR
8830 || TREE_CODE (arg0) == MINUS_EXPR)
8831 && TREE_CODE (arg1) == MULT_EXPR)
8832 || ((TREE_CODE (arg1) == PLUS_EXPR
8833 || TREE_CODE (arg1) == MINUS_EXPR)
8834 && TREE_CODE (arg0) == MULT_EXPR))
8836 tree parg0, parg1, parg, marg;
8837 enum tree_code pcode;
8839 if (TREE_CODE (arg1) == MULT_EXPR)
8840 parg = arg0, marg = arg1;
8841 else
8842 parg = arg1, marg = arg0;
8843 pcode = TREE_CODE (parg);
8844 parg0 = TREE_OPERAND (parg, 0);
8845 parg1 = TREE_OPERAND (parg, 1);
8846 STRIP_NOPS (parg0);
8847 STRIP_NOPS (parg1);
8849 if (TREE_CODE (parg0) == MULT_EXPR
8850 && TREE_CODE (parg1) != MULT_EXPR)
8851 return fold_build2 (pcode, type,
8852 fold_build2 (PLUS_EXPR, type,
8853 fold_convert (type, parg0),
8854 fold_convert (type, marg)),
8855 fold_convert (type, parg1));
8856 if (TREE_CODE (parg0) != MULT_EXPR
8857 && TREE_CODE (parg1) == MULT_EXPR)
8858 return fold_build2 (PLUS_EXPR, type,
8859 fold_convert (type, parg0),
8860 fold_build2 (pcode, type,
8861 fold_convert (type, marg),
8862 fold_convert (type,
8863 parg1)));
8866 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8867 of the array. The loop optimizer sometimes produces this type of
8868 expression. */
8869 if (TREE_CODE (arg0) == ADDR_EXPR)
8871 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8872 if (tem)
8873 return fold_convert (type, tem);
8875 else if (TREE_CODE (arg1) == ADDR_EXPR)
8877 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8878 if (tem)
8879 return fold_convert (type, tem);
8882 else
8884 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8885 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8886 return non_lvalue (fold_convert (type, arg0));
8888 /* Likewise if the operands are reversed. */
8889 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8890 return non_lvalue (fold_convert (type, arg1));
8892 /* Convert X + -C into X - C. */
8893 if (TREE_CODE (arg1) == REAL_CST
8894 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8896 tem = fold_negate_const (arg1, type);
8897 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8898 return fold_build2 (MINUS_EXPR, type,
8899 fold_convert (type, arg0),
8900 fold_convert (type, tem));
8903 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8904 to __complex__ ( x, y ). This is not the same for SNaNs or
8905 if signed zeros are involved. */
8906 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8907 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8908 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8910 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8911 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8912 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8913 bool arg0rz = false, arg0iz = false;
8914 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8915 || (arg0i && (arg0iz = real_zerop (arg0i))))
8917 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8918 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8919 if (arg0rz && arg1i && real_zerop (arg1i))
8921 tree rp = arg1r ? arg1r
8922 : build1 (REALPART_EXPR, rtype, arg1);
8923 tree ip = arg0i ? arg0i
8924 : build1 (IMAGPART_EXPR, rtype, arg0);
8925 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8927 else if (arg0iz && arg1r && real_zerop (arg1r))
8929 tree rp = arg0r ? arg0r
8930 : build1 (REALPART_EXPR, rtype, arg0);
8931 tree ip = arg1i ? arg1i
8932 : build1 (IMAGPART_EXPR, rtype, arg1);
8933 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8938 if (flag_unsafe_math_optimizations
8939 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8940 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8941 && (tem = distribute_real_division (code, type, arg0, arg1)))
8942 return tem;
8944 /* Convert x+x into x*2.0. */
8945 if (operand_equal_p (arg0, arg1, 0)
8946 && SCALAR_FLOAT_TYPE_P (type))
8947 return fold_build2 (MULT_EXPR, type, arg0,
8948 build_real (type, dconst2));
8950 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8951 if (flag_unsafe_math_optimizations
8952 && TREE_CODE (arg1) == PLUS_EXPR
8953 && TREE_CODE (arg0) != MULT_EXPR)
8955 tree tree10 = TREE_OPERAND (arg1, 0);
8956 tree tree11 = TREE_OPERAND (arg1, 1);
8957 if (TREE_CODE (tree11) == MULT_EXPR
8958 && TREE_CODE (tree10) == MULT_EXPR)
8960 tree tree0;
8961 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8962 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8965 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8966 if (flag_unsafe_math_optimizations
8967 && TREE_CODE (arg0) == PLUS_EXPR
8968 && TREE_CODE (arg1) != MULT_EXPR)
8970 tree tree00 = TREE_OPERAND (arg0, 0);
8971 tree tree01 = TREE_OPERAND (arg0, 1);
8972 if (TREE_CODE (tree01) == MULT_EXPR
8973 && TREE_CODE (tree00) == MULT_EXPR)
8975 tree tree0;
8976 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8977 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8982 bit_rotate:
8983 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8984 is a rotate of A by C1 bits. */
8985 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8986 is a rotate of A by B bits. */
8988 enum tree_code code0, code1;
8989 code0 = TREE_CODE (arg0);
8990 code1 = TREE_CODE (arg1);
8991 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8992 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8993 && operand_equal_p (TREE_OPERAND (arg0, 0),
8994 TREE_OPERAND (arg1, 0), 0)
8995 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8997 tree tree01, tree11;
8998 enum tree_code code01, code11;
9000 tree01 = TREE_OPERAND (arg0, 1);
9001 tree11 = TREE_OPERAND (arg1, 1);
9002 STRIP_NOPS (tree01);
9003 STRIP_NOPS (tree11);
9004 code01 = TREE_CODE (tree01);
9005 code11 = TREE_CODE (tree11);
9006 if (code01 == INTEGER_CST
9007 && code11 == INTEGER_CST
9008 && TREE_INT_CST_HIGH (tree01) == 0
9009 && TREE_INT_CST_HIGH (tree11) == 0
9010 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9011 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9012 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9013 code0 == LSHIFT_EXPR ? tree01 : tree11);
9014 else if (code11 == MINUS_EXPR)
9016 tree tree110, tree111;
9017 tree110 = TREE_OPERAND (tree11, 0);
9018 tree111 = TREE_OPERAND (tree11, 1);
9019 STRIP_NOPS (tree110);
9020 STRIP_NOPS (tree111);
9021 if (TREE_CODE (tree110) == INTEGER_CST
9022 && 0 == compare_tree_int (tree110,
9023 TYPE_PRECISION
9024 (TREE_TYPE (TREE_OPERAND
9025 (arg0, 0))))
9026 && operand_equal_p (tree01, tree111, 0))
9027 return build2 ((code0 == LSHIFT_EXPR
9028 ? LROTATE_EXPR
9029 : RROTATE_EXPR),
9030 type, TREE_OPERAND (arg0, 0), tree01);
9032 else if (code01 == MINUS_EXPR)
9034 tree tree010, tree011;
9035 tree010 = TREE_OPERAND (tree01, 0);
9036 tree011 = TREE_OPERAND (tree01, 1);
9037 STRIP_NOPS (tree010);
9038 STRIP_NOPS (tree011);
9039 if (TREE_CODE (tree010) == INTEGER_CST
9040 && 0 == compare_tree_int (tree010,
9041 TYPE_PRECISION
9042 (TREE_TYPE (TREE_OPERAND
9043 (arg0, 0))))
9044 && operand_equal_p (tree11, tree011, 0))
9045 return build2 ((code0 != LSHIFT_EXPR
9046 ? LROTATE_EXPR
9047 : RROTATE_EXPR),
9048 type, TREE_OPERAND (arg0, 0), tree11);
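      /* Illustrative examples (annotation, not original source):
         assuming a 32-bit unsigned int a, both

           (a << 3) + (a >> 29)            C1 + C2 == 32
           (a << b) + (a >> (32 - b))      Z == 32

         are matched here and rewritten to a rotate-left of a.  */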
9053 associate:
9054 /* In most languages, we can't associate operations on floats through
9055 parentheses. Rather than remember where the parentheses were, we
9056 don't associate floats at all, unless the user has specified
9057 -funsafe-math-optimizations. */
9059 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9061 tree var0, con0, lit0, minus_lit0;
9062 tree var1, con1, lit1, minus_lit1;
9064 /* Split both trees into variables, constants, and literals. Then
9065 associate each group together, the constants with literals,
9066 then the result with variables. This increases the chances of
9067 literals being recombined later and of generating relocatable
9068 expressions for the sum of a constant and literal. */
9069 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9070 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9071 code == MINUS_EXPR);
9073 /* Only do something if we found more than two objects. Otherwise,
9074 nothing has changed and we risk infinite recursion. */
9075 if (2 < ((var0 != 0) + (var1 != 0)
9076 + (con0 != 0) + (con1 != 0)
9077 + (lit0 != 0) + (lit1 != 0)
9078 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9080 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9081 if (code == MINUS_EXPR)
9082 code = PLUS_EXPR;
9084 var0 = associate_trees (var0, var1, code, type);
9085 con0 = associate_trees (con0, con1, code, type);
9086 lit0 = associate_trees (lit0, lit1, code, type);
9087 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9089 /* Preserve the MINUS_EXPR if the negative part of the literal is
9090 greater than the positive part. Otherwise, the multiplicative
9091 folding code (i.e. extract_muldiv) may be fooled when
9092 unsigned constants are subtracted, as in the following
9093 example: ((X*2 + 4) - 8U)/2. */
9094 if (minus_lit0 && lit0)
9096 if (TREE_CODE (lit0) == INTEGER_CST
9097 && TREE_CODE (minus_lit0) == INTEGER_CST
9098 && tree_int_cst_lt (lit0, minus_lit0))
9100 minus_lit0 = associate_trees (minus_lit0, lit0,
9101 MINUS_EXPR, type);
9102 lit0 = 0;
9104 else
9106 lit0 = associate_trees (lit0, minus_lit0,
9107 MINUS_EXPR, type);
9108 minus_lit0 = 0;
9111 if (minus_lit0)
9113 if (con0 == 0)
9114 return fold_convert (type,
9115 associate_trees (var0, minus_lit0,
9116 MINUS_EXPR, type));
9117 else
9119 con0 = associate_trees (con0, minus_lit0,
9120 MINUS_EXPR, type);
9121 return fold_convert (type,
9122 associate_trees (var0, con0,
9123 PLUS_EXPR, type));
9127 con0 = associate_trees (con0, lit0, code, type);
9128 return fold_convert (type, associate_trees (var0, con0,
9129 code, type));
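      /* Illustrative example (annotation, not original source): for an
         int x, "(x + 1) + (x + 2)" splits into variables {x, x} and
         literals {1, 2} -- four objects, so more than two -- and
         reassociates to "(x + x) + 3".  */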
9133 return NULL_TREE;
9135 case MINUS_EXPR:
9136 /* A - (-B) -> A + B */
9137 if (TREE_CODE (arg1) == NEGATE_EXPR)
9138 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9139 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9140 if (TREE_CODE (arg0) == NEGATE_EXPR
9141 && (FLOAT_TYPE_P (type)
9142 || INTEGRAL_TYPE_P (type))
9143 && negate_expr_p (arg1)
9144 && reorder_operands_p (arg0, arg1))
9145 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9146 TREE_OPERAND (arg0, 0));
9147 /* Convert -A - 1 to ~A. */
9148 if (INTEGRAL_TYPE_P (type)
9149 && TREE_CODE (arg0) == NEGATE_EXPR
9150 && integer_onep (arg1)
9151 && !TYPE_TRAP_SIGNED (type))
9152 return fold_build1 (BIT_NOT_EXPR, type,
9153 fold_convert (type, TREE_OPERAND (arg0, 0)));
9155 /* Convert -1 - A to ~A. */
9156 if (INTEGRAL_TYPE_P (type)
9157 && integer_all_onesp (arg0))
9158 return fold_build1 (BIT_NOT_EXPR, type, op1);
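      /* Worked identity (annotation, not original source): in two's
         complement, -A == ~A + 1, hence -A - 1 == ~A and likewise
         -1 - A == ~A.  E.g. for A == 5, -5 - 1 == -6 == ~5.  */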
9160 if (! FLOAT_TYPE_P (type))
9162 if (integer_zerop (arg0))
9163 return negate_expr (fold_convert (type, arg1));
9164 if (integer_zerop (arg1))
9165 return non_lvalue (fold_convert (type, arg0));
9167 /* Fold A - (A & B) into ~B & A. */
9168 if (!TREE_SIDE_EFFECTS (arg0)
9169 && TREE_CODE (arg1) == BIT_AND_EXPR)
9171 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9172 return fold_build2 (BIT_AND_EXPR, type,
9173 fold_build1 (BIT_NOT_EXPR, type,
9174 TREE_OPERAND (arg1, 0)),
9175 arg0);
9176 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9177 return fold_build2 (BIT_AND_EXPR, type,
9178 fold_build1 (BIT_NOT_EXPR, type,
9179 TREE_OPERAND (arg1, 1)),
9180 arg0);
9183 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9184 any power of 2 minus 1. */
9185 if (TREE_CODE (arg0) == BIT_AND_EXPR
9186 && TREE_CODE (arg1) == BIT_AND_EXPR
9187 && operand_equal_p (TREE_OPERAND (arg0, 0),
9188 TREE_OPERAND (arg1, 0), 0))
9190 tree mask0 = TREE_OPERAND (arg0, 1);
9191 tree mask1 = TREE_OPERAND (arg1, 1);
9192 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9194 if (operand_equal_p (tem, mask1, 0))
9196 tem = fold_build2 (BIT_XOR_EXPR, type,
9197 TREE_OPERAND (arg0, 0), mask1);
9198 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9203 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9204 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9205 return non_lvalue (fold_convert (type, arg0));
9207 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9208 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9209 (-ARG1 + ARG0) reduces to -ARG1. */
9210 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9211 return negate_expr (fold_convert (type, arg1));
9213 /* Fold &x - &x. This can happen from &x.foo - &x.
9214 This is unsafe for certain floats even in non-IEEE formats.
9215 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9216 Also note that operand_equal_p is always false if an operand
9217 is volatile. */
9219 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9220 && operand_equal_p (arg0, arg1, 0))
9221 return fold_convert (type, integer_zero_node);
9223 /* A - B -> A + (-B) if B is easily negatable. */
9224 if (negate_expr_p (arg1)
9225 && ((FLOAT_TYPE_P (type)
9226 /* Avoid this transformation if B is a positive REAL_CST. */
9227 && (TREE_CODE (arg1) != REAL_CST
9228 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9229 || INTEGRAL_TYPE_P (type)))
9230 return fold_build2 (PLUS_EXPR, type,
9231 fold_convert (type, arg0),
9232 fold_convert (type, negate_expr (arg1)));
9234 /* Try folding difference of addresses. */
9236 HOST_WIDE_INT diff;
9238 if ((TREE_CODE (arg0) == ADDR_EXPR
9239 || TREE_CODE (arg1) == ADDR_EXPR)
9240 && ptr_difference_const (arg0, arg1, &diff))
9241 return build_int_cst_type (type, diff);
9244 /* Fold &a[i] - &a[j] to i-j. */
9245 if (TREE_CODE (arg0) == ADDR_EXPR
9246 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9247 && TREE_CODE (arg1) == ADDR_EXPR
9248 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9250 tree aref0 = TREE_OPERAND (arg0, 0);
9251 tree aref1 = TREE_OPERAND (arg1, 0);
9252 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9253 TREE_OPERAND (aref1, 0), 0))
9255 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9256 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9257 tree esz = array_ref_element_size (aref0);
9258 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9259 return fold_build2 (MULT_EXPR, type, diff,
9260 fold_convert (type, esz));
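      /* Illustrative example (annotation, not original source): with
         "int a[10];", the byte difference "&a[i] - &a[j]" becomes
         "(i - j) * sizeof (int)"; the front end's division of the
         pointer difference by the element size then typically reduces
         this to i - j.  */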
9265 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9266 of the array. The loop optimizer sometimes produces this type of
9267 expression. */
9268 if (TREE_CODE (arg0) == ADDR_EXPR)
9270 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9271 if (tem)
9272 return fold_convert (type, tem);
9275 if (flag_unsafe_math_optimizations
9276 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9277 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9278 && (tem = distribute_real_division (code, type, arg0, arg1)))
9279 return tem;
9281 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9282 same or one. */
9283 if ((TREE_CODE (arg0) == MULT_EXPR
9284 || TREE_CODE (arg1) == MULT_EXPR)
9285 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9287 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9288 if (tem)
9289 return tem;
9292 goto associate;
9294 case MULT_EXPR:
9295 /* (-A) * (-B) -> A * B */
9296 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9297 return fold_build2 (MULT_EXPR, type,
9298 fold_convert (type, TREE_OPERAND (arg0, 0)),
9299 fold_convert (type, negate_expr (arg1)));
9300 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9301 return fold_build2 (MULT_EXPR, type,
9302 fold_convert (type, negate_expr (arg0)),
9303 fold_convert (type, TREE_OPERAND (arg1, 0)));
9305 if (! FLOAT_TYPE_P (type))
9307 if (integer_zerop (arg1))
9308 return omit_one_operand (type, arg1, arg0);
9309 if (integer_onep (arg1))
9310 return non_lvalue (fold_convert (type, arg0));
9311 /* Transform x * -1 into -x. */
9312 if (integer_all_onesp (arg1))
9313 return fold_convert (type, negate_expr (arg0));
9314 /* Transform x * -C into -x * C if x is easily negatable. */
9315 if (TREE_CODE (arg1) == INTEGER_CST
9316 && tree_int_cst_sgn (arg1) == -1
9317 && negate_expr_p (arg0)
9318 && (tem = negate_expr (arg1)) != arg1
9319 && !TREE_OVERFLOW (tem))
9320 return fold_build2 (MULT_EXPR, type,
9321 negate_expr (arg0), tem);
9323 /* (a * (1 << b)) is (a << b) */
9324 if (TREE_CODE (arg1) == LSHIFT_EXPR
9325 && integer_onep (TREE_OPERAND (arg1, 0)))
9326 return fold_build2 (LSHIFT_EXPR, type, arg0,
9327 TREE_OPERAND (arg1, 1));
9328 if (TREE_CODE (arg0) == LSHIFT_EXPR
9329 && integer_onep (TREE_OPERAND (arg0, 0)))
9330 return fold_build2 (LSHIFT_EXPR, type, arg1,
9331 TREE_OPERAND (arg0, 1));
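      /* Illustrative example (annotation, not original source): for an
         integer a, both "a * (1 << b)" and "(1 << b) * a" become
         "a << b".  */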
9333 if (TREE_CODE (arg1) == INTEGER_CST
9334 && 0 != (tem = extract_muldiv (op0,
9335 fold_convert (type, arg1),
9336 code, NULL_TREE)))
9337 return fold_convert (type, tem);
9339 /* Optimize z * conj(z) for integer complex numbers. */
9340 if (TREE_CODE (arg0) == CONJ_EXPR
9341 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9342 return fold_mult_zconjz (type, arg1);
9343 if (TREE_CODE (arg1) == CONJ_EXPR
9344 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9345 return fold_mult_zconjz (type, arg0);
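      /* Worked identity (annotation, not original source): for an
         integer complex z == x + y*i,
           z * conj(z) == (x + y*i) * (x - y*i) == x*x + y*y + 0*i.  */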
9347 else
9349 /* Maybe fold x * 0 to 0. The expressions aren't the same
9350 when x is NaN, since x * 0 is also NaN. Nor are they the
9351 same in modes with signed zeros, since multiplying a
9352 negative value by 0 gives -0, not +0. */
9353 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9354 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9355 && real_zerop (arg1))
9356 return omit_one_operand (type, arg1, arg0);
9357 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9358 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9359 && real_onep (arg1))
9360 return non_lvalue (fold_convert (type, arg0));
9362 /* Transform x * -1.0 into -x. */
9363 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9364 && real_minus_onep (arg1))
9365 return fold_convert (type, negate_expr (arg0));
9367 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9368 if (flag_unsafe_math_optimizations
9369 && TREE_CODE (arg0) == RDIV_EXPR
9370 && TREE_CODE (arg1) == REAL_CST
9371 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9373 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9374 arg1, 0);
9375 if (tem)
9376 return fold_build2 (RDIV_EXPR, type, tem,
9377 TREE_OPERAND (arg0, 1));
9380 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9381 if (operand_equal_p (arg0, arg1, 0))
9383 tree tem = fold_strip_sign_ops (arg0);
9384 if (tem != NULL_TREE)
9386 tem = fold_convert (type, tem);
9387 return fold_build2 (MULT_EXPR, type, tem, tem);
9391 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9392 This is not the same for NaNs or if signed zeros are
9393 involved. */
9394 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9395 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9396 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9397 && TREE_CODE (arg1) == COMPLEX_CST
9398 && real_zerop (TREE_REALPART (arg1)))
9400 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9401 if (real_onep (TREE_IMAGPART (arg1)))
9402 return fold_build2 (COMPLEX_EXPR, type,
9403 negate_expr (fold_build1 (IMAGPART_EXPR,
9404 rtype, arg0)),
9405 fold_build1 (REALPART_EXPR, rtype, arg0));
9406 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9407 return fold_build2 (COMPLEX_EXPR, type,
9408 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9409 negate_expr (fold_build1 (REALPART_EXPR,
9410 rtype, arg0)));
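      /* Worked identity (annotation, not original source): for
         z == x + y*i,  z * I == -y + x*i  and  z * -I == y - x*i,
         which is exactly the swap-and-negate built above.  */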
9413 /* Optimize z * conj(z) for floating point complex numbers.
9414 Guarded by flag_unsafe_math_optimizations as non-finite
9415 imaginary components don't produce scalar results. */
9416 if (flag_unsafe_math_optimizations
9417 && TREE_CODE (arg0) == CONJ_EXPR
9418 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9419 return fold_mult_zconjz (type, arg1);
9420 if (flag_unsafe_math_optimizations
9421 && TREE_CODE (arg1) == CONJ_EXPR
9422 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9423 return fold_mult_zconjz (type, arg0);
9425 if (flag_unsafe_math_optimizations)
9427 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9428 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9430 /* Optimizations of root(...)*root(...). */
9431 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9433 tree rootfn, arg, arglist;
9434 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9435 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9437 /* Optimize sqrt(x)*sqrt(x) as x. */
9438 if (BUILTIN_SQRT_P (fcode0)
9439 && operand_equal_p (arg00, arg10, 0)
9440 && ! HONOR_SNANS (TYPE_MODE (type)))
9441 return arg00;
9443 /* Optimize root(x)*root(y) as root(x*y). */
9444 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9445 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9446 arglist = build_tree_list (NULL_TREE, arg);
9447 return build_function_call_expr (rootfn, arglist);
9450 /* Optimize expN(x)*expN(y) as expN(x+y). */
9451 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9453 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9454 tree arg = fold_build2 (PLUS_EXPR, type,
9455 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9456 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9457 tree arglist = build_tree_list (NULL_TREE, arg);
9458 return build_function_call_expr (expfn, arglist);
9461 /* Optimizations of pow(...)*pow(...). */
9462 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9463 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9464 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9466 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9467 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9468 1)));
9469 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9470 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9471 1)));
9473 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9474 if (operand_equal_p (arg01, arg11, 0))
9476 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9477 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9478 tree arglist = tree_cons (NULL_TREE, arg,
9479 build_tree_list (NULL_TREE,
9480 arg01));
9481 return build_function_call_expr (powfn, arglist);
9484 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9485 if (operand_equal_p (arg00, arg10, 0))
9487 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9488 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9489 tree arglist = tree_cons (NULL_TREE, arg00,
9490 build_tree_list (NULL_TREE,
9491 arg));
9492 return build_function_call_expr (powfn, arglist);
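         /* Illustrative examples (annotation, not original source),
            both under -funsafe-math-optimizations:
              pow (x, y) * pow (z, y) -> pow (x * z, y)
              pow (x, 2.5) * pow (x, 0.5) -> pow (x, 3.0)  */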
9496 /* Optimize tan(x)*cos(x) as sin(x). */
9497 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9498 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9499 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9500 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9501 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9502 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9503 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9504 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9506 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9508 if (sinfn != NULL_TREE)
9509 return build_function_call_expr (sinfn,
9510 TREE_OPERAND (arg0, 1));
9513 /* Optimize x*pow(x,c) as pow(x,c+1). */
9514 if (fcode1 == BUILT_IN_POW
9515 || fcode1 == BUILT_IN_POWF
9516 || fcode1 == BUILT_IN_POWL)
9518 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9519 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9520 1)));
9521 if (TREE_CODE (arg11) == REAL_CST
9522 && !TREE_OVERFLOW (arg11)
9523 && operand_equal_p (arg0, arg10, 0))
9525 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9526 REAL_VALUE_TYPE c;
9527 tree arg, arglist;
9529 c = TREE_REAL_CST (arg11);
9530 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9531 arg = build_real (type, c);
9532 arglist = build_tree_list (NULL_TREE, arg);
9533 arglist = tree_cons (NULL_TREE, arg0, arglist);
9534 return build_function_call_expr (powfn, arglist);
9538 /* Optimize pow(x,c)*x as pow(x,c+1). */
9539 if (fcode0 == BUILT_IN_POW
9540 || fcode0 == BUILT_IN_POWF
9541 || fcode0 == BUILT_IN_POWL)
9543 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9544 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9545 1)));
9546 if (TREE_CODE (arg01) == REAL_CST
9547 && !TREE_OVERFLOW (arg01)
9548 && operand_equal_p (arg1, arg00, 0))
9550 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9551 REAL_VALUE_TYPE c;
9552 tree arg, arglist;
9554 c = TREE_REAL_CST (arg01);
9555 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9556 arg = build_real (type, c);
9557 arglist = build_tree_list (NULL_TREE, arg);
9558 arglist = tree_cons (NULL_TREE, arg1, arglist);
9559 return build_function_call_expr (powfn, arglist);
9563 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9564 if (! optimize_size
9565 && operand_equal_p (arg0, arg1, 0))
9567 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9569 if (powfn)
9571 tree arg = build_real (type, dconst2);
9572 tree arglist = build_tree_list (NULL_TREE, arg);
9573 arglist = tree_cons (NULL_TREE, arg0, arglist);
9574 return build_function_call_expr (powfn, arglist);
9579 goto associate;
9581 case BIT_IOR_EXPR:
9582 bit_ior:
9583 if (integer_all_onesp (arg1))
9584 return omit_one_operand (type, arg1, arg0);
9585 if (integer_zerop (arg1))
9586 return non_lvalue (fold_convert (type, arg0));
9587 if (operand_equal_p (arg0, arg1, 0))
9588 return non_lvalue (fold_convert (type, arg0));
9590 /* ~X | X is -1. */
9591 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9592 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9594 t1 = build_int_cst_type (type, -1);
9595 return omit_one_operand (type, t1, arg1);
9598 /* X | ~X is -1. */
9599 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9600 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9602 t1 = build_int_cst_type (type, -1);
9603 return omit_one_operand (type, t1, arg0);
9606 /* Canonicalize (X & C1) | C2. */
9607 if (TREE_CODE (arg0) == BIT_AND_EXPR
9608 && TREE_CODE (arg1) == INTEGER_CST
9609 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9611 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9612 int width = TYPE_PRECISION (type);
9613 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9614 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9615 hi2 = TREE_INT_CST_HIGH (arg1);
9616 lo2 = TREE_INT_CST_LOW (arg1);
9618 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9619 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9620 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9622 if (width > HOST_BITS_PER_WIDE_INT)
9624 mhi = (unsigned HOST_WIDE_INT) -1
9625 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9626 mlo = -1;
9628 else
9630 mhi = 0;
9631 mlo = (unsigned HOST_WIDE_INT) -1
9632 >> (HOST_BITS_PER_WIDE_INT - width);
9635 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9636 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9637 return fold_build2 (BIT_IOR_EXPR, type,
9638 TREE_OPERAND (arg0, 0), arg1);
9640 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9641 hi1 &= mhi;
9642 lo1 &= mlo;
9643 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9644 return fold_build2 (BIT_IOR_EXPR, type,
9645 fold_build2 (BIT_AND_EXPR, type,
9646 TREE_OPERAND (arg0, 0),
9647 build_int_cst_wide (type,
9648 lo1 & ~lo2,
9649 hi1 & ~hi2)),
9650 arg1);
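      /* Worked example (annotation, not original source): for
         "(x & 0xF0) | 0x3C", C1 & ~C2 == 0xF0 & ~0x3C == 0xC0, so the
         expression is canonicalized to "(x & 0xC0) | 0x3C".  */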
9653 /* (X & Y) | Y is (X, Y). */
9654 if (TREE_CODE (arg0) == BIT_AND_EXPR
9655 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9656 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9657 /* (X & Y) | X is (Y, X). */
9658 if (TREE_CODE (arg0) == BIT_AND_EXPR
9659 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9660 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9661 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9662 /* X | (X & Y) is (Y, X). */
9663 if (TREE_CODE (arg1) == BIT_AND_EXPR
9664 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9665 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9666 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9667 /* X | (Y & X) is (Y, X). */
9668 if (TREE_CODE (arg1) == BIT_AND_EXPR
9669 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9670 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9671 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9673 t1 = distribute_bit_expr (code, type, arg0, arg1);
9674 if (t1 != NULL_TREE)
9675 return t1;
9677 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9679 This results in more efficient code for machines without a NAND
9680 instruction. Combine will canonicalize to the first form
9681 which will allow use of NAND instructions provided by the
9682 backend if they exist. */
9683 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9684 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9686 return fold_build1 (BIT_NOT_EXPR, type,
9687 build2 (BIT_AND_EXPR, type,
9688 TREE_OPERAND (arg0, 0),
9689 TREE_OPERAND (arg1, 0)));
9692 /* See if this can be simplified into a rotate first. If that
9693 is unsuccessful continue in the association code. */
9694 goto bit_rotate;
9696 case BIT_XOR_EXPR:
9697 if (integer_zerop (arg1))
9698 return non_lvalue (fold_convert (type, arg0));
9699 if (integer_all_onesp (arg1))
9700 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9701 if (operand_equal_p (arg0, arg1, 0))
9702 return omit_one_operand (type, integer_zero_node, arg0);
9704 /* ~X ^ X is -1. */
9705 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9706 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9708 t1 = build_int_cst_type (type, -1);
9709 return omit_one_operand (type, t1, arg1);
9712 /* X ^ ~X is -1. */
9713 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9714 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9716 t1 = build_int_cst_type (type, -1);
9717 return omit_one_operand (type, t1, arg0);
9720 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9721 with a constant, and the two constants have no bits in common,
9722 we should treat this as a BIT_IOR_EXPR since this may produce more
9723 simplifications. */
9724 if (TREE_CODE (arg0) == BIT_AND_EXPR
9725 && TREE_CODE (arg1) == BIT_AND_EXPR
9726 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9727 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9728 && integer_zerop (const_binop (BIT_AND_EXPR,
9729 TREE_OPERAND (arg0, 1),
9730 TREE_OPERAND (arg1, 1), 0)))
9732 code = BIT_IOR_EXPR;
9733 goto bit_ior;
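      /* Worked example (annotation, not original source):
         "(x & 0x0F) ^ (y & 0xF0)" has disjoint constant masks, so it is
         treated as "(x & 0x0F) | (y & 0xF0)", exposing the BIT_IOR
         folds above.  */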
9736 /* (X | Y) ^ X -> Y & ~X. */
9737 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9738 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9740 tree t2 = TREE_OPERAND (arg0, 1);
9741 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9742 arg1);
9743 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9744 fold_convert (type, t1));
9745 return t1;
9748 /* (Y | X) ^ X -> Y & ~X. */
9749 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9750 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9752 tree t2 = TREE_OPERAND (arg0, 0);
9753 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9754 arg1);
9755 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9756 fold_convert (type, t1));
9757 return t1;
9760 /* X ^ (X | Y) -> Y & ~X. */
9761 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9762 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9764 tree t2 = TREE_OPERAND (arg1, 1);
9765 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9766 arg0);
9767 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9768 fold_convert (type, t1));
9769 return t1;
9772 /* X ^ (Y | X) -> Y & ~X. */
9773 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9774 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9776 tree t2 = TREE_OPERAND (arg1, 0);
9777 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9778 arg0);
9779 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9780 fold_convert (type, t1));
9781 return t1;
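      /* Worked identity (annotation, not original source): in
         (X | Y) ^ X, every bit set in X is also set in X | Y and
         cancels, while bits clear in X pass Y through -- hence
         the result Y & ~X built in the four cases above.  */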
9784 /* Convert ~X ^ ~Y to X ^ Y. */
9785 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9786 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9787 return fold_build2 (code, type,
9788 fold_convert (type, TREE_OPERAND (arg0, 0)),
9789 fold_convert (type, TREE_OPERAND (arg1, 0)));
9791 /* Convert ~X ^ C to X ^ ~C. */
9792 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9793 && TREE_CODE (arg1) == INTEGER_CST)
9794 return fold_build2 (code, type,
9795 fold_convert (type, TREE_OPERAND (arg0, 0)),
9796 fold_build1 (BIT_NOT_EXPR, type, arg1));
9798 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9799 if (TREE_CODE (arg0) == BIT_AND_EXPR
9800 && integer_onep (TREE_OPERAND (arg0, 1))
9801 && integer_onep (arg1))
9802 return fold_build2 (EQ_EXPR, type, arg0,
9803 build_int_cst (TREE_TYPE (arg0), 0));
9805 /* Fold (X & Y) ^ Y as ~X & Y. */
9806 if (TREE_CODE (arg0) == BIT_AND_EXPR
9807 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9809 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9810 return fold_build2 (BIT_AND_EXPR, type,
9811 fold_build1 (BIT_NOT_EXPR, type, tem),
9812 fold_convert (type, arg1));
9814 /* Fold (X & Y) ^ X as ~Y & X. */
9815 if (TREE_CODE (arg0) == BIT_AND_EXPR
9816 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9817 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9819 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9820 return fold_build2 (BIT_AND_EXPR, type,
9821 fold_build1 (BIT_NOT_EXPR, type, tem),
9822 fold_convert (type, arg1));
9824 /* Fold X ^ (X & Y) as X & ~Y. */
9825 if (TREE_CODE (arg1) == BIT_AND_EXPR
9826 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9828 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9829 return fold_build2 (BIT_AND_EXPR, type,
9830 fold_convert (type, arg0),
9831 fold_build1 (BIT_NOT_EXPR, type, tem));
9833 /* Fold X ^ (Y & X) as ~Y & X. */
9834 if (TREE_CODE (arg1) == BIT_AND_EXPR
9835 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9836 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9838 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9839 return fold_build2 (BIT_AND_EXPR, type,
9840 fold_build1 (BIT_NOT_EXPR, type, tem),
9841 fold_convert (type, arg0));
9844 /* See if this can be simplified into a rotate first. If that
9845 is unsuccessful continue in the association code. */
9846 goto bit_rotate;
9848 case BIT_AND_EXPR:
9849 if (integer_all_onesp (arg1))
9850 return non_lvalue (fold_convert (type, arg0));
9851 if (integer_zerop (arg1))
9852 return omit_one_operand (type, arg1, arg0);
9853 if (operand_equal_p (arg0, arg1, 0))
9854 return non_lvalue (fold_convert (type, arg0));
9856 /* ~X & X is always zero. */
9857 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9858 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9859 return omit_one_operand (type, integer_zero_node, arg1);
9861 /* X & ~X is always zero. */
9862 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9864 return omit_one_operand (type, integer_zero_node, arg0);
9866 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9867 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9868 && TREE_CODE (arg1) == INTEGER_CST
9869 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9870 return fold_build2 (BIT_IOR_EXPR, type,
9871 fold_build2 (BIT_AND_EXPR, type,
9872 TREE_OPERAND (arg0, 0), arg1),
9873 fold_build2 (BIT_AND_EXPR, type,
9874 TREE_OPERAND (arg0, 1), arg1));
9876 /* (X | Y) & Y is (X, Y). */
9877 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9878 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9879 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9880 /* (X | Y) & X is (Y, X). */
9881 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9882 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9883 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9884 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9885 /* X & (X | Y) is (Y, X). */
9886 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9887 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9888 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9889 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9890 /* X & (Y | X) is (Y, X). */
9891 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9892 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9893 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9894 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9896 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9897 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9898 && integer_onep (TREE_OPERAND (arg0, 1))
9899 && integer_onep (arg1))
9901 tem = TREE_OPERAND (arg0, 0);
9902 return fold_build2 (EQ_EXPR, type,
9903 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9904 build_int_cst (TREE_TYPE (tem), 1)),
9905 build_int_cst (TREE_TYPE (tem), 0));
9907 /* Fold ~X & 1 as (X & 1) == 0. */
9908 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9909 && integer_onep (arg1))
9911 tem = TREE_OPERAND (arg0, 0);
9912 return fold_build2 (EQ_EXPR, type,
9913 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9914 build_int_cst (TREE_TYPE (tem), 1)),
9915 build_int_cst (TREE_TYPE (tem), 0));
9918 /* Fold (X ^ Y) & Y as ~X & Y. */
9919 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9920 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9922 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9923 return fold_build2 (BIT_AND_EXPR, type,
9924 fold_build1 (BIT_NOT_EXPR, type, tem),
9925 fold_convert (type, arg1));
9927 /* Fold (X ^ Y) & X as ~Y & X. */
9928 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9929 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9930 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9932 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9933 return fold_build2 (BIT_AND_EXPR, type,
9934 fold_build1 (BIT_NOT_EXPR, type, tem),
9935 fold_convert (type, arg1));
9937 /* Fold X & (X ^ Y) as X & ~Y. */
9938 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9939 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9941 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9942 return fold_build2 (BIT_AND_EXPR, type,
9943 fold_convert (type, arg0),
9944 fold_build1 (BIT_NOT_EXPR, type, tem));
9946 /* Fold X & (Y ^ X) as ~Y & X. */
9947 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9949 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9951 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9952 return fold_build2 (BIT_AND_EXPR, type,
9953 fold_build1 (BIT_NOT_EXPR, type, tem),
9954 fold_convert (type, arg0));
9957 t1 = distribute_bit_expr (code, type, arg0, arg1);
9958 if (t1 != NULL_TREE)
9959 return t1;
9960 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9961 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9962 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9964 unsigned int prec
9965 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9967 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9968 && (~TREE_INT_CST_LOW (arg1)
9969 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9970 return fold_convert (type, TREE_OPERAND (arg0, 0));
9973 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9975 This results in more efficient code for machines without a NOR
9976 instruction. Combine will canonicalize to the first form
9977 which will allow use of NOR instructions provided by the
9978 backend if they exist. */
9979 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9980 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9982 return fold_build1 (BIT_NOT_EXPR, type,
9983 build2 (BIT_IOR_EXPR, type,
9984 TREE_OPERAND (arg0, 0),
9985 TREE_OPERAND (arg1, 0)));
9988 goto associate;
9990 case RDIV_EXPR:
9991 /* Don't touch a floating-point divide by zero unless the mode
9992 of the constant can represent infinity. */
9993 if (TREE_CODE (arg1) == REAL_CST
9994 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9995 && real_zerop (arg1))
9996 return NULL_TREE;
9998 /* Optimize A / A to 1.0 if we don't care about
9999 NaNs or Infinities. Skip the transformation
10000 for non-real operands. */
10001 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10002 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10003 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10004 && operand_equal_p (arg0, arg1, 0))
10006 tree r = build_real (TREE_TYPE (arg0), dconst1);
10008 return omit_two_operands (type, r, arg0, arg1);
10011 /* The complex version of the above A / A optimization. */
10012 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10013 && operand_equal_p (arg0, arg1, 0))
10015 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10016 if (! HONOR_NANS (TYPE_MODE (elem_type))
10017 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10019 tree r = build_real (elem_type, dconst1);
10020 /* omit_two_operands will call fold_convert for us. */
10021 return omit_two_operands (type, r, arg0, arg1);
10025 /* (-A) / (-B) -> A / B */
10026 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10027 return fold_build2 (RDIV_EXPR, type,
10028 TREE_OPERAND (arg0, 0),
10029 negate_expr (arg1));
10030 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10031 return fold_build2 (RDIV_EXPR, type,
10032 negate_expr (arg0),
10033 TREE_OPERAND (arg1, 0));
10035 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10036 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10037 && real_onep (arg1))
10038 return non_lvalue (fold_convert (type, arg0));
10040 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10041 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10042 && real_minus_onep (arg1))
10043 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10045 /* If ARG1 is a constant, we can convert this to a multiply by the
10046 reciprocal. This does not have the same rounding properties,
10047 so only do this if -funsafe-math-optimizations. We can actually
10048 always safely do it if ARG1 is a power of two, but it's hard to
10049 tell if it is or not in a portable manner. */
10050 if (TREE_CODE (arg1) == REAL_CST)
10052 if (flag_unsafe_math_optimizations
10053 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10054 arg1, 0)))
10055 return fold_build2 (MULT_EXPR, type, arg0, tem);
10056 /* Find the reciprocal if optimizing and the result is exact. */
10057 if (optimize)
10059 REAL_VALUE_TYPE r;
10060 r = TREE_REAL_CST (arg1);
10061 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
10063 tem = build_real (type, r);
10064 return fold_build2 (MULT_EXPR, type,
10065 fold_convert (type, arg0), tem);
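         /* Illustrative example (annotation, not original source):
            "x / 2.0" becomes "x * 0.5" whenever optimizing, since the
            reciprocal is exact; "x / 3.0" becomes a multiply by the
            inexact reciprocal only under -funsafe-math-optimizations.  */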
10069 /* Convert A/B/C to A/(B*C). */
10070 if (flag_unsafe_math_optimizations
10071 && TREE_CODE (arg0) == RDIV_EXPR)
10072 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10073 fold_build2 (MULT_EXPR, type,
10074 TREE_OPERAND (arg0, 1), arg1));
10076 /* Convert A/(B/C) to (A/B)*C. */
10077 if (flag_unsafe_math_optimizations
10078 && TREE_CODE (arg1) == RDIV_EXPR)
10079 return fold_build2 (MULT_EXPR, type,
10080 fold_build2 (RDIV_EXPR, type, arg0,
10081 TREE_OPERAND (arg1, 0)),
10082 TREE_OPERAND (arg1, 1));
10084 /* Convert C1/(X*C2) into (C1/C2)/X. */
10085 if (flag_unsafe_math_optimizations
10086 && TREE_CODE (arg1) == MULT_EXPR
10087 && TREE_CODE (arg0) == REAL_CST
10088 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10090 tree tem = const_binop (RDIV_EXPR, arg0,
10091 TREE_OPERAND (arg1, 1), 0);
10092 if (tem)
10093 return fold_build2 (RDIV_EXPR, type, tem,
10094 TREE_OPERAND (arg1, 0));
10097 if (flag_unsafe_math_optimizations)
10099 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10100 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10102 /* Optimize sin(x)/cos(x) as tan(x). */
10103 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10104 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10105 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10106 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10107 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10109 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10111 if (tanfn != NULL_TREE)
10112 return build_function_call_expr (tanfn,
10113 TREE_OPERAND (arg0, 1));
10116 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10117 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10118 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10119 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10120 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10121 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10123 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10125 if (tanfn != NULL_TREE)
10127 tree tmp = TREE_OPERAND (arg0, 1);
10128 tmp = build_function_call_expr (tanfn, tmp);
10129 return fold_build2 (RDIV_EXPR, type,
10130 build_real (type, dconst1), tmp);
10134 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10135 NaNs or Infinities. */
10136 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10137 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10138 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10140 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10141 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10143 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10144 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10145 && operand_equal_p (arg00, arg01, 0))
10147 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10149 if (cosfn != NULL_TREE)
10150 return build_function_call_expr (cosfn,
10151 TREE_OPERAND (arg0, 1));
10155 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10156 NaNs or Infinities. */
10157 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10158 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10159 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10161 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10162 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10164 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10165 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10166 && operand_equal_p (arg00, arg01, 0))
10168 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10170 if (cosfn != NULL_TREE)
10172 tree tmp = TREE_OPERAND (arg0, 1);
10173 tmp = build_function_call_expr (cosfn, tmp);
10174 return fold_build2 (RDIV_EXPR, type,
10175 build_real (type, dconst1),
10176 tmp);
10181 /* Optimize pow(x,c)/x as pow(x,c-1). */
10182 if (fcode0 == BUILT_IN_POW
10183 || fcode0 == BUILT_IN_POWF
10184 || fcode0 == BUILT_IN_POWL)
10186 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10187 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10188 if (TREE_CODE (arg01) == REAL_CST
10189 && !TREE_OVERFLOW (arg01)
10190 && operand_equal_p (arg1, arg00, 0))
10192 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10193 REAL_VALUE_TYPE c;
10194 tree arg, arglist;
10196 c = TREE_REAL_CST (arg01);
10197 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10198 arg = build_real (type, c);
10199 arglist = build_tree_list (NULL_TREE, arg);
10200 arglist = tree_cons (NULL_TREE, arg1, arglist);
10201 return build_function_call_expr (powfn, arglist);
10205 /* Optimize x/expN(y) into x*expN(-y). */
10206 if (BUILTIN_EXPONENT_P (fcode1))
10208 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10209 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10210 tree arglist = build_tree_list (NULL_TREE,
10211 fold_convert (type, arg));
10212 arg1 = build_function_call_expr (expfn, arglist);
10213 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10216 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10217 if (fcode1 == BUILT_IN_POW
10218 || fcode1 == BUILT_IN_POWF
10219 || fcode1 == BUILT_IN_POWL)
10221 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10222 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10223 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10224 tree neg11 = fold_convert (type, negate_expr (arg11));
10225 tree arglist = tree_cons (NULL_TREE, arg10,
10226 build_tree_list (NULL_TREE, neg11));
10227 arg1 = build_function_call_expr (powfn, arglist);
10228 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10231 return NULL_TREE;
10233 case TRUNC_DIV_EXPR:
10234 case FLOOR_DIV_EXPR:
10235 /* Simplify A / (B << N) where A and B are positive and B is
10236 a power of 2, to A >> (N + log2(B)). */
10237 if (TREE_CODE (arg1) == LSHIFT_EXPR
10238 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10240 tree sval = TREE_OPERAND (arg1, 0);
10241 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10243 tree sh_cnt = TREE_OPERAND (arg1, 1);
10244 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10246 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10247 sh_cnt, build_int_cst (NULL_TREE, pow2));
10248 return fold_build2 (RSHIFT_EXPR, type,
10249 fold_convert (type, arg0), sh_cnt);
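         /* Worked example (annotation, not original source): for an
            unsigned a, "a / (2 << n)" has B == 2 and log2(B) == 1, so
            it is rewritten to "a >> (n + 1)".  */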
10252 /* Fall thru */
10254 case ROUND_DIV_EXPR:
10255 case CEIL_DIV_EXPR:
10256 case EXACT_DIV_EXPR:
10257 if (integer_onep (arg1))
10258 return non_lvalue (fold_convert (type, arg0));
10259 if (integer_zerop (arg1))
10260 return NULL_TREE;
10261 /* X / -1 is -X. */
10262 if (!TYPE_UNSIGNED (type)
10263 && TREE_CODE (arg1) == INTEGER_CST
10264 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10265 && TREE_INT_CST_HIGH (arg1) == -1)
10266 return fold_convert (type, negate_expr (arg0));
10268 /* Convert -A / -B to A / B when the type is signed and overflow is
10269 undefined. */
10270 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10271 && TREE_CODE (arg0) == NEGATE_EXPR
10272 && negate_expr_p (arg1))
10273 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10274 negate_expr (arg1));
10275 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10276 && TREE_CODE (arg1) == NEGATE_EXPR
10277 && negate_expr_p (arg0))
10278 return fold_build2 (code, type, negate_expr (arg0),
10279 TREE_OPERAND (arg1, 0));
10281 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10282 operation, EXACT_DIV_EXPR.
10284 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10285 At one time others generated faster code, but it's not clear they do
10286 after the last round of changes to the DIV code in expmed.c. */
10287 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10288 && multiple_of_p (type, arg0, arg1))
10289 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10291 if (TREE_CODE (arg1) == INTEGER_CST
10292 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10293 return fold_convert (type, tem);
10295 return NULL_TREE;
10297 case CEIL_MOD_EXPR:
10298 case FLOOR_MOD_EXPR:
10299 case ROUND_MOD_EXPR:
10300 case TRUNC_MOD_EXPR:
10301 /* X % 1 is always zero, but be sure to preserve any side
10302 effects in X. */
10303 if (integer_onep (arg1))
10304 return omit_one_operand (type, integer_zero_node, arg0);
10306 /* For X % 0, return X % 0 unchanged so that we can get the
10307 proper warnings and errors. */
10308 if (integer_zerop (arg1))
10309 return NULL_TREE;
10311 /* 0 % X is always zero, but be sure to preserve any side
10312 effects in X. Place this after checking for X == 0. */
10313 if (integer_zerop (arg0))
10314 return omit_one_operand (type, integer_zero_node, arg1);
10316 /* X % -1 is zero. */
10317 if (!TYPE_UNSIGNED (type)
10318 && TREE_CODE (arg1) == INTEGER_CST
10319 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10320 && TREE_INT_CST_HIGH (arg1) == -1)
10321 return omit_one_operand (type, integer_zero_node, arg0);
10323 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10324 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10325 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10326 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10328 tree c = arg1;
10329 /* Also optimize A % (C << N) where C is a power of 2,
10330 to A & ((C << N) - 1). */
10331 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10332 c = TREE_OPERAND (arg1, 0);
10334 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10336 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10337 build_int_cst (TREE_TYPE (arg1), 1));
10338 return fold_build2 (BIT_AND_EXPR, type,
10339 fold_convert (type, arg0),
10340 fold_convert (type, mask));
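         /* Worked examples (annotation, not original source): for an
            unsigned x, "x % 8" becomes "x & 7", and "x % (4 << n)"
            becomes "x & ((4 << n) - 1)".  */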
10344 /* X % -C is the same as X % C. */
10345 if (code == TRUNC_MOD_EXPR
10346 && !TYPE_UNSIGNED (type)
10347 && TREE_CODE (arg1) == INTEGER_CST
10348 && !TREE_OVERFLOW (arg1)
10349 && TREE_INT_CST_HIGH (arg1) < 0
10350 && !flag_trapv
10351 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10352 && !sign_bit_p (arg1, arg1))
10353 return fold_build2 (code, type, fold_convert (type, arg0),
10354 fold_convert (type, negate_expr (arg1)));
10356 /* X % -Y is the same as X % Y. */
10357 if (code == TRUNC_MOD_EXPR
10358 && !TYPE_UNSIGNED (type)
10359 && TREE_CODE (arg1) == NEGATE_EXPR
10360 && !flag_trapv)
10361 return fold_build2 (code, type, fold_convert (type, arg0),
10362 fold_convert (type, TREE_OPERAND (arg1, 0)));
10364 if (TREE_CODE (arg1) == INTEGER_CST
10365 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10366 return fold_convert (type, tem);
10368 return NULL_TREE;
10370 case LROTATE_EXPR:
10371 case RROTATE_EXPR:
10372 if (integer_all_onesp (arg0))
10373 return omit_one_operand (type, arg0, arg1);
10374 goto shift;
10376 case RSHIFT_EXPR:
10377 /* Optimize -1 >> x for arithmetic right shifts. */
10378 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10379 return omit_one_operand (type, arg0, arg1);
10380 /* ... fall through ... */
10382 case LSHIFT_EXPR:
10383 shift:
10384 if (integer_zerop (arg1))
10385 return non_lvalue (fold_convert (type, arg0));
10386 if (integer_zerop (arg0))
10387 return omit_one_operand (type, arg0, arg1);
10389 /* Since a negative shift count is not well-defined,
10390 don't try to compute it in the compiler. */
10391 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10392 return NULL_TREE;
10394 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10395 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10396 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10397 && host_integerp (TREE_OPERAND (arg0, 1), false)
10398 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10400 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10401 + TREE_INT_CST_LOW (arg1));
10403 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10404 being well defined. */
10405 if (low >= TYPE_PRECISION (type))
10407 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10408 low = low % TYPE_PRECISION (type);
10409 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10410 return build_int_cst (type, 0);
10411 else
10412 low = TYPE_PRECISION (type) - 1;
10415 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10416 build_int_cst (type, low));
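      /* Illustrative example (annotation, not original source):
         "(x >> 3) >> 5" becomes "x >> 8".  If the combined count
         reaches the precision, e.g. "(x >> 20) >> 20" on a 32-bit
         unsigned x, the clamping above yields the constant 0.  */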
10419 /* Transform (x >> c) << c into x & (-1 << c), or transform (x << c) >> c
10420 into x & ((unsigned)-1 >> c) for unsigned types. */
10421 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10422 || (TYPE_UNSIGNED (type)
10423 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10424 && host_integerp (arg1, false)
10425 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10426 && host_integerp (TREE_OPERAND (arg0, 1), false)
10427 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10429 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10430 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10431 tree lshift;
10432 tree arg00;
10434 if (low0 == low1)
10436 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10438 lshift = build_int_cst (type, -1);
10439 lshift = int_const_binop (code, lshift, arg1, 0);
10441 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
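      /* Worked example (annotation, not original source): for a 32-bit
         unsigned x, "(x >> 4) << 4" clears the low four bits and is
         rewritten to "x & (-1 << 4)", i.e. x & 0xFFFFFFF0.  */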
10445 /* Rewrite an LROTATE_EXPR by a constant into an
10446 RROTATE_EXPR by a new constant. */
10447 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10449 tree tem = build_int_cst (TREE_TYPE (arg1),
10450 GET_MODE_BITSIZE (TYPE_MODE (type)));
10451 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10452 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10455 /* If we have a rotate of a bit operation with the rotate count and
10456 the second operand of the bit operation both constant,
10457 permute the two operations. */
10458 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10459 && (TREE_CODE (arg0) == BIT_AND_EXPR
10460 || TREE_CODE (arg0) == BIT_IOR_EXPR
10461 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10462 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10463 return fold_build2 (TREE_CODE (arg0), type,
10464 fold_build2 (code, type,
10465 TREE_OPERAND (arg0, 0), arg1),
10466 fold_build2 (code, type,
10467 TREE_OPERAND (arg0, 1), arg1));
10469 /* Two consecutive rotates adding up to the width of the mode can
10470 be ignored. */
10471 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10472 && TREE_CODE (arg0) == RROTATE_EXPR
10473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10474 && TREE_INT_CST_HIGH (arg1) == 0
10475 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10476 && ((TREE_INT_CST_LOW (arg1)
10477 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10478 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10479 return TREE_OPERAND (arg0, 0);
10481 return NULL_TREE;
10483 case MIN_EXPR:
10484 if (operand_equal_p (arg0, arg1, 0))
10485 return omit_one_operand (type, arg0, arg1);
10486 if (INTEGRAL_TYPE_P (type)
10487 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10488 return omit_one_operand (type, arg1, arg0);
10489 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10490 if (tem)
10491 return tem;
10492 goto associate;
10494 case MAX_EXPR:
10495 if (operand_equal_p (arg0, arg1, 0))
10496 return omit_one_operand (type, arg0, arg1);
10497 if (INTEGRAL_TYPE_P (type)
10498 && TYPE_MAX_VALUE (type)
10499 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10500 return omit_one_operand (type, arg1, arg0);
10501 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10502 if (tem)
10503 return tem;
10504 goto associate;
10506 case TRUTH_ANDIF_EXPR:
10507 /* Note that the operands of this must be ints
10508 and their values must be 0 or 1.
10509 ("true" is a fixed value perhaps depending on the language.) */
10510 /* If first arg is constant zero, return it. */
10511 if (integer_zerop (arg0))
10512 return fold_convert (type, arg0);
10513 case TRUTH_AND_EXPR:
10514 /* If either arg is constant true, drop it. */
10515 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10516 return non_lvalue (fold_convert (type, arg1));
10517 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10518 /* Preserve sequence points. */
10519 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10520 return non_lvalue (fold_convert (type, arg0));
10521 /* If second arg is constant zero, result is zero, but first arg
10522 must be evaluated. */
10523 if (integer_zerop (arg1))
10524 return omit_one_operand (type, arg1, arg0);
10525 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10526 case will be handled here. */
10527 if (integer_zerop (arg0))
10528 return omit_one_operand (type, arg0, arg1);
10530 /* !X && X is always false. */
10531 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10532 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10533 return omit_one_operand (type, integer_zero_node, arg1);
10534 /* X && !X is always false. */
10535 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10536 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10537 return omit_one_operand (type, integer_zero_node, arg0);
10539 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10540 means A >= Y && A != MAX, but in this case we know that
10541 A < X <= MAX. */
10543 if (!TREE_SIDE_EFFECTS (arg0)
10544 && !TREE_SIDE_EFFECTS (arg1))
10546 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10547 if (tem && !operand_equal_p (tem, arg0, 0))
10548 return fold_build2 (code, type, tem, arg1);
10550 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10551 if (tem && !operand_equal_p (tem, arg1, 0))
10552 return fold_build2 (code, type, arg0, tem);
10555 truth_andor:
10556 /* We only do these simplifications if we are optimizing. */
10557 if (!optimize)
10558 return NULL_TREE;
10560 /* Check for things like (A || B) && (A || C). We can convert this
10561 to A || (B && C). Note that either operator can be any of the four
10562 truth and/or operations and the transformation will still be
10563 valid. Also note that we only care about order for the
10564 ANDIF and ORIF operators. If B contains side effects, this
10565 might change the truth-value of A. */
10566 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10567 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10568 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10569 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10570 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10571 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10573 tree a00 = TREE_OPERAND (arg0, 0);
10574 tree a01 = TREE_OPERAND (arg0, 1);
10575 tree a10 = TREE_OPERAND (arg1, 0);
10576 tree a11 = TREE_OPERAND (arg1, 1);
10577 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10578 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10579 && (code == TRUTH_AND_EXPR
10580 || code == TRUTH_OR_EXPR));
10582 if (operand_equal_p (a00, a10, 0))
10583 return fold_build2 (TREE_CODE (arg0), type, a00,
10584 fold_build2 (code, type, a01, a11));
10585 else if (commutative && operand_equal_p (a00, a11, 0))
10586 return fold_build2 (TREE_CODE (arg0), type, a00,
10587 fold_build2 (code, type, a01, a10));
10588 else if (commutative && operand_equal_p (a01, a10, 0))
10589 return fold_build2 (TREE_CODE (arg0), type, a01,
10590 fold_build2 (code, type, a00, a11));
10592 /* This case is tricky because we must either have commutative
10593 operators or else A10 must not have side-effects. */
10595 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10596 && operand_equal_p (a01, a11, 0))
10597 return fold_build2 (TREE_CODE (arg0), type,
10598 fold_build2 (code, type, a00, a10),
10599 a01);
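      /* Illustrative example (annotation, not original source):
         "(a || b) && (a || c)" matches the a00 == a10 case above and
         is rewritten to "a || (b && c)".  */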
10602 /* See if we can build a range comparison. */
10603 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10604 return tem;
10606 /* Check for the possibility of merging component references. If our
10607 lhs is another similar operation, try to merge its rhs with our
10608 rhs. Then try to merge our lhs and rhs. */
10609 if (TREE_CODE (arg0) == code
10610 && 0 != (tem = fold_truthop (code, type,
10611 TREE_OPERAND (arg0, 1), arg1)))
10612 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10614 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10615 return tem;
10617 return NULL_TREE;
10619 case TRUTH_ORIF_EXPR:
10620 /* Note that the operands of this must be ints
10621 and their values must be 0 or true.
10622 ("true" is a fixed value perhaps depending on the language.) */
10623 /* If first arg is constant true, return it. */
10624 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10625 return fold_convert (type, arg0);
10626 case TRUTH_OR_EXPR:
10627 /* If either arg is constant zero, drop it. */
10628 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10629 return non_lvalue (fold_convert (type, arg1));
10630 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10631 /* Preserve sequence points. */
10632 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10633 return non_lvalue (fold_convert (type, arg0));
10634 /* If second arg is constant true, result is true, but we must
10635 evaluate first arg. */
10636 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10637 return omit_one_operand (type, arg1, arg0);
10638 /* Likewise for first arg, but note this only occurs here for
10639 TRUTH_OR_EXPR. */
10640 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10641 return omit_one_operand (type, arg0, arg1);
10643 /* !X || X is always true. */
10644 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10645 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10646 return omit_one_operand (type, integer_one_node, arg1);
10647 /* X || !X is always true. */
10648 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10649 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10650 return omit_one_operand (type, integer_one_node, arg0);
10652 goto truth_andor;
10654 case TRUTH_XOR_EXPR:
10655 /* If the second arg is constant zero, drop it. */
10656 if (integer_zerop (arg1))
10657 return non_lvalue (fold_convert (type, arg0));
10658 /* If the second arg is constant true, this is a logical inversion. */
10659 if (integer_onep (arg1))
10661 /* Only call invert_truthvalue if operand is a truth value. */
10662 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10663 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10664 else
10665 tem = invert_truthvalue (arg0);
10666 return non_lvalue (fold_convert (type, tem));
10668 /* Identical arguments cancel to zero. */
10669 if (operand_equal_p (arg0, arg1, 0))
10670 return omit_one_operand (type, integer_zero_node, arg0);
10672 /* !X ^ X is always true. */
10673 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10674 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10675 return omit_one_operand (type, integer_one_node, arg1);
10677 /* X ^ !X is always true. */
10678 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10679 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10680 return omit_one_operand (type, integer_one_node, arg0);
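/* Taken together: x ^ 0 folds to x, x ^ 1 to !x, x ^ x to 0,
   and !x ^ x (or x ^ !x) to 1.  */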
10682 return NULL_TREE;
10684 case EQ_EXPR:
10685 case NE_EXPR:
10686 tem = fold_comparison (code, type, op0, op1);
10687 if (tem != NULL_TREE)
10688 return tem;
10690 /* bool_var != 0 becomes bool_var. */
10691 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10692 && code == NE_EXPR)
10693 return non_lvalue (fold_convert (type, arg0));
10695 /* bool_var == 1 becomes bool_var. */
10696 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10697 && code == EQ_EXPR)
10698 return non_lvalue (fold_convert (type, arg0));
10700 /* bool_var != 1 becomes !bool_var. */
10701 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10702 && code == NE_EXPR)
10703 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10705 /* bool_var == 0 becomes !bool_var. */
10706 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10707 && code == EQ_EXPR)
10708 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10710 /* If this is an equality comparison of the address of a non-weak
10711 object against zero, then we know the result. */
10712 if (TREE_CODE (arg0) == ADDR_EXPR
10713 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10714 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10715 && integer_zerop (arg1))
10716 return constant_boolean_node (code != EQ_EXPR, type);
10718 /* If this is an equality comparison of the address of two non-weak,
10719 unaliased symbols, neither of which is extern (since we do not
10720 have access to attributes for externs), then we know the result. */
10721 if (TREE_CODE (arg0) == ADDR_EXPR
10722 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10723 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10724 && ! lookup_attribute ("alias",
10725 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10726 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10727 && TREE_CODE (arg1) == ADDR_EXPR
10728 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10729 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10730 && ! lookup_attribute ("alias",
10731 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10732 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10734 /* We know that we're looking at the address of two
10735 non-weak, unaliased, static _DECL nodes.
10737 It is both wasteful and incorrect to call operand_equal_p
10738 to compare the two ADDR_EXPR nodes. It is wasteful in that
10739 all we need to do is test pointer equality for the arguments
10740 to the two ADDR_EXPR nodes. It is incorrect to use
10741 operand_equal_p as that function is NOT equivalent to a
10742 C equality test. It can in fact return false for two
10743 objects which would test as equal using the C equality
10744 operator. */
10745 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10746 return constant_boolean_node (equal
10747 ? code == EQ_EXPR : code != EQ_EXPR,
10748 type);
10751 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10752 a MINUS_EXPR of a constant, we can convert it into a comparison with
10753 a revised constant as long as no overflow occurs. */
10754 if (TREE_CODE (arg1) == INTEGER_CST
10755 && (TREE_CODE (arg0) == PLUS_EXPR
10756 || TREE_CODE (arg0) == MINUS_EXPR)
10757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10758 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10759 ? MINUS_EXPR : PLUS_EXPR,
10760 fold_convert (TREE_TYPE (arg0), arg1),
10761 TREE_OPERAND (arg0, 1), 0))
10762 && !TREE_OVERFLOW (tem))
10763 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10765 /* Similarly for a NEGATE_EXPR. */
10766 if (TREE_CODE (arg0) == NEGATE_EXPR
10767 && TREE_CODE (arg1) == INTEGER_CST
10768 && 0 != (tem = negate_expr (arg1))
10769 && TREE_CODE (tem) == INTEGER_CST
10770 && !TREE_OVERFLOW (tem))
10771 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10773 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10774 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10775 && TREE_CODE (arg1) == INTEGER_CST
10776 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10777 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10778 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10779 fold_convert (TREE_TYPE (arg0), arg1),
10780 TREE_OPERAND (arg0, 1)));
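/* Worked example: (x ^ 3) == 5 folds to x == 6, since 3 ^ 5 == 6.  */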
10782 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10783 for !=. Don't do this for ordered comparisons due to overflow. */
10784 if (TREE_CODE (arg0) == MINUS_EXPR
10785 && integer_zerop (arg1))
10786 return fold_build2 (code, type,
10787 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10789 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10790 if (TREE_CODE (arg0) == ABS_EXPR
10791 && (integer_zerop (arg1) || real_zerop (arg1)))
10792 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10794 /* If this is an EQ or NE comparison with zero and ARG0 is
10795 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10796 two operations, but the latter can be done in one less insn
10797 on machines that have only two-operand insns or on which a
10798 constant cannot be the first operand. */
10799 if (TREE_CODE (arg0) == BIT_AND_EXPR
10800 && integer_zerop (arg1))
10802 tree arg00 = TREE_OPERAND (arg0, 0);
10803 tree arg01 = TREE_OPERAND (arg0, 1);
10804 if (TREE_CODE (arg00) == LSHIFT_EXPR
10805 && integer_onep (TREE_OPERAND (arg00, 0)))
10806 return
10807 fold_build2 (code, type,
10808 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10809 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10810 arg01, TREE_OPERAND (arg00, 1)),
10811 fold_convert (TREE_TYPE (arg0),
10812 integer_one_node)),
10813 arg1);
10814 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10815 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10816 return
10817 fold_build2 (code, type,
10818 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10819 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10820 arg00, TREE_OPERAND (arg01, 1)),
10821 fold_convert (TREE_TYPE (arg0),
10822 integer_one_node)),
10823 arg1);
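/* For example, ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0;
   both test bit N of FLAGS, but the latter avoids a constant first
   operand.  */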
10826 /* If this is an NE or EQ comparison of zero against the result of a
10827 signed MOD operation whose second operand is a power of 2, make
10828 the MOD operation unsigned since it is simpler and equivalent. */
10829 if (integer_zerop (arg1)
10830 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10831 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10832 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10833 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10834 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10835 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10837 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10838 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10839 fold_convert (newtype,
10840 TREE_OPERAND (arg0, 0)),
10841 fold_convert (newtype,
10842 TREE_OPERAND (arg0, 1)));
10844 return fold_build2 (code, type, newmod,
10845 fold_convert (newtype, arg1));
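/* For example, with signed X, (x % 8) != 0 becomes
   ((unsigned) x % 8U) != 0: a power-of-two modulus is zero exactly
   when the low-order bits are zero, regardless of sign.  */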
10848 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10849 C1 is a valid shift constant, and C2 is a power of two, i.e.
10850 a single bit. */
10851 if (TREE_CODE (arg0) == BIT_AND_EXPR
10852 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10853 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10854 == INTEGER_CST
10855 && integer_pow2p (TREE_OPERAND (arg0, 1))
10856 && integer_zerop (arg1))
10858 tree itype = TREE_TYPE (arg0);
10859 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10860 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10862 /* Check for a valid shift count. */
10863 if (TREE_INT_CST_HIGH (arg001) == 0
10864 && TREE_INT_CST_LOW (arg001) < prec)
10866 tree arg01 = TREE_OPERAND (arg0, 1);
10867 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10868 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10869 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10870 can be rewritten as (X & (C2 << C1)) != 0. */
10871 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10873 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10874 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10875 return fold_build2 (code, type, tem, arg1);
10877 /* Otherwise, for signed (arithmetic) shifts,
10878 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10879 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10880 else if (!TYPE_UNSIGNED (itype))
10881 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10882 arg000, build_int_cst (itype, 0));
10883 /* Otherwise, for unsigned (logical) shifts,
10884 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10885 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10886 else
10887 return omit_one_operand (type,
10888 code == EQ_EXPR ? integer_one_node
10889 : integer_zero_node,
10890 arg000);
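/* Worked example: ((x >> 2) & 4) != 0 tests bit 4 of X, so it folds
   to (x & 16) != 0 because (4 << 2) == 16 does not overflow.  */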
10894 /* If this is an NE comparison of zero with an AND of one, remove the
10895 comparison since the AND will give the correct value. */
10896 if (code == NE_EXPR
10897 && integer_zerop (arg1)
10898 && TREE_CODE (arg0) == BIT_AND_EXPR
10899 && integer_onep (TREE_OPERAND (arg0, 1)))
10900 return fold_convert (type, arg0);
10902 /* If we have (A & C) == C where C is a power of 2, convert this into
10903 (A & C) != 0. Similarly for NE_EXPR. */
10904 if (TREE_CODE (arg0) == BIT_AND_EXPR
10905 && integer_pow2p (TREE_OPERAND (arg0, 1))
10906 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10907 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10908 arg0, fold_convert (TREE_TYPE (arg0),
10909 integer_zero_node));
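/* For example, (x & 8) == 8 folds to (x & 8) != 0; for a single-bit
   mask the two forms are equivalent.  */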
10911 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10912 bit, then fold the expression into A < 0 or A >= 0. */
10913 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10914 if (tem)
10915 return tem;
10917 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10918 Similarly for NE_EXPR. */
10919 if (TREE_CODE (arg0) == BIT_AND_EXPR
10920 && TREE_CODE (arg1) == INTEGER_CST
10921 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10923 tree notc = fold_build1 (BIT_NOT_EXPR,
10924 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10925 TREE_OPERAND (arg0, 1));
10926 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10927 arg1, notc);
10928 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10929 if (integer_nonzerop (dandnotc))
10930 return omit_one_operand (type, rslt, arg0);
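/* For example, (x & 4) == 3 folds to 0, since 3 & ~4 is nonzero:
   no value of X can make the masked result carry bits outside the
   mask.  */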
10933 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10934 Similarly for NE_EXPR. */
10935 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10936 && TREE_CODE (arg1) == INTEGER_CST
10937 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10939 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10940 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10941 TREE_OPERAND (arg0, 1), notd);
10942 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10943 if (integer_nonzerop (candnotd))
10944 return omit_one_operand (type, rslt, arg0);
10947 /* If this is a comparison of a field, we may be able to simplify it. */
10948 if ((TREE_CODE (arg0) == COMPONENT_REF
10949 || TREE_CODE (arg0) == BIT_FIELD_REF)
10950 /* Handle the constant case even without -O
10951 to make sure the warnings are given. */
10952 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10954 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10955 if (t1)
10956 return t1;
10959 /* Optimize comparisons of strlen vs zero to a compare of the
10960 first character of the string vs zero. To wit,
10961 strlen(ptr) == 0 => *ptr == 0
10962 strlen(ptr) != 0 => *ptr != 0
10963 Other cases should reduce to one of these two (or a constant)
10964 due to the return value of strlen being unsigned. */
10965 if (TREE_CODE (arg0) == CALL_EXPR
10966 && integer_zerop (arg1))
10968 tree fndecl = get_callee_fndecl (arg0);
10969 tree arglist;
10971 if (fndecl
10972 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10973 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10974 && (arglist = TREE_OPERAND (arg0, 1))
10975 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10976 && ! TREE_CHAIN (arglist))
10978 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10979 return fold_build2 (code, type, iref,
10980 build_int_cst (TREE_TYPE (iref), 0));
10984 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10985 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10986 if (TREE_CODE (arg0) == RSHIFT_EXPR
10987 && integer_zerop (arg1)
10988 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10990 tree arg00 = TREE_OPERAND (arg0, 0);
10991 tree arg01 = TREE_OPERAND (arg0, 1);
10992 tree itype = TREE_TYPE (arg00);
10993 if (TREE_INT_CST_HIGH (arg01) == 0
10994 && TREE_INT_CST_LOW (arg01)
10995 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10997 if (TYPE_UNSIGNED (itype))
10999 itype = lang_hooks.types.signed_type (itype);
11000 arg00 = fold_convert (itype, arg00);
11002 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11003 type, arg00, build_int_cst (itype, 0));
11007 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11008 if (integer_zerop (arg1)
11009 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11010 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11011 TREE_OPERAND (arg0, 1));
11013 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11014 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11015 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11016 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11017 build_int_cst (TREE_TYPE (arg1), 0));
11018 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11019 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11020 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11021 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11022 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11023 build_int_cst (TREE_TYPE (arg1), 0));
11025 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11026 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11027 && TREE_CODE (arg1) == INTEGER_CST
11028 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11029 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11030 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11031 TREE_OPERAND (arg0, 1), arg1));
11033 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11034 (X & C) == 0 when C is a single bit. */
11035 if (TREE_CODE (arg0) == BIT_AND_EXPR
11036 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11037 && integer_zerop (arg1)
11038 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11040 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11041 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11042 TREE_OPERAND (arg0, 1));
11043 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11044 type, tem, arg1);
11047 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11048 constant C is a power of two, i.e. a single bit. */
11049 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11050 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11051 && integer_zerop (arg1)
11052 && integer_pow2p (TREE_OPERAND (arg0, 1))
11053 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11054 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11056 tree arg00 = TREE_OPERAND (arg0, 0);
11057 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11058 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11061 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11062 when C is a power of two, i.e. a single bit. */
11063 if (TREE_CODE (arg0) == BIT_AND_EXPR
11064 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11065 && integer_zerop (arg1)
11066 && integer_pow2p (TREE_OPERAND (arg0, 1))
11067 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11068 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11070 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11071 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11072 arg000, TREE_OPERAND (arg0, 1));
11073 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11074 tem, build_int_cst (TREE_TYPE (tem), 0));
11077 if (integer_zerop (arg1)
11078 && tree_expr_nonzero_p (arg0))
11080 tree res = constant_boolean_node (code == NE_EXPR, type);
11081 return omit_one_operand (type, res, arg0);
11084 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11085 if (TREE_CODE (arg0) == NEGATE_EXPR
11086 && TREE_CODE (arg1) == NEGATE_EXPR)
11087 return fold_build2 (code, type,
11088 TREE_OPERAND (arg0, 0),
11089 TREE_OPERAND (arg1, 0));
11091 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11092 if (TREE_CODE (arg0) == BIT_AND_EXPR
11093 && TREE_CODE (arg1) == BIT_AND_EXPR)
11095 tree arg00 = TREE_OPERAND (arg0, 0);
11096 tree arg01 = TREE_OPERAND (arg0, 1);
11097 tree arg10 = TREE_OPERAND (arg1, 0);
11098 tree arg11 = TREE_OPERAND (arg1, 1);
11099 tree itype = TREE_TYPE (arg0);
11101 if (operand_equal_p (arg01, arg11, 0))
11102 return fold_build2 (code, type,
11103 fold_build2 (BIT_AND_EXPR, itype,
11104 fold_build2 (BIT_XOR_EXPR, itype,
11105 arg00, arg10),
11106 arg01),
11107 build_int_cst (itype, 0));
11109 if (operand_equal_p (arg01, arg10, 0))
11110 return fold_build2 (code, type,
11111 fold_build2 (BIT_AND_EXPR, itype,
11112 fold_build2 (BIT_XOR_EXPR, itype,
11113 arg00, arg11),
11114 arg01),
11115 build_int_cst (itype, 0));
11117 if (operand_equal_p (arg00, arg11, 0))
11118 return fold_build2 (code, type,
11119 fold_build2 (BIT_AND_EXPR, itype,
11120 fold_build2 (BIT_XOR_EXPR, itype,
11121 arg01, arg10),
11122 arg00),
11123 build_int_cst (itype, 0));
11125 if (operand_equal_p (arg00, arg10, 0))
11126 return fold_build2 (code, type,
11127 fold_build2 (BIT_AND_EXPR, itype,
11128 fold_build2 (BIT_XOR_EXPR, itype,
11129 arg01, arg11),
11130 arg00),
11131 build_int_cst (itype, 0));
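/* Worked example: (x & 7) == (y & 7) folds to ((x ^ y) & 7) == 0;
   the masked values agree exactly when their XOR sets none of the
   masked bits.  */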
11134 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11135 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11137 tree arg00 = TREE_OPERAND (arg0, 0);
11138 tree arg01 = TREE_OPERAND (arg0, 1);
11139 tree arg10 = TREE_OPERAND (arg1, 0);
11140 tree arg11 = TREE_OPERAND (arg1, 1);
11141 tree itype = TREE_TYPE (arg0);
11143 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11144 operand_equal_p guarantees no side-effects so we don't need
11145 to use omit_one_operand on Z. */
11146 if (operand_equal_p (arg01, arg11, 0))
11147 return fold_build2 (code, type, arg00, arg10);
11148 if (operand_equal_p (arg01, arg10, 0))
11149 return fold_build2 (code, type, arg00, arg11);
11150 if (operand_equal_p (arg00, arg11, 0))
11151 return fold_build2 (code, type, arg01, arg10);
11152 if (operand_equal_p (arg00, arg10, 0))
11153 return fold_build2 (code, type, arg01, arg11);
11155 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11156 if (TREE_CODE (arg01) == INTEGER_CST
11157 && TREE_CODE (arg11) == INTEGER_CST)
11158 return fold_build2 (code, type,
11159 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11160 fold_build2 (BIT_XOR_EXPR, itype,
11161 arg01, arg11)),
11162 arg10);
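/* Worked example: (x ^ 1) == (y ^ 2) folds to (x ^ 3) == y,
   since 1 ^ 2 == 3.  */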
11164 return NULL_TREE;
11166 case LT_EXPR:
11167 case GT_EXPR:
11168 case LE_EXPR:
11169 case GE_EXPR:
11170 tem = fold_comparison (code, type, op0, op1);
11171 if (tem != NULL_TREE)
11172 return tem;
11174 /* Transform comparisons of the form X +- C CMP X. */
11175 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11176 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11177 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11178 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11179 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11180 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
11181 && !(flag_wrapv || flag_trapv))))
11183 tree arg01 = TREE_OPERAND (arg0, 1);
11184 enum tree_code code0 = TREE_CODE (arg0);
11185 int is_positive;
11187 if (TREE_CODE (arg01) == REAL_CST)
11188 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11189 else
11190 is_positive = tree_int_cst_sgn (arg01);
11192 /* (X - c) > X becomes false. */
11193 if (code == GT_EXPR
11194 && ((code0 == MINUS_EXPR && is_positive >= 0)
11195 || (code0 == PLUS_EXPR && is_positive <= 0)))
11196 return constant_boolean_node (0, type);
11198 /* Likewise (X + c) < X becomes false. */
11199 if (code == LT_EXPR
11200 && ((code0 == PLUS_EXPR && is_positive >= 0)
11201 || (code0 == MINUS_EXPR && is_positive <= 0)))
11202 return constant_boolean_node (0, type);
11204 /* Convert (X - c) <= X to true. */
11205 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11206 && code == LE_EXPR
11207 && ((code0 == MINUS_EXPR && is_positive >= 0)
11208 || (code0 == PLUS_EXPR && is_positive <= 0)))
11209 return constant_boolean_node (1, type);
11211 /* Convert (X + c) >= X to true. */
11212 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11213 && code == GE_EXPR
11214 && ((code0 == PLUS_EXPR && is_positive >= 0)
11215 || (code0 == MINUS_EXPR && is_positive <= 0)))
11216 return constant_boolean_node (1, type);
11218 if (TREE_CODE (arg01) == INTEGER_CST)
11220 /* Convert X + c > X and X - c < X to true for integers. */
11221 if (code == GT_EXPR
11222 && ((code0 == PLUS_EXPR && is_positive > 0)
11223 || (code0 == MINUS_EXPR && is_positive < 0)))
11224 return constant_boolean_node (1, type);
11226 if (code == LT_EXPR
11227 && ((code0 == MINUS_EXPR && is_positive > 0)
11228 || (code0 == PLUS_EXPR && is_positive < 0)))
11229 return constant_boolean_node (1, type);
11231 /* Convert X + c <= X and X - c >= X to false for integers. */
11232 if (code == LE_EXPR
11233 && ((code0 == PLUS_EXPR && is_positive > 0)
11234 || (code0 == MINUS_EXPR && is_positive < 0)))
11235 return constant_boolean_node (0, type);
11237 if (code == GE_EXPR
11238 && ((code0 == MINUS_EXPR && is_positive > 0)
11239 || (code0 == PLUS_EXPR && is_positive < 0)))
11240 return constant_boolean_node (0, type);
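/* For example, with signed X and neither -fwrapv nor -ftrapv,
   x + 1 > x folds to 1 and x - 1 >= x folds to 0; signed overflow
   is assumed not to occur.  */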
11244 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11245 This transformation affects the cases which are handled in later
11246 optimizations involving comparisons with non-negative constants. */
11247 if (TREE_CODE (arg1) == INTEGER_CST
11248 && TREE_CODE (arg0) != INTEGER_CST
11249 && tree_int_cst_sgn (arg1) > 0)
11251 if (code == GE_EXPR)
11253 arg1 = const_binop (MINUS_EXPR, arg1,
11254 build_int_cst (TREE_TYPE (arg1), 1), 0);
11255 return fold_build2 (GT_EXPR, type, arg0,
11256 fold_convert (TREE_TYPE (arg0), arg1));
11258 if (code == LT_EXPR)
11260 arg1 = const_binop (MINUS_EXPR, arg1,
11261 build_int_cst (TREE_TYPE (arg1), 1), 0);
11262 return fold_build2 (LE_EXPR, type, arg0,
11263 fold_convert (TREE_TYPE (arg0), arg1));
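/* For example, x >= 5 becomes x > 4, and x < 5 becomes x <= 4.  */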
11267 /* Comparisons with the highest or lowest possible integer of
11268 the specified precision will have known values. */
11270 tree arg1_type = TREE_TYPE (arg1);
11271 unsigned int width = TYPE_PRECISION (arg1_type);
11273 if (TREE_CODE (arg1) == INTEGER_CST
11274 && !TREE_OVERFLOW (arg1)
11275 && width <= 2 * HOST_BITS_PER_WIDE_INT
11276 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11278 HOST_WIDE_INT signed_max_hi;
11279 unsigned HOST_WIDE_INT signed_max_lo;
11280 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11282 if (width <= HOST_BITS_PER_WIDE_INT)
11284 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11285 - 1;
11286 signed_max_hi = 0;
11287 max_hi = 0;
11289 if (TYPE_UNSIGNED (arg1_type))
11291 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11292 min_lo = 0;
11293 min_hi = 0;
11295 else
11297 max_lo = signed_max_lo;
11298 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11299 min_hi = -1;
11302 else
11304 width -= HOST_BITS_PER_WIDE_INT;
11305 signed_max_lo = -1;
11306 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11307 - 1;
11308 max_lo = -1;
11309 min_lo = 0;
11311 if (TYPE_UNSIGNED (arg1_type))
11313 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11314 min_hi = 0;
11316 else
11318 max_hi = signed_max_hi;
11319 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11323 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11324 && TREE_INT_CST_LOW (arg1) == max_lo)
11325 switch (code)
11327 case GT_EXPR:
11328 return omit_one_operand (type, integer_zero_node, arg0);
11330 case GE_EXPR:
11331 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11333 case LE_EXPR:
11334 return omit_one_operand (type, integer_one_node, arg0);
11336 case LT_EXPR:
11337 return fold_build2 (NE_EXPR, type, arg0, arg1);
11339 /* The GE_EXPR and LT_EXPR cases above are not normally
11340 reached because of previous transformations. */
11342 default:
11343 break;
11345 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11346 == max_hi
11347 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11348 switch (code)
11350 case GT_EXPR:
11351 arg1 = const_binop (PLUS_EXPR, arg1,
11352 build_int_cst (TREE_TYPE (arg1), 1), 0);
11353 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11354 case LE_EXPR:
11355 arg1 = const_binop (PLUS_EXPR, arg1,
11356 build_int_cst (TREE_TYPE (arg1), 1), 0);
11357 return fold_build2 (NE_EXPR, type, arg0, arg1);
11358 default:
11359 break;
11361 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11362 == min_hi
11363 && TREE_INT_CST_LOW (arg1) == min_lo)
11364 switch (code)
11366 case LT_EXPR:
11367 return omit_one_operand (type, integer_zero_node, arg0);
11369 case LE_EXPR:
11370 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11372 case GE_EXPR:
11373 return omit_one_operand (type, integer_one_node, arg0);
11375 case GT_EXPR:
11376 return fold_build2 (NE_EXPR, type, op0, op1);
11378 default:
11379 break;
11381 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11382 == min_hi
11383 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11384 switch (code)
11386 case GE_EXPR:
11387 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11388 return fold_build2 (NE_EXPR, type, arg0, arg1);
11389 case LT_EXPR:
11390 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11391 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11392 default:
11393 break;
11396 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11397 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11398 && TYPE_UNSIGNED (arg1_type)
11399 /* We will flip the signedness of the comparison operator
11400 associated with the mode of arg1, so the sign bit is
11401 specified by this mode. Check that arg1 is the signed
11402 max associated with this sign bit. */
11403 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11404 /* signed_type does not work on pointer types. */
11405 && INTEGRAL_TYPE_P (arg1_type))
11407 /* The following case also applies to X < signed_max+1
11408 and X >= signed_max+1 because of previous transformations. */
11409 if (code == LE_EXPR || code == GT_EXPR)
11411 tree st0, st1;
11412 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11413 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11414 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11415 type, fold_convert (st0, arg0),
11416 build_int_cst (st1, 0));
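/* For example, for unsigned 32-bit X, x > 0x7fffffff folds to
   (int) x < 0: the values above the signed maximum are exactly
   those with the sign bit set.  */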
11422 /* If we are comparing an ABS_EXPR with a constant, we can
11423 convert all the cases into explicit comparisons, but they may
11424 well not be faster than doing the ABS and one comparison.
11425 But ABS (X) <= C is a range comparison, which becomes a subtraction
11426 and a comparison, and is probably faster. */
11427 if (code == LE_EXPR
11428 && TREE_CODE (arg1) == INTEGER_CST
11429 && TREE_CODE (arg0) == ABS_EXPR
11430 && ! TREE_SIDE_EFFECTS (arg0)
11431 && (0 != (tem = negate_expr (arg1)))
11432 && TREE_CODE (tem) == INTEGER_CST
11433 && !TREE_OVERFLOW (tem))
11434 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11435 build2 (GE_EXPR, type,
11436 TREE_OPERAND (arg0, 0), tem),
11437 build2 (LE_EXPR, type,
11438 TREE_OPERAND (arg0, 0), arg1));
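/* For example, ABS (x) <= 5 folds to x >= -5 && x <= 5, a range
   test which the range machinery can often collapse into a single
   unsigned comparison.  */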
11440 /* Convert ABS_EXPR<x> >= 0 to true. */
11441 if (code == GE_EXPR
11442 && tree_expr_nonnegative_p (arg0)
11443 && (integer_zerop (arg1)
11444 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11445 && real_zerop (arg1))))
11446 return omit_one_operand (type, integer_one_node, arg0);
11448 /* Convert ABS_EXPR<x> < 0 to false. */
11449 if (code == LT_EXPR
11450 && tree_expr_nonnegative_p (arg0)
11451 && (integer_zerop (arg1) || real_zerop (arg1)))
11452 return omit_one_operand (type, integer_zero_node, arg0);
11454 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11455 and similarly for >= into !=. */
11456 if ((code == LT_EXPR || code == GE_EXPR)
11457 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11458 && TREE_CODE (arg1) == LSHIFT_EXPR
11459 && integer_onep (TREE_OPERAND (arg1, 0)))
11460 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11461 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11462 TREE_OPERAND (arg1, 1)),
11463 build_int_cst (TREE_TYPE (arg0), 0));
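/* For example, for unsigned X, x < (1 << n) folds to (x >> n) == 0
   and x >= (1 << n) to (x >> n) != 0.  */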
11465 if ((code == LT_EXPR || code == GE_EXPR)
11466 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11467 && (TREE_CODE (arg1) == NOP_EXPR
11468 || TREE_CODE (arg1) == CONVERT_EXPR)
11469 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11470 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11471 return
11472 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11473 fold_convert (TREE_TYPE (arg0),
11474 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11475 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11476 1))),
11477 build_int_cst (TREE_TYPE (arg0), 0));
11479 return NULL_TREE;
11481 case UNORDERED_EXPR:
11482 case ORDERED_EXPR:
11483 case UNLT_EXPR:
11484 case UNLE_EXPR:
11485 case UNGT_EXPR:
11486 case UNGE_EXPR:
11487 case UNEQ_EXPR:
11488 case LTGT_EXPR:
11489 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11491 t1 = fold_relational_const (code, type, arg0, arg1);
11492 if (t1 != NULL_TREE)
11493 return t1;
11496 /* If the first operand is NaN, the result is constant. */
11497 if (TREE_CODE (arg0) == REAL_CST
11498 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11499 && (code != LTGT_EXPR || ! flag_trapping_math))
11501 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11502 ? integer_zero_node
11503 : integer_one_node;
11504 return omit_one_operand (type, t1, arg1);
11507 /* If the second operand is NaN, the result is constant. */
11508 if (TREE_CODE (arg1) == REAL_CST
11509 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11510 && (code != LTGT_EXPR || ! flag_trapping_math))
11512 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11513 ? integer_zero_node
11514 : integer_one_node;
11515 return omit_one_operand (type, t1, arg0);
11518 /* Simplify unordered comparison of something with itself. */
11519 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11520 && operand_equal_p (arg0, arg1, 0))
11521 return constant_boolean_node (1, type);
11523 if (code == LTGT_EXPR
11524 && !flag_trapping_math
11525 && operand_equal_p (arg0, arg1, 0))
11526 return constant_boolean_node (0, type);
11528 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11530 tree targ0 = strip_float_extensions (arg0);
11531 tree targ1 = strip_float_extensions (arg1);
11532 tree newtype = TREE_TYPE (targ0);
11534 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11535 newtype = TREE_TYPE (targ1);
11537 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11538 return fold_build2 (code, type, fold_convert (newtype, targ0),
11539 fold_convert (newtype, targ1));
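/* For example, with floats F and G, (double) f < (double) g folds to
   f < g: widening float to double is exact, so the comparison result
   is unchanged.  */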
11542 return NULL_TREE;
11544 case COMPOUND_EXPR:
11545 /* When pedantic, a compound expression can be neither an lvalue
11546 nor an integer constant expression. */
11547 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11548 return NULL_TREE;
11549 /* Don't let (0, 0) be null pointer constant. */
11550 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11551 : fold_convert (type, arg1);
11552 return pedantic_non_lvalue (tem);
11554 case COMPLEX_EXPR:
11555 if ((TREE_CODE (arg0) == REAL_CST
11556 && TREE_CODE (arg1) == REAL_CST)
11557 || (TREE_CODE (arg0) == INTEGER_CST
11558 && TREE_CODE (arg1) == INTEGER_CST))
11559 return build_complex (type, arg0, arg1);
11560 return NULL_TREE;
11562 case ASSERT_EXPR:
11563 /* An ASSERT_EXPR should never be passed to fold_binary. */
11564 gcc_unreachable ();
11566 default:
11567 return NULL_TREE;
11568 } /* switch (code) */
11571 /* Callback for walk_tree, looking for a LABEL_EXPR.
11572 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
11573 Does not descend into the sub-tree of a GOTO_EXPR. */
11575 static tree
11576 contains_label_1 (tree *tp,
11577 int *walk_subtrees,
11578 void *data ATTRIBUTE_UNUSED)
11580 switch (TREE_CODE (*tp))
11582 case LABEL_EXPR:
11583 return *tp;
11584 case GOTO_EXPR:
11585 *walk_subtrees = 0;
11586 /* no break */
11587 default:
11588 return NULL_TREE;
11592 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11593 accessible from outside the sub-tree. Returns true if such a label
11594 is found, false otherwise. */
11596 static bool
11597 contains_label_p (tree st)
11599 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
11602 /* Fold a ternary expression of code CODE and type TYPE with operands
11603 OP0, OP1, and OP2. Return the folded expression if folding is
11604 successful. Otherwise, return NULL_TREE. */
11606 tree
11607 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11609 tree tem;
11610 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11611 enum tree_code_class kind = TREE_CODE_CLASS (code);
11613 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11614 && TREE_CODE_LENGTH (code) == 3);
11616 /* Strip any conversions that don't change the mode. This is safe
11617 for every expression, except for a comparison expression because
11618 its signedness is derived from its operands. So, in the latter
11619 case, only strip conversions that don't change the signedness.
11621 Note that this is done as an internal manipulation within the
11622 constant folder, in order to find the simplest representation of
11623 the arguments so that their form can be studied. In any cases,
11624 the appropriate type conversions should be put back in the tree
11625 that will get out of the constant folder. */
11626 if (op0)
11628 arg0 = op0;
11629 STRIP_NOPS (arg0);
11632 if (op1)
11634 arg1 = op1;
11635 STRIP_NOPS (arg1);
11638 switch (code)
11640 case COMPONENT_REF:
11641 if (TREE_CODE (arg0) == CONSTRUCTOR
11642 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11644 unsigned HOST_WIDE_INT idx;
11645 tree field, value;
11646 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11647 if (field == arg1)
11648 return value;
11650 return NULL_TREE;
11652 case COND_EXPR:
11653 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11654 so all simple results must be passed through pedantic_non_lvalue. */
11655 if (TREE_CODE (arg0) == INTEGER_CST)
11657 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11658 tem = integer_zerop (arg0) ? op2 : op1;
11659 /* Only optimize constant conditions when the selected branch
11660 has the same type as the COND_EXPR. This avoids optimizing
11661 away "c ? x : throw", where the throw has a void type.
11662 Avoid throwing away an operand that contains a label. */
11663 if ((!TREE_SIDE_EFFECTS (unused_op)
11664 || !contains_label_p (unused_op))
11665 && (! VOID_TYPE_P (TREE_TYPE (tem))
11666 || VOID_TYPE_P (type)))
11667 return pedantic_non_lvalue (tem);
11668 return NULL_TREE;
11670 if (operand_equal_p (arg1, op2, 0))
11671 return pedantic_omit_one_operand (type, arg1, arg0);
11673 /* If we have A op B ? A : C, we may be able to convert this to a
11674 simpler expression, depending on the operation and the values
11675 of B and C. Signed zeros prevent all of these transformations,
11676 for reasons given above each one.
11678 Also try swapping the arguments and inverting the conditional. */
11679 if (COMPARISON_CLASS_P (arg0)
11680 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11681 arg1, TREE_OPERAND (arg0, 1))
11682 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11684 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11685 if (tem)
11686 return tem;
11689 if (COMPARISON_CLASS_P (arg0)
11690 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11691 op2,
11692 TREE_OPERAND (arg0, 1))
11693 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11695 tem = fold_truth_not_expr (arg0);
11696 if (tem && COMPARISON_CLASS_P (tem))
11698 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11699 if (tem)
11700 return tem;
11704 /* If the second operand is simpler than the third, swap them
11705 since that produces better jump optimization results. */
11706 if (truth_value_p (TREE_CODE (arg0))
11707 && tree_swap_operands_p (op1, op2, false))
11709 /* See if this can be inverted. If it can't, possibly because
11710 it was a floating-point inequality comparison, don't do
11711 anything. */
11712 tem = fold_truth_not_expr (arg0);
11713 if (tem)
11714 return fold_build3 (code, type, tem, op2, op1);
11717 /* Convert A ? 1 : 0 to simply A. */
11718 if (integer_onep (op1)
11719 && integer_zerop (op2)
11720 /* If we try to convert OP0 to our type, the
11721 call to fold will try to move the conversion inside
11722 a COND, which will recurse. In that case, the COND_EXPR
11723 is probably the best choice, so leave it alone. */
11724 && type == TREE_TYPE (arg0))
11725 return pedantic_non_lvalue (arg0);
11727 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11728 over COND_EXPR in cases such as floating point comparisons. */
11729 if (integer_zerop (op1)
11730 && integer_onep (op2)
11731 && truth_value_p (TREE_CODE (arg0)))
11732 return pedantic_non_lvalue (fold_convert (type,
11733 invert_truthvalue (arg0)));
11735 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11736 if (TREE_CODE (arg0) == LT_EXPR
11737 && integer_zerop (TREE_OPERAND (arg0, 1))
11738 && integer_zerop (op2)
11739 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11741 /* sign_bit_p only checks ARG1 bits within A's precision.
11742 If <sign bit of A> has wider type than A, bits outside
11743 of A's precision in <sign bit of A> need to be checked.
11744 If they are all 0, this optimization needs to be done
11745 in unsigned A's type, if they are all 1 in signed A's type,
11746 otherwise this can't be done. */
11747 if (TYPE_PRECISION (TREE_TYPE (tem))
11748 < TYPE_PRECISION (TREE_TYPE (arg1))
11749 && TYPE_PRECISION (TREE_TYPE (tem))
11750 < TYPE_PRECISION (type))
11752 unsigned HOST_WIDE_INT mask_lo;
11753 HOST_WIDE_INT mask_hi;
11754 int inner_width, outer_width;
11755 tree tem_type;
11757 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11758 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11759 if (outer_width > TYPE_PRECISION (type))
11760 outer_width = TYPE_PRECISION (type);
11762 if (outer_width > HOST_BITS_PER_WIDE_INT)
11764 mask_hi = ((unsigned HOST_WIDE_INT) -1
11765 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11766 mask_lo = -1;
11768 else
11770 mask_hi = 0;
11771 mask_lo = ((unsigned HOST_WIDE_INT) -1
11772 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11774 if (inner_width > HOST_BITS_PER_WIDE_INT)
11776 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11777 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11778 mask_lo = 0;
11780 else
11781 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11782 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11784 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11785 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11787 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11788 tem = fold_convert (tem_type, tem);
11790 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11791 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11793 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11794 tem = fold_convert (tem_type, tem);
11796 else
11797 tem = NULL;
11800 if (tem)
11801 return fold_convert (type,
11802 fold_build2 (BIT_AND_EXPR,
11803 TREE_TYPE (tem), tem,
11804 fold_convert (TREE_TYPE (tem),
11805 arg1)));
11808 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11809 already handled above. */
11810 if (TREE_CODE (arg0) == BIT_AND_EXPR
11811 && integer_onep (TREE_OPERAND (arg0, 1))
11812 && integer_zerop (op2)
11813 && integer_pow2p (arg1))
11815 tree tem = TREE_OPERAND (arg0, 0);
11816 STRIP_NOPS (tem);
11817 if (TREE_CODE (tem) == RSHIFT_EXPR
11818 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11819 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11820 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11821 return fold_build2 (BIT_AND_EXPR, type,
11822 TREE_OPERAND (tem, 0), arg1);
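/* Worked example: ((x >> 3) & 1) ? 8 : 0 folds to x & 8.  */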
11825 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11826 is probably obsolete because the first operand should be a
11827 truth value (that's why we have the two cases above), but let's
11828 leave it in until we can confirm this for all front-ends. */
11829 if (integer_zerop (op2)
11830 && TREE_CODE (arg0) == NE_EXPR
11831 && integer_zerop (TREE_OPERAND (arg0, 1))
11832 && integer_pow2p (arg1)
11833 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11834 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11835 arg1, OEP_ONLY_CONST))
11836 return pedantic_non_lvalue (fold_convert (type,
11837 TREE_OPERAND (arg0, 0)));
11839 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11840 if (integer_zerop (op2)
11841 && truth_value_p (TREE_CODE (arg0))
11842 && truth_value_p (TREE_CODE (arg1)))
11843 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11844 fold_convert (type, arg0),
11845 arg1);
11847 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11848 if (integer_onep (op2)
11849 && truth_value_p (TREE_CODE (arg0))
11850 && truth_value_p (TREE_CODE (arg1)))
11852 /* Only perform transformation if ARG0 is easily inverted. */
11853 tem = fold_truth_not_expr (arg0);
11854 if (tem)
11855 return fold_build2 (TRUTH_ORIF_EXPR, type,
11856 fold_convert (type, tem),
11857 arg1);
11860 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11861 if (integer_zerop (arg1)
11862 && truth_value_p (TREE_CODE (arg0))
11863 && truth_value_p (TREE_CODE (op2)))
11865 /* Only perform transformation if ARG0 is easily inverted. */
11866 tem = fold_truth_not_expr (arg0);
11867 if (tem)
11868 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11869 fold_convert (type, tem),
11870 op2);
11873 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11874 if (integer_onep (arg1)
11875 && truth_value_p (TREE_CODE (arg0))
11876 && truth_value_p (TREE_CODE (op2)))
11877 return fold_build2 (TRUTH_ORIF_EXPR, type,
11878 fold_convert (type, arg0),
11879 op2);
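/* In summary: a ? b : 0 folds to a && b, a ? 1 : b to a || b, and
   a ? 0 : b and a ? b : 1 to !a && b and !a || b respectively, the
   last two only when A can be inverted cheaply.  */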
11881 return NULL_TREE;
11883 case CALL_EXPR:
11884 /* Check for a built-in function. */
11885 if (TREE_CODE (op0) == ADDR_EXPR
11886 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11887 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11888 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11889 return NULL_TREE;
11891 case BIT_FIELD_REF:
11892 if (TREE_CODE (arg0) == VECTOR_CST
11893 && type == TREE_TYPE (TREE_TYPE (arg0))
11894 && host_integerp (arg1, 1)
11895 && host_integerp (op2, 1))
11897 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11898 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11900 if (width != 0
11901 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11902 && (idx % width) == 0
11903 && (idx = idx / width)
11904 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11906 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11907 while (idx-- > 0 && elements)
11908 elements = TREE_CHAIN (elements);
11909 if (elements)
11910 return TREE_VALUE (elements);
11911 else
11912 return fold_convert (type, integer_zero_node);
11915 return NULL_TREE;
11917 default:
11918 return NULL_TREE;
11919 } /* switch (code) */
11922 /* Perform constant folding and related simplification of EXPR.
11923 The related simplifications include x*1 => x, x*0 => 0, etc.,
11924 and application of the associative law.
11925 NOP_EXPR conversions may be removed freely (as long as we
11926 are careful not to change the type of the overall expression).
11927 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11928 but we can constant-fold them if they have constant operands. */
11930 #ifdef ENABLE_FOLD_CHECKING
11931 # define fold(x) fold_1 (x)
11932 static tree fold_1 (tree);
11933 static
11934 #endif
11935 tree
11936 fold (tree expr)
11938 const tree t = expr;
11939 enum tree_code code = TREE_CODE (t);
11940 enum tree_code_class kind = TREE_CODE_CLASS (code);
11941 tree tem;
11943 /* Return right away if a constant. */
11944 if (kind == tcc_constant)
11945 return t;
11947 if (IS_EXPR_CODE_CLASS (kind)
11948 || IS_GIMPLE_STMT_CODE_CLASS (kind))
11950 tree type = TREE_TYPE (t);
11951 tree op0, op1, op2;
11953 switch (TREE_CODE_LENGTH (code))
11955 case 1:
11956 op0 = TREE_OPERAND (t, 0);
11957 tem = fold_unary (code, type, op0);
11958 return tem ? tem : expr;
11959 case 2:
11960 op0 = TREE_OPERAND (t, 0);
11961 op1 = TREE_OPERAND (t, 1);
11962 tem = fold_binary (code, type, op0, op1);
11963 return tem ? tem : expr;
11964 case 3:
11965 op0 = TREE_OPERAND (t, 0);
11966 op1 = TREE_OPERAND (t, 1);
11967 op2 = TREE_OPERAND (t, 2);
11968 tem = fold_ternary (code, type, op0, op1, op2);
11969 return tem ? tem : expr;
11970 default:
11971 break;
11975 switch (code)
11977 case CONST_DECL:
11978 return fold (DECL_INITIAL (t));
11980 default:
11981 return t;
11982 } /* switch (code) */
11985 #ifdef ENABLE_FOLD_CHECKING
11986 #undef fold
11988 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11989 static void fold_check_failed (tree, tree);
11990 void print_fold_checksum (tree);
11992 /* When --enable-checking=fold, compute a digest of EXPR before
11993 and after the actual fold call, to verify that fold did not
11994 accidentally change the original expr. */
11996 tree
11997 fold (tree expr)
11999 tree ret;
12000 struct md5_ctx ctx;
12001 unsigned char checksum_before[16], checksum_after[16];
12002 htab_t ht;
12004 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12005 md5_init_ctx (&ctx);
12006 fold_checksum_tree (expr, &ctx, ht);
12007 md5_finish_ctx (&ctx, checksum_before);
12008 htab_empty (ht);
12010 ret = fold_1 (expr);
12012 md5_init_ctx (&ctx);
12013 fold_checksum_tree (expr, &ctx, ht);
12014 md5_finish_ctx (&ctx, checksum_after);
12015 htab_delete (ht);
12017 if (memcmp (checksum_before, checksum_after, 16))
12018 fold_check_failed (expr, ret);
12020 return ret;
12023 void
12024 print_fold_checksum (tree expr)
12026 struct md5_ctx ctx;
12027 unsigned char checksum[16], cnt;
12028 htab_t ht;
12030 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12031 md5_init_ctx (&ctx);
12032 fold_checksum_tree (expr, &ctx, ht);
12033 md5_finish_ctx (&ctx, checksum);
12034 htab_delete (ht);
12035 for (cnt = 0; cnt < 16; ++cnt)
12036 fprintf (stderr, "%02x", checksum[cnt]);
12037 putc ('\n', stderr);
12040 static void
12041 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12043 internal_error ("fold check: original tree changed by fold");
12046 static void
12047 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12049 void **slot;
12050 enum tree_code code;
12051 struct tree_function_decl buf;
12052 int i, len;
12054 recursive_label:
12056 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12057 <= sizeof (struct tree_function_decl))
12058 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12059 if (expr == NULL)
12060 return;
12061 slot = htab_find_slot (ht, expr, INSERT);
12062 if (*slot != NULL)
12063 return;
12064 *slot = expr;
12065 code = TREE_CODE (expr);
12066 if (TREE_CODE_CLASS (code) == tcc_declaration
12067 && DECL_ASSEMBLER_NAME_SET_P (expr))
12069 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12070 memcpy ((char *) &buf, expr, tree_size (expr));
12071 expr = (tree) &buf;
12072 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12074 else if (TREE_CODE_CLASS (code) == tcc_type
12075 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12076 || TYPE_CACHED_VALUES_P (expr)
12077 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12079 /* Allow these fields to be modified. */
12080 memcpy ((char *) &buf, expr, tree_size (expr));
12081 expr = (tree) &buf;
12082 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12083 TYPE_POINTER_TO (expr) = NULL;
12084 TYPE_REFERENCE_TO (expr) = NULL;
12085 if (TYPE_CACHED_VALUES_P (expr))
12087 TYPE_CACHED_VALUES_P (expr) = 0;
12088 TYPE_CACHED_VALUES (expr) = NULL;
12091 md5_process_bytes (expr, tree_size (expr), ctx);
12092 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12093 if (TREE_CODE_CLASS (code) != tcc_type
12094 && TREE_CODE_CLASS (code) != tcc_declaration
12095 && code != TREE_LIST)
12096 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12097 switch (TREE_CODE_CLASS (code))
12099 case tcc_constant:
12100 switch (code)
12102 case STRING_CST:
12103 md5_process_bytes (TREE_STRING_POINTER (expr),
12104 TREE_STRING_LENGTH (expr), ctx);
12105 break;
12106 case COMPLEX_CST:
12107 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12108 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12109 break;
12110 case VECTOR_CST:
12111 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12112 break;
12113 default:
12114 break;
12116 break;
12117 case tcc_exceptional:
12118 switch (code)
12120 case TREE_LIST:
12121 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12122 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12123 expr = TREE_CHAIN (expr);
12124 goto recursive_label;
12125 break;
12126 case TREE_VEC:
12127 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12128 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12129 break;
12130 default:
12131 break;
12133 break;
12134 case tcc_expression:
12135 case tcc_reference:
12136 case tcc_comparison:
12137 case tcc_unary:
12138 case tcc_binary:
12139 case tcc_statement:
12140 len = TREE_CODE_LENGTH (code);
12141 for (i = 0; i < len; ++i)
12142 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12143 break;
12144 case tcc_declaration:
12145 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12146 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12147 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12149 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12150 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12151 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12152 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12153 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12155 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12156 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12158 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12160 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12161 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12162 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12164 break;
12165 case tcc_type:
12166 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12167 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12168 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12169 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12170 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12171 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12172 if (INTEGRAL_TYPE_P (expr)
12173 || SCALAR_FLOAT_TYPE_P (expr))
12175 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12176 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12178 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12179 if (TREE_CODE (expr) == RECORD_TYPE
12180 || TREE_CODE (expr) == UNION_TYPE
12181 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12182 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12183 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12184 break;
12185 default:
12186 break;
12190 #endif
12192 /* Fold a unary tree expression with code CODE of type TYPE with an
12193 operand OP0. Return a folded expression if successful. Otherwise,
12194 return a tree expression with code CODE of type TYPE with an
12195 operand OP0. */
12197 tree
12198 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12200 tree tem;
12201 #ifdef ENABLE_FOLD_CHECKING
12202 unsigned char checksum_before[16], checksum_after[16];
12203 struct md5_ctx ctx;
12204 htab_t ht;
12206 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12207 md5_init_ctx (&ctx);
12208 fold_checksum_tree (op0, &ctx, ht);
12209 md5_finish_ctx (&ctx, checksum_before);
12210 htab_empty (ht);
12211 #endif
12213 tem = fold_unary (code, type, op0);
12214 if (!tem)
12215 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12217 #ifdef ENABLE_FOLD_CHECKING
12218 md5_init_ctx (&ctx);
12219 fold_checksum_tree (op0, &ctx, ht);
12220 md5_finish_ctx (&ctx, checksum_after);
12221 htab_delete (ht);
12223 if (memcmp (checksum_before, checksum_after, 16))
12224 fold_check_failed (op0, tem);
12225 #endif
12226 return tem;
12229 /* Fold a binary tree expression with code CODE of type TYPE with
12230 operands OP0 and OP1. Return a folded expression if successful.
12231 Otherwise, return a tree expression with code CODE of type TYPE
12232 with operands OP0 and OP1. */
12234 tree
12235 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12236 MEM_STAT_DECL)
12238 tree tem;
12239 #ifdef ENABLE_FOLD_CHECKING
12240 unsigned char checksum_before_op0[16],
12241 checksum_before_op1[16],
12242 checksum_after_op0[16],
12243 checksum_after_op1[16];
12244 struct md5_ctx ctx;
12245 htab_t ht;
12247 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12248 md5_init_ctx (&ctx);
12249 fold_checksum_tree (op0, &ctx, ht);
12250 md5_finish_ctx (&ctx, checksum_before_op0);
12251 htab_empty (ht);
12253 md5_init_ctx (&ctx);
12254 fold_checksum_tree (op1, &ctx, ht);
12255 md5_finish_ctx (&ctx, checksum_before_op1);
12256 htab_empty (ht);
12257 #endif
12259 tem = fold_binary (code, type, op0, op1);
12260 if (!tem)
12261 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12263 #ifdef ENABLE_FOLD_CHECKING
12264 md5_init_ctx (&ctx);
12265 fold_checksum_tree (op0, &ctx, ht);
12266 md5_finish_ctx (&ctx, checksum_after_op0);
12267 htab_empty (ht);
12269 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12270 fold_check_failed (op0, tem);
12272 md5_init_ctx (&ctx);
12273 fold_checksum_tree (op1, &ctx, ht);
12274 md5_finish_ctx (&ctx, checksum_after_op1);
12275 htab_delete (ht);
12277 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12278 fold_check_failed (op1, tem);
12279 #endif
12280 return tem;
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
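/* Illustrative sketch (not part of the folder itself): because the
   macros above temporarily clear the trap/rounding flags and set
   folding_initializer, a call such as

     tree t = fold_build2_initializer (RDIV_EXPR, double_type_node,
				       one, three);

   (where "one" and "three" stand for REAL_CST operands) can fold to a
   REAL_CST even under -ftrapping-math or -frounding-math, which plain
   fold_build2 would have to preserve for run time; C initializers are
   evaluated at translation time anyway, so this is safe there.  */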
/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
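/* For instance (illustrative only): with TOP = i * (j << 3) and
   BOTTOM = 8, the routine below recurses through the MULT_EXPR and
   succeeds on the LSHIFT_EXPR operand, since 1 << 3 == 8:

     multiple_of_p (sizetype, top, size_int (8))  == 1

   whereas BOTTOM = 16 would yield 0, as neither factor is provably
   a multiple of 16.  */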
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* ... fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
/* Return true if `t' is known to be non-negative.  */

bool
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return false;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
	return true;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;
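      /* Illustrative example: for (int) (unsigned short) a
	 + (int) (unsigned short) b, each operand is at most 0xffff,
	 so the sum is at most 0x1fffe, which fits in prec = 16 + 1
	 = 17 bits; 17 < 32 means the sum can never reach the sign
	 bit of the 32-bit result, so the PLUS_EXPR is non-negative.  */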
    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return true;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and the sum of their precisions is smaller than
	 the precision of the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
      return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if ((TREE_CODE (t) == MODIFY_EXPR
	     || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	    && GENERIC_TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));

	return false;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	    CASE_INT_FN (BUILT_IN_FFS):
	    CASE_INT_FN (BUILT_IN_PARITY):
	    CASE_INT_FN (BUILT_IN_POPCOUNT):
	    case BUILT_IN_BSWAP32:
	    case BUILT_IN_BSWAP64:
	      /* Always true.  */
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return true;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_ASINH):
	    CASE_FLT_FN (BUILT_IN_ATAN):
	    CASE_FLT_FN (BUILT_IN_ATANH):
	    CASE_FLT_FN (BUILT_IN_CBRT):
	    CASE_FLT_FN (BUILT_IN_CEIL):
	    CASE_FLT_FN (BUILT_IN_ERF):
	    CASE_FLT_FN (BUILT_IN_EXPM1):
	    CASE_FLT_FN (BUILT_IN_FLOOR):
	    CASE_FLT_FN (BUILT_IN_FMOD):
	    CASE_FLT_FN (BUILT_IN_FREXP):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LDEXP):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLRINT):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	    CASE_FLT_FN (BUILT_IN_LRINT):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_MODF):
	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
	    CASE_FLT_FN (BUILT_IN_RINT):
	    CASE_FLT_FN (BUILT_IN_ROUND):
	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
	    CASE_FLT_FN (BUILT_IN_SINH):
	    CASE_FLT_FN (BUILT_IN_TANH):
	    CASE_FLT_FN (BUILT_IN_TRUNC):
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if the 1st OR 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer.  */
	      if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
		{
		  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
		  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
		    return true;
		}
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer valued real.  */
	      if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
		  n = real_to_integer (&c);
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n,
					 n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return true;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      return !integer_zerop (t);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* In the presence of negative values it is hard
	     to say anything.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of the operands must be positive and the other
	     non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
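/* Illustrative examples for tree_expr_nonzero_p (not exhaustive):
   for "int x;", the ADDR_EXPR case answers that &x is nonzero, since
   a non-weak declaration cannot link to address 0; for a DECL_WEAK
   symbol the answer is false, because an undefined weak symbol may
   legitimately resolve to NULL at link time.  */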
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion,
	     (ARRAY+(INDEX-(unsigned char)1)) becomes
	     ((ARRAY+(-(unsigned char)1))+INDEX), which becomes
	     (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
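/* Illustrative example: given the GENERIC tree for "abc"[1], the
   checks above all pass (single-byte integer mode, constant index
   within TREE_STRING_LENGTH), so the access folds to the character
   constant:

     fold_read_from_constant_string (<"abc"[1]>)  ==>  'b'

   An out-of-range or variable index makes the function return NULL
   instead, leaving the reference alone.  */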
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
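/* Illustrative examples for the constant comparison folder above:
   with INTEGER_CST operands 2 and 3, LT_EXPR folds to true, and
   GE_EXPR is computed as LT_EXPR with the result inverted.  With a
   NaN REAL_CST operand, LT_EXPR folds to false when -fno-trapping-math
   is in effect, but returns NULL_TREE under flag_trapping_math so the
   (potentially trapping) comparison survives to run time.  */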
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for EXPR which doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If either doesn't, we don't need to wrap the
     expression in a cleanup point expression.  Note we don't check the left
     hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
	TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
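/* Worked example for the power-of-two branch above (illustrative):
   with DIVISOR == 8 the function builds (VALUE + 7) & -8, so a
   constant 13 rounds up to 16 while 16 stays 16.  A non-power-of-two
   divisor such as 12 instead goes through CEIL_DIV_EXPR and
   MULT_EXPR, i.e. ((VALUE + 11) / 12) * 12 for non-negative sizetype
   values, so 13 rounds up to 24.  */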
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
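/* Likewise for round_down (illustrative): DIVISOR == 8 yields
   VALUE & -8, so 13 rounds down to 8; DIVISOR == 12 yields
   (VALUE / 12) * 12 via FLOOR_DIV_EXPR and MULT_EXPR.  */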
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
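/* Illustrative example: for "int a[10];" the addresses &a[3] and
   &a[1] share the core &a and have constant byte offsets, so

     ptr_difference_const (<&a[3]>, <&a[1]>, &diff)

   sets diff to 2 * sizeof (int) and returns true.  Comparing &a[i]
   with &a[j] for variable i and j returns false, because the offset
   difference does not fold to a constant.  */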
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
	    arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
		if (arg0)
		  return build_function_call_expr (get_callee_fndecl (exp),
						   build_tree_list (NULL_TREE,
								    arg0));
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
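/* Illustrative examples of fold_strip_sign_ops: when only the
   magnitude of the result matters, the folder above rewrites

     -x * y	      ==>  x * y
     sin (-x)	      ==>  sin (x)    (an "odd" function per negate_mathfn_p)
     copysign (x, s)  ==>  x	      (keeping s only for side effects)

   and returns NULL_TREE when nothing can be stripped, e.g. for
   MULT_EXPR/RDIV_EXPR when sign-dependent rounding must be honored.  */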