/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
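
/* For example, on a host where HOST_WIDE_INT is 32 bits:

     a = 0x7fffffff, b = 1, sum = 0x80000000
       => OVERFLOW_SUM_SIGN (a, b, sum) is nonzero (two positive
          operands, negative sum: overflow);
     a = -1, b = 1, sum = 0
       => OVERFLOW_SUM_SIGN (a, b, sum) is zero (the operands already
          differ in sign, so no overflow is possible).  */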
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
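
/* Illustration, assuming a 32-bit HOST_WIDE_INT (so BASE == 0x10000):

     LOWPART (0x89abcdef)  == 0xcdef
     HIGHPART (0x89abcdef) == 0x89ab
     0xcdef + 0x89ab * BASE == 0x89abcdef

   i.e. each host word is split into two base-65536 digits.  */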
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
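
/* Example round trip, again assuming a 32-bit HOST_WIDE_INT:

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (w, 0x89abcdef, 0x01234567);
       => w[] == { 0xcdef, 0x89ab, 0x4567, 0x0123 }
     decode (w, &lo, &hi);
       => lo == 0x89abcdef, hi == 0x01234567  */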
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
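
/* For instance, fitting the value 255 into a signed 8-bit type on a
   32-bit host:

     unsigned HOST_WIDE_INT lv;
     HOST_WIDE_INT hv;
     fit_double_type (0xff, 0, &lv, &hv, signed_char_type_node);
       => lv == 0xffffffff, hv == -1 (the value -1), returns nonzero

   i.e. 255 does not fit; it is sign-extended to -1 and the nonzero
   return value signals the overflow.  */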
/* We force the double-word integer HIGH:LOW to the range of the type TYPE
   by sign or zero extending it.
   OVERFLOWABLE indicates whether we are interested in overflow of the
   value: when >0 we are only interested in signed overflow, when <0 we
   are interested in any overflow.  OVERFLOWED indicates whether overflow
   has already occurred.  We force the value to be within range of the
   type (by setting to 0 or 1 all the bits outside the type's range).
   We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-word integer.  The
   node is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
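
/* Example, on a 32-bit host: adding 1 to the all-ones doubleword
   wraps to zero.

     add_double_with_sign (0xffffffff, -1, 1, 0, &lv, &hv, true);
       => lv == 0, hv == 0, returns nonzero (unsigned overflow)

   With UNSIGNED_P false the same bits represent -1 + 1 == 0, which
   does not overflow, so the signed variant returns zero.  */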
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
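
/* Example, on a 32-bit host: the most negative doubleword value has
   no positive counterpart.

     neg_double (0, (HOST_WIDE_INT) 0x80000000, &lv, &hv);
       => lv == 0, hv == 0x80000000 again, returns nonzero (overflow)

   whereas neg_double (1, 0, &lv, &hv) yields -1 (lv == ~0, hv == -1)
   with no overflow.  */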
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
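
/* Example, on a 32-bit host: 0x10000 * 0x10000 == 0x100000000 needs
   the high word of the result.

     mul_double_with_sign (0x10000, 0, 0x10000, 0, &lv, &hv, false);
       => lv == 0, hv == 1, returns 0 (no signed overflow)  */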
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
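
/* Example, 32-bit host, PREC == 64: a shift that crosses the word
   boundary.

     lshift_double (1, 0, 33, 64, &lv, &hv, 0);
       => lv == 0, hv == 2   (the value 1 << 33)  */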
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
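
/* Example, 32-bit host, PREC == 64: rotating the doubleword value 1
   right by one bit carries the low bit into the top of the high word.

     rrotate_double (1, 0, 1, 64, &lv, &hv);
       => lv == 0, hv == 0x80000000  */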
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order divisor digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1; */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
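
/* Example: dividing -7 by 2 (UNS == 0) under the different rounding
   codes:

     TRUNC_DIV_EXPR:  quotient -3, remainder -1  (round toward zero)
     FLOOR_DIV_EXPR:  quotient -4, remainder  1  (round toward -infinity)
     CEIL_DIV_EXPR:   quotient -3, remainder -1  (round toward +infinity)
     ROUND_DIV_EXPR:  quotient -4, remainder  1  (-3.5 rounds to -4)  */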
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h, type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
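
/* A sketch of a call, with the INTEGER_CST arguments built by
   build_int_cst:

     div_if_zero_remainder (EXACT_DIV_EXPR,
                            build_int_cst (integer_type_node, 12),
                            build_int_cst (integer_type_node, 4))
       => the constant 3

   while dividing 13 by 4 leaves a remainder and yields NULL_TREE.  */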
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
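
/* A typical caller brackets a fold with defer/undefer so that the
   warning is only issued when the folded result is actually used
   (a sketch):

     fold_defer_overflow_warnings ();
     t = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (t != NULL_TREE, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);

   Callers that discard the result instead call
   fold_undefer_and_ignore_overflow_warnings ().  */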
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
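
/* For a 32-bit signed type this returns false only for the single
   value 0x80000000 (INT_MIN), since -INT_MIN wraps back to INT_MIN in
   two's complement; every other constant can be negated safely.  */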
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
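
/* Examples of the transformations above, for a plain signed int
   (default overflow rules):

     -(~a)            => a + 1                        (BIT_NOT_EXPR)
     -(a - b)         => b - a                        (MINUS_EXPR)
     -(x / 3)         => x / -3                       (TRUNC_DIV_EXPR)
     -((int) x >> 31) => (int) ((unsigned) x >> 31)   (RSHIFT_EXPR)  */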
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
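
/* Examples, with CODE == PLUS_EXPR:

     A + 4  =>  variable part A, *LITP == 4
     A - 4  =>  variable part A, *MINUS_LITP == 4  (the 4 was subtracted)

   and for C + 4, where C has TREE_CONSTANT set but is not a literal,
   the variable part is null, *CONP == C and *LITP == 4.  */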
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
              goto associate_trees_exit;
            }
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                            fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
              goto associate_trees_exit;
            }
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
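
/* A sketch of a call, with the INTEGER_CST arguments built by
   build_int_cst:

     int_const_binop (PLUS_EXPR,
                      build_int_cst (integer_type_node, 2),
                      build_int_cst (integer_type_node, 3), 0)
       => the INTEGER_CST 5

   A division with a zero divisor returns NULL_TREE instead of
   folding.  */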
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
2019 if (TREE_CODE (arg1) == VECTOR_CST)
2021 tree type = TREE_TYPE (arg1);
2022 int count = TYPE_VECTOR_SUBPARTS (type), i;
2023 tree elements1, elements2, list = NULL_TREE;
2025 if (TREE_CODE (arg2) != VECTOR_CST)
2026 return NULL_TREE;
2028 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2029 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2031 for (i = 0; i < count; i++)
2033 tree elem1, elem2, elem;
2035 /* The trailing elements can be empty and should be treated as 0. */
2036 if (!elements1)
2037 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2038 else
2040 elem1 = TREE_VALUE (elements1);
2041 elements1 = TREE_CHAIN (elements1);
2044 if (!elements2)
2045 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2046 else
2048 elem2 = TREE_VALUE (elements2);
2049 elements2 = TREE_CHAIN (elements2);
2052 elem = const_binop (code, elem1, elem2, notrunc);
2054 /* It is possible that const_binop cannot handle the given
2055 code and returns NULL_TREE. */
2056 if (elem == NULL_TREE)
2057 return NULL_TREE;
2059 list = tree_cons (NULL_TREE, elem, list);
2061 return build_vector (type, nreverse (list));
2063 return NULL_TREE;
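/* Illustrative note: the COMPLEX_CST division above follows the
   textbook formula

     (a + b*i) / (c + d*i) = ((a*c + b*d) + (b*c - a*d)*i) / (c*c + d*d)

   with MAGSQUARED = c*c + d*d, T1 = a*c + b*d and T2 = b*c - a*d.
   For example, (1 + 2i) / (3 + 4i) folds to 11/25 + (2/25)*i.  */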
2066 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2067 indicates which particular sizetype to create. */
2069 tree
2070 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2072 return build_int_cst (sizetype_tab[(int) kind], number);
2075 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2076 is a tree code. The type of the result is taken from the operands.
2077 Both must be equivalent integer types, ala int_binop_types_match_p.
2078 If the operands are constant, so is the result. */
2080 tree
2081 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2083 tree type = TREE_TYPE (arg0);
2085 if (arg0 == error_mark_node || arg1 == error_mark_node)
2086 return error_mark_node;
2088 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2089 TREE_TYPE (arg1)));
2091 /* Handle the special case of two integer constants faster. */
2092 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2094 /* And some specific cases even faster than that. */
2095 if (code == PLUS_EXPR)
2097 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2098 return arg1;
2099 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2100 return arg0;
2102 else if (code == MINUS_EXPR)
2104 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2105 return arg0;
2107 else if (code == MULT_EXPR)
2109 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2110 return arg1;
2113 /* Handle general case of two integer constants. */
2114 return int_const_binop (code, arg0, arg1, 0);
2117 return fold_build2_loc (loc, code, type, arg0, arg1);
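/* Illustrative usage sketch (hypothetical values): folding
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) goes through
   int_const_binop and yields size_int (12), while adding size_int (0)
   to another INTEGER_CST simply returns that constant via the fast
   paths above.  */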
2120 /* Given two values, either both of sizetype or both of bitsizetype,
2121 compute the difference between the two values. Return the value
2122 in signed type corresponding to the type of the operands. */
2124 tree
2125 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2127 tree type = TREE_TYPE (arg0);
2128 tree ctype;
2130 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2131 TREE_TYPE (arg1)));
2133 /* If the type is already signed, just do the simple thing. */
2134 if (!TYPE_UNSIGNED (type))
2135 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2137 if (type == sizetype)
2138 ctype = ssizetype;
2139 else if (type == bitsizetype)
2140 ctype = sbitsizetype;
2141 else
2142 ctype = signed_type_for (type);
2144 /* If either operand is not a constant, do the conversions to the signed
2145 type and subtract. The hardware will do the right thing with any
2146 overflow in the subtraction. */
2147 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2148 return size_binop_loc (loc, MINUS_EXPR,
2149 fold_convert_loc (loc, ctype, arg0),
2150 fold_convert_loc (loc, ctype, arg1));
2152 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2153 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2154 overflow) and negate (which can't either). Special-case a result
2155 of zero while we're here. */
2156 if (tree_int_cst_equal (arg0, arg1))
2157 return build_int_cst (ctype, 0);
2158 else if (tree_int_cst_lt (arg1, arg0))
2159 return fold_convert_loc (loc, ctype,
2160 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2161 else
2162 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2163 fold_convert_loc (loc, ctype,
2164 size_binop_loc (loc,
2165 MINUS_EXPR,
2166 arg1, arg0)));
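/* Illustrative note: for the unsigned sizetype constants 2 and 5,
   size_diffop computes 0 - (ssizetype) (5 - 2) = -3 in the signed
   type, instead of the huge wrapped-around unsigned difference that a
   plain sizetype subtraction would produce.  */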
2169 /* A subroutine of fold_convert_const handling conversions of an
2170 INTEGER_CST to another integer type. */
2172 static tree
2173 fold_convert_const_int_from_int (tree type, const_tree arg1)
2175 tree t;
2177 /* Given an integer constant, make new constant with new type,
2178 appropriately sign-extended or truncated. */
2179 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2180 TREE_INT_CST_HIGH (arg1),
2181 /* Don't set the overflow when
2182 converting from a pointer, */
2183 !POINTER_TYPE_P (TREE_TYPE (arg1))
2184 /* or to a sizetype with the same signedness
2185 and unchanged precision.
2186 ??? sizetype is always sign-extended,
2187 but its signedness depends on the
2188 frontend. Thus we see spurious overflows
2189 here if we do not check this. */
2190 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2191 == TYPE_PRECISION (type))
2192 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2193 == TYPE_UNSIGNED (type))
2194 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2195 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2196 || (TREE_CODE (type) == INTEGER_TYPE
2197 && TYPE_IS_SIZETYPE (type)))),
2198 (TREE_INT_CST_HIGH (arg1) < 0
2199 && (TYPE_UNSIGNED (type)
2200 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2201 | TREE_OVERFLOW (arg1));
2203 return t;
2206 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2207 to an integer type. */
2209 static tree
2210 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2212 int overflow = 0;
2213 tree t;
2215 /* The following code implements the floating point to integer
2216 conversion rules required by the Java Language Specification,
2217 that IEEE NaNs are mapped to zero and values that overflow
2218 the target precision saturate, i.e. values greater than
2219 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2220 are mapped to INT_MIN. These semantics are allowed by the
2221 C and C++ standards that simply state that the behavior of
2222 FP-to-integer conversion is unspecified upon overflow. */
2224 HOST_WIDE_INT high, low;
2225 REAL_VALUE_TYPE r;
2226 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2228 switch (code)
2230 case FIX_TRUNC_EXPR:
2231 real_trunc (&r, VOIDmode, &x);
2232 break;
2234 default:
2235 gcc_unreachable ();
2238 /* If R is NaN, return zero and show we have an overflow. */
2239 if (REAL_VALUE_ISNAN (r))
2241 overflow = 1;
2242 high = 0;
2243 low = 0;
2246 /* See if R is less than the lower bound or greater than the
2247 upper bound. */
2249 if (! overflow)
2251 tree lt = TYPE_MIN_VALUE (type);
2252 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2253 if (REAL_VALUES_LESS (r, l))
2255 overflow = 1;
2256 high = TREE_INT_CST_HIGH (lt);
2257 low = TREE_INT_CST_LOW (lt);
2261 if (! overflow)
2263 tree ut = TYPE_MAX_VALUE (type);
2264 if (ut)
2266 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2267 if (REAL_VALUES_LESS (u, r))
2269 overflow = 1;
2270 high = TREE_INT_CST_HIGH (ut);
2271 low = TREE_INT_CST_LOW (ut);
2276 if (! overflow)
2277 REAL_VALUE_TO_INT (&low, &high, r);
2279 t = force_fit_type_double (type, low, high, -1,
2280 overflow | TREE_OVERFLOW (arg1));
2281 return t;
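/* Illustrative note: under the saturating semantics above, converting
   the REAL_CST 1.0e19 to a 32-bit signed integer type yields the type's
   maximum value 2147483647 with TREE_OVERFLOW set, and converting a NaN
   yields 0, also with TREE_OVERFLOW set.  */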
2284 /* A subroutine of fold_convert_const handling conversions of a
2285 FIXED_CST to an integer type. */
2287 static tree
2288 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2290 tree t;
2291 double_int temp, temp_trunc;
2292 unsigned int mode;
2294 /* Right shift FIXED_CST to temp by fbit. */
2295 temp = TREE_FIXED_CST (arg1).data;
2296 mode = TREE_FIXED_CST (arg1).mode;
2297 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2299 lshift_double (temp.low, temp.high,
2300 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2301 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2303 /* Left shift temp to temp_trunc by fbit. */
2304 lshift_double (temp.low, temp.high,
2305 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2306 &temp_trunc.low, &temp_trunc.high,
2307 SIGNED_FIXED_POINT_MODE_P (mode));
2309 else
2311 temp.low = 0;
2312 temp.high = 0;
2313 temp_trunc.low = 0;
2314 temp_trunc.high = 0;
2317 /* If FIXED_CST is negative, we need to round the value toward 0;
2318 we do this by adding 1 to temp when the fractional bits are not zero. */
2319 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2320 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2322 double_int one;
2323 one.low = 1;
2324 one.high = 0;
2325 temp = double_int_add (temp, one);
2328 /* Given a fixed-point constant, make new constant with new type,
2329 appropriately sign-extended or truncated. */
2330 t = force_fit_type_double (type, temp.low, temp.high, -1,
2331 (temp.high < 0
2332 && (TYPE_UNSIGNED (type)
2333 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2334 | TREE_OVERFLOW (arg1));
2336 return t;
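/* Illustrative note: the adjustment above makes the conversion truncate
   toward zero.  For a signed fixed-point value of -2.5, the arithmetic
   right shift by FBIT produces -3 (rounding toward negative infinity);
   because the discarded fractional bits were nonzero, 1 is added back,
   giving the expected result -2.  */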
2339 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2340 to another floating point type. */
2342 static tree
2343 fold_convert_const_real_from_real (tree type, const_tree arg1)
2345 REAL_VALUE_TYPE value;
2346 tree t;
2348 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2349 t = build_real (type, value);
2351 /* If converting an infinity or NAN to a representation that doesn't
2352 have one, set the overflow bit so that we can produce some kind of
2353 error message at the appropriate point if necessary. It's not the
2354 most user-friendly message, but it's better than nothing. */
2355 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2356 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2357 TREE_OVERFLOW (t) = 1;
2358 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2359 && !MODE_HAS_NANS (TYPE_MODE (type)))
2360 TREE_OVERFLOW (t) = 1;
2361 /* Regular overflow, conversion produced an infinity in a mode that
2362 can't represent them. */
2363 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2364 && REAL_VALUE_ISINF (value)
2365 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2366 TREE_OVERFLOW (t) = 1;
2367 else
2368 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2369 return t;
2372 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2373 to a floating point type. */
2375 static tree
2376 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2378 REAL_VALUE_TYPE value;
2379 tree t;
2381 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2382 t = build_real (type, value);
2384 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2385 return t;
2388 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2389 to another fixed-point type. */
2391 static tree
2392 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2394 FIXED_VALUE_TYPE value;
2395 tree t;
2396 bool overflow_p;
2398 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2399 TYPE_SATURATING (type));
2400 t = build_fixed (type, value);
2402 /* Propagate overflow flags. */
2403 if (overflow_p | TREE_OVERFLOW (arg1))
2404 TREE_OVERFLOW (t) = 1;
2405 return t;
2408 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2409 to a fixed-point type. */
2411 static tree
2412 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2414 FIXED_VALUE_TYPE value;
2415 tree t;
2416 bool overflow_p;
2418 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2419 TREE_INT_CST (arg1),
2420 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2421 TYPE_SATURATING (type));
2422 t = build_fixed (type, value);
2424 /* Propagate overflow flags. */
2425 if (overflow_p | TREE_OVERFLOW (arg1))
2426 TREE_OVERFLOW (t) = 1;
2427 return t;
2430 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2431 to a fixed-point type. */
2433 static tree
2434 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2436 FIXED_VALUE_TYPE value;
2437 tree t;
2438 bool overflow_p;
2440 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2441 &TREE_REAL_CST (arg1),
2442 TYPE_SATURATING (type));
2443 t = build_fixed (type, value);
2445 /* Propagate overflow flags. */
2446 if (overflow_p | TREE_OVERFLOW (arg1))
2447 TREE_OVERFLOW (t) = 1;
2448 return t;
2451 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2452 type TYPE. If no simplification can be done return NULL_TREE. */
2454 static tree
2455 fold_convert_const (enum tree_code code, tree type, tree arg1)
2457 if (TREE_TYPE (arg1) == type)
2458 return arg1;
2460 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2461 || TREE_CODE (type) == OFFSET_TYPE)
2463 if (TREE_CODE (arg1) == INTEGER_CST)
2464 return fold_convert_const_int_from_int (type, arg1);
2465 else if (TREE_CODE (arg1) == REAL_CST)
2466 return fold_convert_const_int_from_real (code, type, arg1);
2467 else if (TREE_CODE (arg1) == FIXED_CST)
2468 return fold_convert_const_int_from_fixed (type, arg1);
2470 else if (TREE_CODE (type) == REAL_TYPE)
2472 if (TREE_CODE (arg1) == INTEGER_CST)
2473 return build_real_from_int_cst (type, arg1);
2474 else if (TREE_CODE (arg1) == REAL_CST)
2475 return fold_convert_const_real_from_real (type, arg1);
2476 else if (TREE_CODE (arg1) == FIXED_CST)
2477 return fold_convert_const_real_from_fixed (type, arg1);
2479 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2481 if (TREE_CODE (arg1) == FIXED_CST)
2482 return fold_convert_const_fixed_from_fixed (type, arg1);
2483 else if (TREE_CODE (arg1) == INTEGER_CST)
2484 return fold_convert_const_fixed_from_int (type, arg1);
2485 else if (TREE_CODE (arg1) == REAL_CST)
2486 return fold_convert_const_fixed_from_real (type, arg1);
2488 return NULL_TREE;
2491 /* Construct a vector of zero elements of vector type TYPE. */
2493 static tree
2494 build_zero_vector (tree type)
2496 tree elem, list;
2497 int i, units;
2499 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2500 units = TYPE_VECTOR_SUBPARTS (type);
2502 list = NULL_TREE;
2503 for (i = 0; i < units; i++)
2504 list = tree_cons (NULL_TREE, elem, list);
2505 return build_vector (type, list);
2508 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2510 bool
2511 fold_convertible_p (const_tree type, const_tree arg)
2513 tree orig = TREE_TYPE (arg);
2515 if (type == orig)
2516 return true;
2518 if (TREE_CODE (arg) == ERROR_MARK
2519 || TREE_CODE (type) == ERROR_MARK
2520 || TREE_CODE (orig) == ERROR_MARK)
2521 return false;
2523 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2524 return true;
2526 switch (TREE_CODE (type))
2528 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2529 case POINTER_TYPE: case REFERENCE_TYPE:
2530 case OFFSET_TYPE:
2531 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2532 || TREE_CODE (orig) == OFFSET_TYPE)
2533 return true;
2534 return (TREE_CODE (orig) == VECTOR_TYPE
2535 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2537 case REAL_TYPE:
2538 case FIXED_POINT_TYPE:
2539 case COMPLEX_TYPE:
2540 case VECTOR_TYPE:
2541 case VOID_TYPE:
2542 return TREE_CODE (type) == TREE_CODE (orig);
2544 default:
2545 return false;
2549 /* Convert expression ARG to type TYPE. Used by the middle-end for
2550 simple conversions in preference to calling the front-end's convert. */
2552 tree
2553 fold_convert_loc (location_t loc, tree type, tree arg)
2555 tree orig = TREE_TYPE (arg);
2556 tree tem;
2558 if (type == orig)
2559 return arg;
2561 if (TREE_CODE (arg) == ERROR_MARK
2562 || TREE_CODE (type) == ERROR_MARK
2563 || TREE_CODE (orig) == ERROR_MARK)
2564 return error_mark_node;
2566 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2567 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2569 switch (TREE_CODE (type))
2571 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2572 case POINTER_TYPE: case REFERENCE_TYPE:
2573 case OFFSET_TYPE:
2574 if (TREE_CODE (arg) == INTEGER_CST)
2576 tem = fold_convert_const (NOP_EXPR, type, arg);
2577 if (tem != NULL_TREE)
2578 return tem;
2580 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2581 || TREE_CODE (orig) == OFFSET_TYPE)
2582 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2583 if (TREE_CODE (orig) == COMPLEX_TYPE)
2584 return fold_convert_loc (loc, type,
2585 fold_build1_loc (loc, REALPART_EXPR,
2586 TREE_TYPE (orig), arg));
2587 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2588 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2589 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2591 case REAL_TYPE:
2592 if (TREE_CODE (arg) == INTEGER_CST)
2594 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2595 if (tem != NULL_TREE)
2596 return tem;
2598 else if (TREE_CODE (arg) == REAL_CST)
2600 tem = fold_convert_const (NOP_EXPR, type, arg);
2601 if (tem != NULL_TREE)
2602 return tem;
2604 else if (TREE_CODE (arg) == FIXED_CST)
2606 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2607 if (tem != NULL_TREE)
2608 return tem;
2611 switch (TREE_CODE (orig))
2613 case INTEGER_TYPE:
2614 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2615 case POINTER_TYPE: case REFERENCE_TYPE:
2616 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2618 case REAL_TYPE:
2619 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2621 case FIXED_POINT_TYPE:
2622 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2624 case COMPLEX_TYPE:
2625 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2626 return fold_convert_loc (loc, type, tem);
2628 default:
2629 gcc_unreachable ();
2632 case FIXED_POINT_TYPE:
2633 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2634 || TREE_CODE (arg) == REAL_CST)
2636 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2637 if (tem != NULL_TREE)
2638 goto fold_convert_exit;
2641 switch (TREE_CODE (orig))
2643 case FIXED_POINT_TYPE:
2644 case INTEGER_TYPE:
2645 case ENUMERAL_TYPE:
2646 case BOOLEAN_TYPE:
2647 case REAL_TYPE:
2648 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2650 case COMPLEX_TYPE:
2651 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2652 return fold_convert_loc (loc, type, tem);
2654 default:
2655 gcc_unreachable ();
2658 case COMPLEX_TYPE:
2659 switch (TREE_CODE (orig))
2661 case INTEGER_TYPE:
2662 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2663 case POINTER_TYPE: case REFERENCE_TYPE:
2664 case REAL_TYPE:
2665 case FIXED_POINT_TYPE:
2666 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2667 fold_convert_loc (loc, TREE_TYPE (type), arg),
2668 fold_convert_loc (loc, TREE_TYPE (type),
2669 integer_zero_node));
2670 case COMPLEX_TYPE:
2672 tree rpart, ipart;
2674 if (TREE_CODE (arg) == COMPLEX_EXPR)
2676 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2677 TREE_OPERAND (arg, 0));
2678 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2679 TREE_OPERAND (arg, 1));
2680 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2683 arg = save_expr (arg);
2684 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2685 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2686 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2687 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2688 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2691 default:
2692 gcc_unreachable ();
2695 case VECTOR_TYPE:
2696 if (integer_zerop (arg))
2697 return build_zero_vector (type);
2698 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2699 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2700 || TREE_CODE (orig) == VECTOR_TYPE);
2701 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2703 case VOID_TYPE:
2704 tem = fold_ignored_result (arg);
2705 if (TREE_CODE (tem) == MODIFY_EXPR)
2706 goto fold_convert_exit;
2707 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2709 default:
2710 gcc_unreachable ();
2712 fold_convert_exit:
2713 protected_set_expr_location (tem, loc);
2714 return tem;
2717 /* Return false if expr can be assumed not to be an lvalue, true
2718 otherwise. */
2720 static bool
2721 maybe_lvalue_p (const_tree x)
2723 /* We only need to wrap lvalue tree codes. */
2724 switch (TREE_CODE (x))
2726 case VAR_DECL:
2727 case PARM_DECL:
2728 case RESULT_DECL:
2729 case LABEL_DECL:
2730 case FUNCTION_DECL:
2731 case SSA_NAME:
2733 case COMPONENT_REF:
2734 case INDIRECT_REF:
2735 case ALIGN_INDIRECT_REF:
2736 case MISALIGNED_INDIRECT_REF:
2737 case ARRAY_REF:
2738 case ARRAY_RANGE_REF:
2739 case BIT_FIELD_REF:
2740 case OBJ_TYPE_REF:
2742 case REALPART_EXPR:
2743 case IMAGPART_EXPR:
2744 case PREINCREMENT_EXPR:
2745 case PREDECREMENT_EXPR:
2746 case SAVE_EXPR:
2747 case TRY_CATCH_EXPR:
2748 case WITH_CLEANUP_EXPR:
2749 case COMPOUND_EXPR:
2750 case MODIFY_EXPR:
2751 case TARGET_EXPR:
2752 case COND_EXPR:
2753 case BIND_EXPR:
2754 case MIN_EXPR:
2755 case MAX_EXPR:
2756 break;
2758 default:
2759 /* Assume the worst for front-end tree codes. */
2760 if ((int) TREE_CODE (x) >= NUM_TREE_CODES)
2761 break;
2762 return false;
2765 return true;
2768 /* Return an expr equal to X but certainly not valid as an lvalue. */
2770 tree
2771 non_lvalue_loc (location_t loc, tree x)
2773 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2774 us. */
2775 if (in_gimple_form)
2776 return x;
2778 if (! maybe_lvalue_p (x))
2779 return x;
2780 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2781 SET_EXPR_LOCATION (x, loc);
2782 return x;
2785 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2786 Zero means allow extended lvalues. */
2788 int pedantic_lvalues;
2790 /* When pedantic, return an expr equal to X but certainly not valid as a
2791 pedantic lvalue. Otherwise, return X. */
2793 static tree
2794 pedantic_non_lvalue_loc (location_t loc, tree x)
2796 if (pedantic_lvalues)
2797 return non_lvalue_loc (loc, x);
2798 protected_set_expr_location (x, loc);
2799 return x;
2802 /* Given a tree comparison code, return the code that is the logical inverse
2803 of the given code. It is not safe to do this for floating-point
2804 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a flag
2805 saying whether NaNs must be honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2807 enum tree_code
2808 invert_tree_comparison (enum tree_code code, bool honor_nans)
2810 if (honor_nans && flag_trapping_math)
2811 return ERROR_MARK;
2813 switch (code)
2815 case EQ_EXPR:
2816 return NE_EXPR;
2817 case NE_EXPR:
2818 return EQ_EXPR;
2819 case GT_EXPR:
2820 return honor_nans ? UNLE_EXPR : LE_EXPR;
2821 case GE_EXPR:
2822 return honor_nans ? UNLT_EXPR : LT_EXPR;
2823 case LT_EXPR:
2824 return honor_nans ? UNGE_EXPR : GE_EXPR;
2825 case LE_EXPR:
2826 return honor_nans ? UNGT_EXPR : GT_EXPR;
2827 case LTGT_EXPR:
2828 return UNEQ_EXPR;
2829 case UNEQ_EXPR:
2830 return LTGT_EXPR;
2831 case UNGT_EXPR:
2832 return LE_EXPR;
2833 case UNGE_EXPR:
2834 return LT_EXPR;
2835 case UNLT_EXPR:
2836 return GE_EXPR;
2837 case UNLE_EXPR:
2838 return GT_EXPR;
2839 case ORDERED_EXPR:
2840 return UNORDERED_EXPR;
2841 case UNORDERED_EXPR:
2842 return ORDERED_EXPR;
2843 default:
2844 gcc_unreachable ();
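/* Illustrative note: when NaNs are honored, !(a < b) is not (a >= b)
   but "a >= b or unordered", which is why LT_EXPR inverts to UNGE_EXPR
   above; the plain GE_EXPR inverse is used only when NaNs cannot
   occur.  */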
2848 /* Similar, but return the comparison that results if the operands are
2849 swapped. This is safe for floating-point. */
2851 enum tree_code
2852 swap_tree_comparison (enum tree_code code)
2854 switch (code)
2856 case EQ_EXPR:
2857 case NE_EXPR:
2858 case ORDERED_EXPR:
2859 case UNORDERED_EXPR:
2860 case LTGT_EXPR:
2861 case UNEQ_EXPR:
2862 return code;
2863 case GT_EXPR:
2864 return LT_EXPR;
2865 case GE_EXPR:
2866 return LE_EXPR;
2867 case LT_EXPR:
2868 return GT_EXPR;
2869 case LE_EXPR:
2870 return GE_EXPR;
2871 case UNGT_EXPR:
2872 return UNLT_EXPR;
2873 case UNGE_EXPR:
2874 return UNLE_EXPR;
2875 case UNLT_EXPR:
2876 return UNGT_EXPR;
2877 case UNLE_EXPR:
2878 return UNGE_EXPR;
2879 default:
2880 gcc_unreachable ();
2885 /* Convert a comparison tree code from an enum tree_code representation
2886 into a compcode bit-based encoding. This function is the inverse of
2887 compcode_to_comparison. */
2889 static enum comparison_code
2890 comparison_to_compcode (enum tree_code code)
2892 switch (code)
2894 case LT_EXPR:
2895 return COMPCODE_LT;
2896 case EQ_EXPR:
2897 return COMPCODE_EQ;
2898 case LE_EXPR:
2899 return COMPCODE_LE;
2900 case GT_EXPR:
2901 return COMPCODE_GT;
2902 case NE_EXPR:
2903 return COMPCODE_NE;
2904 case GE_EXPR:
2905 return COMPCODE_GE;
2906 case ORDERED_EXPR:
2907 return COMPCODE_ORD;
2908 case UNORDERED_EXPR:
2909 return COMPCODE_UNORD;
2910 case UNLT_EXPR:
2911 return COMPCODE_UNLT;
2912 case UNEQ_EXPR:
2913 return COMPCODE_UNEQ;
2914 case UNLE_EXPR:
2915 return COMPCODE_UNLE;
2916 case UNGT_EXPR:
2917 return COMPCODE_UNGT;
2918 case LTGT_EXPR:
2919 return COMPCODE_LTGT;
2920 case UNGE_EXPR:
2921 return COMPCODE_UNGE;
2922 default:
2923 gcc_unreachable ();
2927 /* Convert a compcode bit-based encoding of a comparison operator back
2928 to GCC's enum tree_code representation. This function is the
2929 inverse of comparison_to_compcode. */
2931 static enum tree_code
2932 compcode_to_comparison (enum comparison_code code)
2934 switch (code)
2936 case COMPCODE_LT:
2937 return LT_EXPR;
2938 case COMPCODE_EQ:
2939 return EQ_EXPR;
2940 case COMPCODE_LE:
2941 return LE_EXPR;
2942 case COMPCODE_GT:
2943 return GT_EXPR;
2944 case COMPCODE_NE:
2945 return NE_EXPR;
2946 case COMPCODE_GE:
2947 return GE_EXPR;
2948 case COMPCODE_ORD:
2949 return ORDERED_EXPR;
2950 case COMPCODE_UNORD:
2951 return UNORDERED_EXPR;
2952 case COMPCODE_UNLT:
2953 return UNLT_EXPR;
2954 case COMPCODE_UNEQ:
2955 return UNEQ_EXPR;
2956 case COMPCODE_UNLE:
2957 return UNLE_EXPR;
2958 case COMPCODE_UNGT:
2959 return UNGT_EXPR;
2960 case COMPCODE_LTGT:
2961 return LTGT_EXPR;
2962 case COMPCODE_UNGE:
2963 return UNGE_EXPR;
2964 default:
2965 gcc_unreachable ();
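/* Illustrative note: the point of the bit-based encoding is that the
   logical AND/OR of two comparisons becomes the bitwise AND/OR of
   their compcodes.  For example, (a < b) || (a == b) combines
   COMPCODE_LT | COMPCODE_EQ, which is COMPCODE_LE, i.e. a <= b.  */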
2969 /* Return a tree for the comparison which is the combination of
2970 doing the AND or OR (depending on CODE) of the two operations LCODE
2971 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2972 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2973 if this makes the transformation invalid. */
2975 tree
2976 combine_comparisons (location_t loc,
2977 enum tree_code code, enum tree_code lcode,
2978 enum tree_code rcode, tree truth_type,
2979 tree ll_arg, tree lr_arg)
2981 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2982 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2983 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2984 int compcode;
2986 switch (code)
2988 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2989 compcode = lcompcode & rcompcode;
2990 break;
2992 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2993 compcode = lcompcode | rcompcode;
2994 break;
2996 default:
2997 return NULL_TREE;
3000 if (!honor_nans)
3002 /* Eliminate unordered comparisons, as well as LTGT and ORD
3003 which are not used unless the mode has NaNs. */
3004 compcode &= ~COMPCODE_UNORD;
3005 if (compcode == COMPCODE_LTGT)
3006 compcode = COMPCODE_NE;
3007 else if (compcode == COMPCODE_ORD)
3008 compcode = COMPCODE_TRUE;
3010 else if (flag_trapping_math)
3012 /* Check that the original operation and the optimized ones will trap
3013 under the same condition. */
3014 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3015 && (lcompcode != COMPCODE_EQ)
3016 && (lcompcode != COMPCODE_ORD);
3017 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3018 && (rcompcode != COMPCODE_EQ)
3019 && (rcompcode != COMPCODE_ORD);
3020 bool trap = (compcode & COMPCODE_UNORD) == 0
3021 && (compcode != COMPCODE_EQ)
3022 && (compcode != COMPCODE_ORD);
3024 /* In a short-circuited boolean expression the LHS might be
3025 such that the RHS, if evaluated, will never trap. For
3026 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3027 if neither x nor y is NaN. (This is a mixed blessing: for
3028 example, the expression above will never trap, hence
3029 optimizing it to x < y would be invalid). */
3030 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3031 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3032 rtrap = false;
3034 /* If the comparison was short-circuited, and only the RHS
3035 trapped, we may now generate a spurious trap. */
3036 if (rtrap && !ltrap
3037 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3038 return NULL_TREE;
3040 /* If we changed the conditions that cause a trap, we lose. */
3041 if ((ltrap || rtrap) != trap)
3042 return NULL_TREE;
3045 if (compcode == COMPCODE_TRUE)
3046 return constant_boolean_node (true, truth_type);
3047 else if (compcode == COMPCODE_FALSE)
3048 return constant_boolean_node (false, truth_type);
3049 else
3051 enum tree_code tcode;
3053 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3054 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3058 /* Return nonzero if two operands (typically of the same tree node)
3059 are necessarily equal. If either argument has side-effects this
3060 function returns zero. FLAGS modifies behavior as follows:
3062 If OEP_ONLY_CONST is set, only return nonzero for constants.
3063 This function tests whether the operands are indistinguishable;
3064 it does not test whether they are equal using C's == operation.
3065 The distinction is important for IEEE floating point, because
3066 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3067 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3069 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3070 even though it may hold multiple values during a function.
3071 This is because a GCC tree node guarantees that nothing else is
3072 executed between the evaluation of its "operands" (which may often
3073 be evaluated in arbitrary order). Hence if the operands themselves
3074 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3075 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3076 unset means assuming isochronic (or instantaneous) tree equivalence.
3077 Unless comparing arbitrary expression trees, such as from different
3078 statements, this flag can usually be left unset.
3080 If OEP_PURE_SAME is set, then pure functions with identical arguments
3081 are considered the same. It is used when the caller has other ways
3082 to ensure that global memory is unchanged in between. */
3084 int
3085 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3087 /* If either is ERROR_MARK, they aren't equal. */
3088 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3089 return 0;
3091 /* Check equality of integer constants before bailing out due to
3092 precision differences. */
3093 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3094 return tree_int_cst_equal (arg0, arg1);
3096 /* If both types don't have the same signedness, then we can't consider
3097 them equal. We must check this before the STRIP_NOPS calls
3098 because they may change the signedness of the arguments. As pointers
3099 strictly don't have a signedness, require either two pointers or
3100 two non-pointers as well. */
3101 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3102 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3103 return 0;
3105 /* If both types don't have the same precision, then it is not safe
3106 to strip NOPs. */
3107 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3108 return 0;
3110 STRIP_NOPS (arg0);
3111 STRIP_NOPS (arg1);
3113 /* In case both args are comparisons but with different comparison
3114 code, try to swap the comparison operands of one arg to produce
3115 a match and compare that variant. */
3116 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3117 && COMPARISON_CLASS_P (arg0)
3118 && COMPARISON_CLASS_P (arg1))
3120 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3122 if (TREE_CODE (arg0) == swap_code)
3123 return operand_equal_p (TREE_OPERAND (arg0, 0),
3124 TREE_OPERAND (arg1, 1), flags)
3125 && operand_equal_p (TREE_OPERAND (arg0, 1),
3126 TREE_OPERAND (arg1, 0), flags);
3129 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3130 /* This is needed for conversions and for COMPONENT_REF.
3131 Might as well play it safe and always test this. */
3132 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3133 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3134 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3135 return 0;
3137 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3138 We don't care about side effects in that case because the SAVE_EXPR
3139 takes care of that for us. In all other cases, two expressions are
3140 equal if they have no side effects. If we have two identical
3141 expressions with side effects that should be treated the same due
3142 to the only side effects being identical SAVE_EXPR's, that will
3143 be detected in the recursive calls below. */
3144 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3145 && (TREE_CODE (arg0) == SAVE_EXPR
3146 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3147 return 1;
3149 /* Next handle constant cases, those for which we can return 1 even
3150 if ONLY_CONST is set. */
3151 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3152 switch (TREE_CODE (arg0))
3154 case INTEGER_CST:
3155 return tree_int_cst_equal (arg0, arg1);
3157 case FIXED_CST:
3158 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3159 TREE_FIXED_CST (arg1));
3161 case REAL_CST:
3162 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3163 TREE_REAL_CST (arg1)))
3164 return 1;
3167 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3169 /* If we do not distinguish between signed and unsigned zero,
3170 consider them equal. */
3171 if (real_zerop (arg0) && real_zerop (arg1))
3172 return 1;
3174 return 0;
3176 case VECTOR_CST:
3178 tree v1, v2;
3180 v1 = TREE_VECTOR_CST_ELTS (arg0);
3181 v2 = TREE_VECTOR_CST_ELTS (arg1);
3182 while (v1 && v2)
3184 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3185 flags))
3186 return 0;
3187 v1 = TREE_CHAIN (v1);
3188 v2 = TREE_CHAIN (v2);
3191 return v1 == v2;
3194 case COMPLEX_CST:
3195 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3196 flags)
3197 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3198 flags));
3200 case STRING_CST:
3201 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3202 && ! memcmp (TREE_STRING_POINTER (arg0),
3203 TREE_STRING_POINTER (arg1),
3204 TREE_STRING_LENGTH (arg0)));
3206 case ADDR_EXPR:
3207 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3208 flags);
3209 default:
3210 break;
3213 if (flags & OEP_ONLY_CONST)
3214 return 0;
3216 /* Define macros to test an operand from arg0 and arg1 for equality and a
3217 variant that allows null and views null as being different from any
3218 non-null value. In the latter case, if either is null, then both
3219 must be; otherwise, do the normal comparison. */
3220 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3221 TREE_OPERAND (arg1, N), flags)
3223 #define OP_SAME_WITH_NULL(N) \
3224 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3225 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3227 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3229 case tcc_unary:
3230 /* Two conversions are equal only if signedness and modes match. */
3231 switch (TREE_CODE (arg0))
3233 CASE_CONVERT:
3234 case FIX_TRUNC_EXPR:
3235 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3236 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3237 return 0;
3238 break;
3239 default:
3240 break;
3243 return OP_SAME (0);
3246 case tcc_comparison:
3247 case tcc_binary:
3248 if (OP_SAME (0) && OP_SAME (1))
3249 return 1;
3251 /* For commutative ops, allow the other order. */
3252 return (commutative_tree_code (TREE_CODE (arg0))
3253 && operand_equal_p (TREE_OPERAND (arg0, 0),
3254 TREE_OPERAND (arg1, 1), flags)
3255 && operand_equal_p (TREE_OPERAND (arg0, 1),
3256 TREE_OPERAND (arg1, 0), flags));
3258 case tcc_reference:
3259 /* If either of the pointer (or reference) expressions we are
3260 dereferencing contain a side effect, these cannot be equal. */
3261 if (TREE_SIDE_EFFECTS (arg0)
3262 || TREE_SIDE_EFFECTS (arg1))
3263 return 0;
3265 switch (TREE_CODE (arg0))
3267 case INDIRECT_REF:
3268 case ALIGN_INDIRECT_REF:
3269 case MISALIGNED_INDIRECT_REF:
3270 case REALPART_EXPR:
3271 case IMAGPART_EXPR:
3272 return OP_SAME (0);
3274 case ARRAY_REF:
3275 case ARRAY_RANGE_REF:
3276 /* Operands 2 and 3 may be null.
3277 Compare the array index by value first if it is constant, as we
3278 may have different types but the same value here. */
3279 return (OP_SAME (0)
3280 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3281 TREE_OPERAND (arg1, 1))
3282 || OP_SAME (1))
3283 && OP_SAME_WITH_NULL (2)
3284 && OP_SAME_WITH_NULL (3));
3286 case COMPONENT_REF:
3287 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3288 may be NULL when we're called to compare MEM_EXPRs. */
3289 return OP_SAME_WITH_NULL (0)
3290 && OP_SAME (1)
3291 && OP_SAME_WITH_NULL (2);
3293 case BIT_FIELD_REF:
3294 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3296 default:
3297 return 0;
3300 case tcc_expression:
3301 switch (TREE_CODE (arg0))
3303 case ADDR_EXPR:
3304 case TRUTH_NOT_EXPR:
3305 return OP_SAME (0);
3307 case TRUTH_ANDIF_EXPR:
3308 case TRUTH_ORIF_EXPR:
3309 return OP_SAME (0) && OP_SAME (1);
3311 case TRUTH_AND_EXPR:
3312 case TRUTH_OR_EXPR:
3313 case TRUTH_XOR_EXPR:
3314 if (OP_SAME (0) && OP_SAME (1))
3315 return 1;
3317 /* Otherwise take into account this is a commutative operation. */
3318 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3319 TREE_OPERAND (arg1, 1), flags)
3320 && operand_equal_p (TREE_OPERAND (arg0, 1),
3321 TREE_OPERAND (arg1, 0), flags));
3323 case COND_EXPR:
3324 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3326 default:
3327 return 0;
3330 case tcc_vl_exp:
3331 switch (TREE_CODE (arg0))
3333 case CALL_EXPR:
3334 /* If the CALL_EXPRs call different functions, then they
3335 clearly cannot be equal.
3336 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3337 flags))
3338 return 0;
3341 unsigned int cef = call_expr_flags (arg0);
3342 if (flags & OEP_PURE_SAME)
3343 cef &= ECF_CONST | ECF_PURE;
3344 else
3345 cef &= ECF_CONST;
3346 if (!cef)
3347 return 0;
3350 /* Now see if all the arguments are the same. */
3352 const_call_expr_arg_iterator iter0, iter1;
3353 const_tree a0, a1;
3354 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3355 a1 = first_const_call_expr_arg (arg1, &iter1);
3356 a0 && a1;
3357 a0 = next_const_call_expr_arg (&iter0),
3358 a1 = next_const_call_expr_arg (&iter1))
3359 if (! operand_equal_p (a0, a1, flags))
3360 return 0;
3362 /* If we get here and both argument lists are exhausted
3363 then the CALL_EXPRs are equal. */
3364 return ! (a0 || a1);
3366 default:
3367 return 0;
3370 case tcc_declaration:
3371 /* Consider __builtin_sqrt equal to sqrt. */
3372 return (TREE_CODE (arg0) == FUNCTION_DECL
3373 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3374 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3375 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3377 default:
3378 return 0;
3381 #undef OP_SAME
3382 #undef OP_SAME_WITH_NULL
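/* Illustrative usage sketch: operand_equal_p (x, x, 0) is nonzero for
   a side-effect-free VAR_DECL X, whereas with OEP_ONLY_CONST only
   constants compare equal.  Per the REAL_CST case above, 0.0 and -0.0
   are considered equal only when the mode does not honor signed
   zeros.  */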
3385 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3386 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3388 When in doubt, return 0. */
3390 static int
3391 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3393 int unsignedp1, unsignedpo;
3394 tree primarg0, primarg1, primother;
3395 unsigned int correct_width;
3397 if (operand_equal_p (arg0, arg1, 0))
3398 return 1;
3400 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3401 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3402 return 0;
3404 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3405 and see if the inner values are the same. This removes any
3406 signedness comparison, which doesn't matter here. */
3407 primarg0 = arg0, primarg1 = arg1;
3408 STRIP_NOPS (primarg0);
3409 STRIP_NOPS (primarg1);
3410 if (operand_equal_p (primarg0, primarg1, 0))
3411 return 1;
3413 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3414 actual comparison operand, ARG0.
3416 First throw away any conversions to wider types
3417 already present in the operands. */
3419 primarg1 = get_narrower (arg1, &unsignedp1);
3420 primother = get_narrower (other, &unsignedpo);
3422 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3423 if (unsignedp1 == unsignedpo
3424 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3425 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3427 tree type = TREE_TYPE (arg0);
3429 /* Make sure shorter operand is extended the right way
3430 to match the longer operand. */
3431 primarg1 = fold_convert (signed_or_unsigned_type_for
3432 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3434 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3435 return 1;
3438 return 0;
3441 /* See if ARG is an expression that is either a comparison or is performing
3442 arithmetic on comparisons. The comparisons must only be comparing
3443 two different values, which will be stored in *CVAL1 and *CVAL2; if
3444 they are nonzero it means that some operands have already been found.
3445 No variables may be used anywhere else in the expression except in the
3446 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3447 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3449 If this is true, return 1. Otherwise, return zero. */
3451 static int
3452 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3454 enum tree_code code = TREE_CODE (arg);
3455 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3457 /* We can handle some of the tcc_expression cases here. */
3458 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3459 tclass = tcc_unary;
3460 else if (tclass == tcc_expression
3461 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3462 || code == COMPOUND_EXPR))
3463 tclass = tcc_binary;
3465 else if (tclass == tcc_expression && code == SAVE_EXPR
3466 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3468 /* If we've already found a CVAL1 or CVAL2, this expression is
3469 too complex to handle. */
3470 if (*cval1 || *cval2)
3471 return 0;
3473 tclass = tcc_unary;
3474 *save_p = 1;
3477 switch (tclass)
3479 case tcc_unary:
3480 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3482 case tcc_binary:
3483 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3484 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3485 cval1, cval2, save_p));
3487 case tcc_constant:
3488 return 1;
3490 case tcc_expression:
3491 if (code == COND_EXPR)
3492 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3493 cval1, cval2, save_p)
3494 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3495 cval1, cval2, save_p)
3496 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3497 cval1, cval2, save_p));
3498 return 0;
3500 case tcc_comparison:
3501 /* First see if we can handle the first operand, then the second. For
3502 the second operand, we know *CVAL1 can't be zero. It must be that
3503 one side of the comparison is each of the values; test for the
3504 case where this isn't true by failing if the two operands
3505 are the same. */
3507 if (operand_equal_p (TREE_OPERAND (arg, 0),
3508 TREE_OPERAND (arg, 1), 0))
3509 return 0;
3511 if (*cval1 == 0)
3512 *cval1 = TREE_OPERAND (arg, 0);
3513 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3515 else if (*cval2 == 0)
3516 *cval2 = TREE_OPERAND (arg, 0);
3517 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3519 else
3520 return 0;
3522 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3524 else if (*cval2 == 0)
3525 *cval2 = TREE_OPERAND (arg, 1);
3526 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3528 else
3529 return 0;
3531 return 1;
3533 default:
3534 return 0;
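/* Illustrative note: for ARG = (x < y) && (x == y) the walk above
   records *CVAL1 = x and *CVAL2 = y and returns 1; an expression such
   as (x < y) && (z == 1) fails because a third value Z appears inside
   a comparison.  */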
3538 /* ARG is a tree that is known to contain just arithmetic operations and
3539 comparisons. Evaluate the operations in the tree substituting NEW0 for
3540 any occurrence of OLD0 as an operand of a comparison and likewise for
3541 NEW1 and OLD1. */
3543 static tree
3544 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3545 tree old1, tree new1)
3547 tree type = TREE_TYPE (arg);
3548 enum tree_code code = TREE_CODE (arg);
3549 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3551 /* We can handle some of the tcc_expression cases here. */
3552 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3553 tclass = tcc_unary;
3554 else if (tclass == tcc_expression
3555 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3556 tclass = tcc_binary;
3558 switch (tclass)
3560 case tcc_unary:
3561 return fold_build1_loc (loc, code, type,
3562 eval_subst (loc, TREE_OPERAND (arg, 0),
3563 old0, new0, old1, new1));
3565 case tcc_binary:
3566 return fold_build2_loc (loc, code, type,
3567 eval_subst (loc, TREE_OPERAND (arg, 0),
3568 old0, new0, old1, new1),
3569 eval_subst (loc, TREE_OPERAND (arg, 1),
3570 old0, new0, old1, new1));
3572 case tcc_expression:
3573 switch (code)
3575 case SAVE_EXPR:
3576 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3577 old1, new1);
3579 case COMPOUND_EXPR:
3580 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3581 old1, new1);
3583 case COND_EXPR:
3584 return fold_build3_loc (loc, code, type,
3585 eval_subst (loc, TREE_OPERAND (arg, 0),
3586 old0, new0, old1, new1),
3587 eval_subst (loc, TREE_OPERAND (arg, 1),
3588 old0, new0, old1, new1),
3589 eval_subst (loc, TREE_OPERAND (arg, 2),
3590 old0, new0, old1, new1));
3591 default:
3592 break;
3594 /* Fall through - ??? */
3596 case tcc_comparison:
3598 tree arg0 = TREE_OPERAND (arg, 0);
3599 tree arg1 = TREE_OPERAND (arg, 1);
3601 /* We need to check both for exact equality and tree equality. The
3602 former will be true if the operand has a side-effect. In that
3603 case, we know the operand occurred exactly once. */
3605 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3606 arg0 = new0;
3607 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3608 arg0 = new1;
3610 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3611 arg1 = new0;
3612 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3613 arg1 = new1;
3615 return fold_build2_loc (loc, code, type, arg0, arg1);
3618 default:
3619 return arg;
3623 /* Return a tree for the case when the result of an expression is RESULT
3624 converted to TYPE and OMITTED was previously an operand of the expression
3625 but is now not needed (e.g., we folded OMITTED * 0).
3627 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3628 the conversion of RESULT to TYPE. */
3630 tree
3631 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3633 tree t = fold_convert_loc (loc, type, result);
3635 /* If the resulting operand is an empty statement, just return the omitted
3636 statement cast to void. */
3637 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3639 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3640 goto omit_one_operand_exit;
3643 if (TREE_SIDE_EFFECTS (omitted))
3645 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3646 goto omit_one_operand_exit;
3649 return non_lvalue_loc (loc, t);
3651 omit_one_operand_exit:
3652 protected_set_expr_location (t, loc);
3653 return t;
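/* Illustrative usage sketch: when folding f () * 0, the call cannot
   simply be discarded, so passing integer_zero_node as RESULT and the
   call as OMITTED produces the COMPOUND_EXPR (f (), 0), preserving the
   call's side effects while still yielding the folded value.  */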
3656 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3658 static tree
3659 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3660 tree omitted)
3662 tree t = fold_convert_loc (loc, type, result);
3664 /* If the resulting operand is an empty statement, just return the omitted
3665 statement cast to void. */
3666 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3668 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3669 goto pedantic_omit_one_operand_exit;
3672 if (TREE_SIDE_EFFECTS (omitted))
3674 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3675 goto pedantic_omit_one_operand_exit;
3678 return pedantic_non_lvalue_loc (loc, t);
3680 pedantic_omit_one_operand_exit:
3681 protected_set_expr_location (t, loc);
3682 return t;
3685 /* Return a tree for the case when the result of an expression is RESULT
3686 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3687 of the expression but are now not needed.
3689 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3690 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3691 evaluated before OMITTED2. Otherwise, if neither has side effects,
3692 just do the conversion of RESULT to TYPE. */
3694 tree
3695 omit_two_operands_loc (location_t loc, tree type, tree result,
3696 tree omitted1, tree omitted2)
3698 tree t = fold_convert_loc (loc, type, result);
3700 if (TREE_SIDE_EFFECTS (omitted2))
3702 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3703 SET_EXPR_LOCATION (t, loc);
3705 if (TREE_SIDE_EFFECTS (omitted1))
3707 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3708 SET_EXPR_LOCATION (t, loc);
3711 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3715 /* Return a simplified tree node for the truth-negation of ARG. This
3716 never alters ARG itself. We assume that ARG is an operation that
3717 returns a truth value (0 or 1).
3719 FIXME: one would think we would fold the result, but it causes
3720 problems with the dominator optimizer. */
3722 tree
3723 fold_truth_not_expr (location_t loc, tree arg)
3725 tree t, type = TREE_TYPE (arg);
3726 enum tree_code code = TREE_CODE (arg);
3727 location_t loc1, loc2;
3729 /* If this is a comparison, we can simply invert it, except for
3730 floating-point non-equality comparisons, in which case we just
3731 enclose a TRUTH_NOT_EXPR around what we have. */
3733 if (TREE_CODE_CLASS (code) == tcc_comparison)
3735 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3736 if (FLOAT_TYPE_P (op_type)
3737 && flag_trapping_math
3738 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3739 && code != NE_EXPR && code != EQ_EXPR)
3740 return NULL_TREE;
3742 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3743 if (code == ERROR_MARK)
3744 return NULL_TREE;
3746 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3747 SET_EXPR_LOCATION (t, loc);
3748 return t;
3751 switch (code)
3753 case INTEGER_CST:
3754 return constant_boolean_node (integer_zerop (arg), type);
3756 case TRUTH_AND_EXPR:
3757 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3758 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3759 if (loc1 == UNKNOWN_LOCATION)
3760 loc1 = loc;
3761 if (loc2 == UNKNOWN_LOCATION)
3762 loc2 = loc;
3763 t = build2 (TRUTH_OR_EXPR, type,
3764 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3765 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3766 break;
3768 case TRUTH_OR_EXPR:
3769 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3770 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3771 if (loc1 == UNKNOWN_LOCATION)
3772 loc1 = loc;
3773 if (loc2 == UNKNOWN_LOCATION)
3774 loc2 = loc;
3775 t = build2 (TRUTH_AND_EXPR, type,
3776 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3777 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3778 break;
3780 case TRUTH_XOR_EXPR:
3781 /* Here we can invert either operand. We invert the first operand
3782 unless the second operand is a TRUTH_NOT_EXPR in which case our
3783 result is the XOR of the first operand with the inside of the
3784 negation of the second operand. */
3786 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3787 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3788 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3789 else
3790 t = build2 (TRUTH_XOR_EXPR, type,
3791 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3792 TREE_OPERAND (arg, 1));
3793 break;
3795 case TRUTH_ANDIF_EXPR:
3796 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3797 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3798 if (loc1 == UNKNOWN_LOCATION)
3799 loc1 = loc;
3800 if (loc2 == UNKNOWN_LOCATION)
3801 loc2 = loc;
3802 t = build2 (TRUTH_ORIF_EXPR, type,
3803 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3804 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3805 break;
3807 case TRUTH_ORIF_EXPR:
3808 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3809 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3810 if (loc1 == UNKNOWN_LOCATION)
3811 loc1 = loc;
3812 if (loc2 == UNKNOWN_LOCATION)
3813 loc2 = loc;
3814 t = build2 (TRUTH_ANDIF_EXPR, type,
3815 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3816 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3817 break;
3819 case TRUTH_NOT_EXPR:
3820 return TREE_OPERAND (arg, 0);
3822 case COND_EXPR:
3824 tree arg1 = TREE_OPERAND (arg, 1);
3825 tree arg2 = TREE_OPERAND (arg, 2);
3827 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3828 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3829 if (loc1 == UNKNOWN_LOCATION)
3830 loc1 = loc;
3831 if (loc2 == UNKNOWN_LOCATION)
3832 loc2 = loc;
3834 /* A COND_EXPR may have a throw as one operand, which
3835 then has void type. Just leave void operands
3836 as they are. */
3837 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3838 VOID_TYPE_P (TREE_TYPE (arg1))
3839 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3840 VOID_TYPE_P (TREE_TYPE (arg2))
3841 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3842 break;
3845 case COMPOUND_EXPR:
3846 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3847 if (loc1 == UNKNOWN_LOCATION)
3848 loc1 = loc;
3849 t = build2 (COMPOUND_EXPR, type,
3850 TREE_OPERAND (arg, 0),
3851 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3852 break;
3854 case NON_LVALUE_EXPR:
3855 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3856 if (loc1 == UNKNOWN_LOCATION)
3857 loc1 = loc;
3858 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3860 CASE_CONVERT:
3861 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3863 t = build1 (TRUTH_NOT_EXPR, type, arg);
3864 break;
3867 /* ... fall through ... */
3869 case FLOAT_EXPR:
3870 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3871 if (loc1 == UNKNOWN_LOCATION)
3872 loc1 = loc;
3873 t = build1 (TREE_CODE (arg), type,
3874 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3875 break;
3877 case BIT_AND_EXPR:
3878 if (!integer_onep (TREE_OPERAND (arg, 1)))
3879 return NULL_TREE;
3880 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3881 break;
3883 case SAVE_EXPR:
3884 t = build1 (TRUTH_NOT_EXPR, type, arg);
3885 break;
3887 case CLEANUP_POINT_EXPR:
3888 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3889 if (loc1 == UNKNOWN_LOCATION)
3890 loc1 = loc;
3891 t = build1 (CLEANUP_POINT_EXPR, type,
3892 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3893 break;
3895 default:
3896 t = NULL_TREE;
3897 break;
3900 if (t)
3901 SET_EXPR_LOCATION (t, loc);
3903 return t;
3906 /* Return a simplified tree node for the truth-negation of ARG. This
3907 never alters ARG itself. We assume that ARG is an operation that
3908 returns a truth value (0 or 1).
3910 FIXME: one would think we would fold the result, but it causes
3911 problems with the dominator optimizer. */
3913 tree
3914 invert_truthvalue_loc (location_t loc, tree arg)
3916 tree tem;
3918 if (TREE_CODE (arg) == ERROR_MARK)
3919 return arg;
3921 tem = fold_truth_not_expr (loc, arg);
3922 if (!tem)
3924 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3925 SET_EXPR_LOCATION (tem, loc);
3928 return tem;
3931 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3932 operands are another bit-wise operation with a common input. If so,
3933 distribute the bit operations to save an operation and possibly two if
3934 constants are involved. For example, convert
3935 (A | B) & (A | C) into A | (B & C)
3936 Further simplification will occur if B and C are constants.
3938 If this optimization cannot be done, 0 will be returned. */
3940 static tree
3941 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3942 tree arg0, tree arg1)
3944 tree common;
3945 tree left, right;
3947 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3948 || TREE_CODE (arg0) == code
3949 || (TREE_CODE (arg0) != BIT_AND_EXPR
3950 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3951 return 0;
3953 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3955 common = TREE_OPERAND (arg0, 0);
3956 left = TREE_OPERAND (arg0, 1);
3957 right = TREE_OPERAND (arg1, 1);
3959 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3961 common = TREE_OPERAND (arg0, 0);
3962 left = TREE_OPERAND (arg0, 1);
3963 right = TREE_OPERAND (arg1, 0);
3965 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3967 common = TREE_OPERAND (arg0, 1);
3968 left = TREE_OPERAND (arg0, 0);
3969 right = TREE_OPERAND (arg1, 1);
3971 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3973 common = TREE_OPERAND (arg0, 1);
3974 left = TREE_OPERAND (arg0, 0);
3975 right = TREE_OPERAND (arg1, 0);
3977 else
3978 return 0;
3980 common = fold_convert_loc (loc, type, common);
3981 left = fold_convert_loc (loc, type, left);
3982 right = fold_convert_loc (loc, type, right);
3983 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3984 fold_build2_loc (loc, code, type, left, right));
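/* An illustrative sketch of the distribution above (not in the original
   sources): with B and C constant, folding

     (a | 0xF0) & (a | 0x0F)

   yields a | (0xF0 & 0x0F), and further constant folding reduces the
   inner AND to 0, leaving just `a'.  One bit operation replaces three.  */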
3987 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3988 with code CODE. This optimization is unsafe. */
3989 static tree
3990 distribute_real_division (location_t loc, enum tree_code code, tree type,
3991 tree arg0, tree arg1)
3993 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3994 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3996 /* (A / C) +- (B / C) -> (A +- B) / C. */
3997 if (mul0 == mul1
3998 && operand_equal_p (TREE_OPERAND (arg0, 1),
3999 TREE_OPERAND (arg1, 1), 0))
4000 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
4001 fold_build2_loc (loc, code, type,
4002 TREE_OPERAND (arg0, 0),
4003 TREE_OPERAND (arg1, 0)),
4004 TREE_OPERAND (arg0, 1));
4006 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
4007 if (operand_equal_p (TREE_OPERAND (arg0, 0),
4008 TREE_OPERAND (arg1, 0), 0)
4009 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
4010 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
4012 REAL_VALUE_TYPE r0, r1;
4013 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
4014 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
4015 if (!mul0)
4016 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
4017 if (!mul1)
4018 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
4019 real_arithmetic (&r0, code, &r0, &r1);
4020 return fold_build2_loc (loc, MULT_EXPR, type,
4021 TREE_OPERAND (arg0, 0),
4022 build_real (type, r0));
4025 return NULL_TREE;
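/* Hedged examples of the two rewrites above (not part of the original
   sources):

     (x / 3.0) - (y / 3.0)   ->   (x - y) / 3.0
     (x / 2.0) + (x / 4.0)   ->   x * 0.75

   The second form folds 1/2.0 + 1/4.0 into a single constant via
   real_arithmetic.  Both rewrites can change rounding and overflow
   behavior, which is why the comment above brands the optimization
   unsafe.  */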
4028 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4029 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
4031 static tree
4032 make_bit_field_ref (location_t loc, tree inner, tree type,
4033 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
4035 tree result, bftype;
4037 if (bitpos == 0)
4039 tree size = TYPE_SIZE (TREE_TYPE (inner));
4040 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4041 || POINTER_TYPE_P (TREE_TYPE (inner)))
4042 && host_integerp (size, 0)
4043 && tree_low_cst (size, 0) == bitsize)
4044 return fold_convert_loc (loc, type, inner);
4047 bftype = type;
4048 if (TYPE_PRECISION (bftype) != bitsize
4049 || TYPE_UNSIGNED (bftype) == !unsignedp)
4050 bftype = build_nonstandard_integer_type (bitsize, 0);
4052 result = build3 (BIT_FIELD_REF, bftype, inner,
4053 size_int (bitsize), bitsize_int (bitpos));
4054 SET_EXPR_LOCATION (result, loc);
4056 if (bftype != type)
4057 result = fold_convert_loc (loc, type, result);
4059 return result;
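/* As a sketch of the tree built above (illustrative only): a call with
   BITSIZE == 8 and BITPOS == 16 produces

     BIT_FIELD_REF <inner, 8, 16>

   i.e. an 8-bit read starting at bit 16 of INNER, converted back to
   TYPE when the intermediate bit-field type differs.  */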
4062 /* Optimize a bit-field compare.
4064 There are two cases: First is a compare against a constant and the
4065 second is a comparison of two items where the fields are at the same
4066 bit position relative to the start of a chunk (byte, halfword, word)
4067 large enough to contain it. In these cases we can avoid the shift
4068 implicit in bitfield extractions.
4070 For constants, we emit a compare of the shifted constant with the
4071 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4072 compared. For two fields at the same position, we do the ANDs with the
4073 similar mask and compare the result of the ANDs.
4075 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4076 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4077 are the left and right operands of the comparison, respectively.
4079 If the optimization described above can be done, we return the resulting
4080 tree. Otherwise we return zero. */
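/* A hedged sketch of the constant case (assuming a 3-bit field placed
   at bit 2 of a 32-bit word W, little-endian):

     s.f == 5

   becomes roughly

     (W & (7 << 2)) == (5 << 2)

   so a load, mask and compare replace the shift otherwise implied by
   extracting the bit-field.  The exact mask and shift depend on target
   endianness.  */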
4082 static tree
4083 optimize_bit_field_compare (location_t loc, enum tree_code code,
4084 tree compare_type, tree lhs, tree rhs)
4086 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4087 tree type = TREE_TYPE (lhs);
4088 tree signed_type, unsigned_type;
4089 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4090 enum machine_mode lmode, rmode, nmode;
4091 int lunsignedp, runsignedp;
4092 int lvolatilep = 0, rvolatilep = 0;
4093 tree linner, rinner = NULL_TREE;
4094 tree mask;
4095 tree offset;
4097 /* Get all the information about the extractions being done. If the bit size
4098 is the same as the size of the underlying object, we aren't doing an
4099 extraction at all and so can do nothing. We also don't want to
4100 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4101 then will no longer be able to replace it. */
4102 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4103 &lunsignedp, &lvolatilep, false);
4104 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4105 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
4106 return 0;
4108 if (!const_p)
4110 /* If this is not a constant, we can only do something if bit positions,
4111 sizes, and signedness are the same. */
4112 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4113 &runsignedp, &rvolatilep, false);
4115 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4116 || lunsignedp != runsignedp || offset != 0
4117 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4118 return 0;
4121 /* See if we can find a mode to refer to this field. We should be able to,
4122 but fail if we can't. */
4123 nmode = get_best_mode (lbitsize, lbitpos,
4124 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4125 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4126 TYPE_ALIGN (TREE_TYPE (rinner))),
4127 word_mode, lvolatilep || rvolatilep);
4128 if (nmode == VOIDmode)
4129 return 0;
4131 /* Set signed and unsigned types of the precision of this mode for the
4132 shifts below. */
4133 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4134 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4136 /* Compute the bit position and size for the new reference and our offset
4137 within it. If the new reference is the same size as the original, we
4138 won't optimize anything, so return zero. */
4139 nbitsize = GET_MODE_BITSIZE (nmode);
4140 nbitpos = lbitpos & ~ (nbitsize - 1);
4141 lbitpos -= nbitpos;
4142 if (nbitsize == lbitsize)
4143 return 0;
4145 if (BYTES_BIG_ENDIAN)
4146 lbitpos = nbitsize - lbitsize - lbitpos;
4148 /* Make the mask to be used against the extracted field. */
4149 mask = build_int_cst_type (unsigned_type, -1);
4150 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4151 mask = const_binop (RSHIFT_EXPR, mask,
4152 size_int (nbitsize - lbitsize - lbitpos), 0);
4154 if (! const_p)
4155 /* If not comparing with constant, just rework the comparison
4156 and return. */
4157 return fold_build2_loc (loc, code, compare_type,
4158 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4159 make_bit_field_ref (loc, linner,
4160 unsigned_type,
4161 nbitsize, nbitpos,
4162 1),
4163 mask),
4164 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4165 make_bit_field_ref (loc, rinner,
4166 unsigned_type,
4167 nbitsize, nbitpos,
4168 1),
4169 mask));
4171 /* Otherwise, we are handling the constant case. See if the constant is too
4172 big for the field. Warn and return a tree for 0 (false) if so. We do
4173 this not only for its own sake, but to avoid having to test for this
4174 error case below. If we didn't, we might generate wrong code.
4176 For unsigned fields, the constant shifted right by the field length should
4177 be all zero. For signed fields, the high-order bits should agree with
4178 the sign bit. */
4180 if (lunsignedp)
4182 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4183 fold_convert_loc (loc,
4184 unsigned_type, rhs),
4185 size_int (lbitsize), 0)))
4187 warning (0, "comparison is always %d due to width of bit-field",
4188 code == NE_EXPR);
4189 return constant_boolean_node (code == NE_EXPR, compare_type);
4192 else
4194 tree tem = const_binop (RSHIFT_EXPR,
4195 fold_convert_loc (loc, signed_type, rhs),
4196 size_int (lbitsize - 1), 0);
4197 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4199 warning (0, "comparison is always %d due to width of bit-field",
4200 code == NE_EXPR);
4201 return constant_boolean_node (code == NE_EXPR, compare_type);
4205 /* Single-bit compares should always be against zero. */
4206 if (lbitsize == 1 && ! integer_zerop (rhs))
4208 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4209 rhs = build_int_cst (type, 0);
4212 /* Make a new bitfield reference, shift the constant over the
4213 appropriate number of bits and mask it with the computed mask
4214 (in case this was a signed field). If we changed it, make a new one. */
4215 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
4216 if (lvolatilep)
4218 TREE_SIDE_EFFECTS (lhs) = 1;
4219 TREE_THIS_VOLATILE (lhs) = 1;
4222 rhs = const_binop (BIT_AND_EXPR,
4223 const_binop (LSHIFT_EXPR,
4224 fold_convert_loc (loc, unsigned_type, rhs),
4225 size_int (lbitpos), 0),
4226 mask, 0);
4228 lhs = build2 (code, compare_type,
4229 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4230 rhs);
4231 SET_EXPR_LOCATION (lhs, loc);
4232 return lhs;
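/* For illustration (not in the original sources): comparing a 3-bit
   unsigned field F against 9 trips the width check above, since
   9 >> 3 != 0.  The whole comparison folds to a constant and the user
   gets "comparison is always 0 due to width of bit-field" for F == 9.  */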
4235 /* Subroutine for fold_truthop: decode a field reference.
4237 If EXP is a comparison reference, we return the innermost reference.
4239 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4240 set to the starting bit number.
4242 If the innermost field can be completely contained in a mode-sized
4243 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4245 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4246 otherwise it is not changed.
4248 *PUNSIGNEDP is set to the signedness of the field.
4250 *PMASK is set to the mask used. This is either contained in a
4251 BIT_AND_EXPR or derived from the width of the field.
4253 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4255 Return 0 if this is not a component reference or is one that we can't
4256 do anything with. */
4258 static tree
4259 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4260 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4261 int *punsignedp, int *pvolatilep,
4262 tree *pmask, tree *pand_mask)
4264 tree outer_type = 0;
4265 tree and_mask = 0;
4266 tree mask, inner, offset;
4267 tree unsigned_type;
4268 unsigned int precision;
4270 /* All the optimizations using this function assume integer fields.
4271 There are problems with FP fields since the type_for_size call
4272 below can fail for, e.g., XFmode. */
4273 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4274 return 0;
4276 /* We are interested in the bare arrangement of bits, so strip everything
4277 that doesn't affect the machine mode. However, record the type of the
4278 outermost expression if it may matter below. */
4279 if (CONVERT_EXPR_P (exp)
4280 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4281 outer_type = TREE_TYPE (exp);
4282 STRIP_NOPS (exp);
4284 if (TREE_CODE (exp) == BIT_AND_EXPR)
4286 and_mask = TREE_OPERAND (exp, 1);
4287 exp = TREE_OPERAND (exp, 0);
4288 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4289 if (TREE_CODE (and_mask) != INTEGER_CST)
4290 return 0;
4293 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4294 punsignedp, pvolatilep, false);
4295 if ((inner == exp && and_mask == 0)
4296 || *pbitsize < 0 || offset != 0
4297 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4298 return 0;
4300 /* If the number of bits in the reference is the same as the bitsize of
4301 the outer type, then the outer type gives the signedness. Otherwise
4302 (in case of a small bitfield) the signedness is unchanged. */
4303 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4304 *punsignedp = TYPE_UNSIGNED (outer_type);
4306 /* Compute the mask to access the bitfield. */
4307 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4308 precision = TYPE_PRECISION (unsigned_type);
4310 mask = build_int_cst_type (unsigned_type, -1);
4312 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4313 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4315 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4316 if (and_mask != 0)
4317 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4318 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4320 *pmask = mask;
4321 *pand_mask = and_mask;
4322 return inner;
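/* A sketch of a typical decode (illustrative assumptions only): for

     exp = s.f & 0x3

   with F a 5-bit unsigned field, the BIT_AND_EXPR is stripped,
   *PAND_MASK is set to 0x3, *PMASK becomes the 5-bit field mask ANDed
   with 0x3, and the returned tree is the object containing the field.  */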
4325 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4326 bit positions. */
4328 static int
4329 all_ones_mask_p (const_tree mask, int size)
4331 tree type = TREE_TYPE (mask);
4332 unsigned int precision = TYPE_PRECISION (type);
4333 tree tmask;
4335 tmask = build_int_cst_type (signed_type_for (type), -1);
4337 return
4338 tree_int_cst_equal (mask,
4339 const_binop (RSHIFT_EXPR,
4340 const_binop (LSHIFT_EXPR, tmask,
4341 size_int (precision - size),
4342 0),
4343 size_int (precision - size), 0));
4346 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4347 represents the sign bit of EXP's type. If EXP represents a sign
4348 or zero extension, also test VAL against the unextended type.
4349 The return value is the (sub)expression whose sign bit is VAL,
4350 or NULL_TREE otherwise. */
4352 static tree
4353 sign_bit_p (tree exp, const_tree val)
4355 unsigned HOST_WIDE_INT mask_lo, lo;
4356 HOST_WIDE_INT mask_hi, hi;
4357 int width;
4358 tree t;
4360 /* Tree EXP must have an integral type. */
4361 t = TREE_TYPE (exp);
4362 if (! INTEGRAL_TYPE_P (t))
4363 return NULL_TREE;
4365 /* Tree VAL must be an integer constant. */
4366 if (TREE_CODE (val) != INTEGER_CST
4367 || TREE_OVERFLOW (val))
4368 return NULL_TREE;
4370 width = TYPE_PRECISION (t);
4371 if (width > HOST_BITS_PER_WIDE_INT)
4373 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4374 lo = 0;
4376 mask_hi = ((unsigned HOST_WIDE_INT) -1
4377 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4378 mask_lo = -1;
4380 else
4382 hi = 0;
4383 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4385 mask_hi = 0;
4386 mask_lo = ((unsigned HOST_WIDE_INT) -1
4387 >> (HOST_BITS_PER_WIDE_INT - width));
4390 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4391 treat VAL as if it were unsigned. */
4392 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4393 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4394 return exp;
4396 /* Handle extension from a narrower type. */
4397 if (TREE_CODE (exp) == NOP_EXPR
4398 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4399 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4401 return NULL_TREE;
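/* Illustrative check (assuming 32-bit int and 64-bit HOST_WIDE_INT):
   for a 32-bit signed X, sign_bit_p (X, 0x80000000) returns X, since
   the constant's low word matches 1 << 31 and no bits are set outside
   the type's precision.  Any other constant yields NULL_TREE.  */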
4404 /* Subroutine for fold_truthop: determine if an operand is simple enough
4405 to be evaluated unconditionally. */
4407 static int
4408 simple_operand_p (const_tree exp)
4410 /* Strip any conversions that don't change the machine mode. */
4411 STRIP_NOPS (exp);
4413 return (CONSTANT_CLASS_P (exp)
4414 || TREE_CODE (exp) == SSA_NAME
4415 || (DECL_P (exp)
4416 && ! TREE_ADDRESSABLE (exp)
4417 && ! TREE_THIS_VOLATILE (exp)
4418 && ! DECL_NONLOCAL (exp)
4419 /* Don't regard global variables as simple. They may be
4420 allocated in ways unknown to the compiler (shared memory,
4421 #pragma weak, etc). */
4422 && ! TREE_PUBLIC (exp)
4423 && ! DECL_EXTERNAL (exp)
4424 /* Loading a static variable is unduly expensive, but global
4425 registers aren't expensive. */
4426 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4429 /* The following functions are subroutines to fold_range_test and allow it to
4430 try to change a logical combination of comparisons into a range test.
4432 For example, both
4433 X == 2 || X == 3 || X == 4 || X == 5
4435 X >= 2 && X <= 5
4436 are converted to
4437 (unsigned) (X - 2) <= 3
4439 We describe each set of comparisons as being either inside or outside
4440 a range, using a variable named like IN_P, and then describe the
4441 range with a lower and upper bound. If one of the bounds is omitted,
4442 it represents either the highest or lowest value of the type.
4444 In the comments below, we represent a range by two numbers in brackets
4445 preceded by a "+" to designate being inside that range, or a "-" to
4446 designate being outside that range, so the condition can be inverted by
4447 flipping the prefix. An omitted bound is represented by a "-". For
4448 example, "- [-, 10]" means being outside the range starting at the lowest
4449 possible value and ending at 10, in other words, being greater than 10.
4450 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4451 always false.
4453 We set up things so that the missing bounds are handled in a consistent
4454 manner so neither a missing bound nor "true" and "false" need to be
4455 handled using a special case. */
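/* As a concrete sketch of the encoding (not in the original text):
   X >= 2 && X <= 5 is the range "+ [2, 5]" on X, and the chain
   X == 2 || X == 3 || X == 4 || X == 5 describes the same set.  Both
   can be tested as

     (unsigned) (X - 2) <= 3

   because the unsigned subtraction wraps values below 2 up past the
   top of the type, so exactly the members of [2, 5] land in [0, 3].  */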
4457 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4458 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4459 and UPPER1_P are nonzero if the respective argument is an upper bound
4460 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4461 must be specified for a comparison. ARG1 will be converted to ARG0's
4462 type if both are specified. */
4464 static tree
4465 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4466 tree arg1, int upper1_p)
4468 tree tem;
4469 int result;
4470 int sgn0, sgn1;
4472 /* If neither arg represents infinity, do the normal operation.
4473 Else, if not a comparison, return infinity. Else handle the special
4474 comparison rules. Note that most of the cases below won't occur, but
4475 are handled for consistency. */
4477 if (arg0 != 0 && arg1 != 0)
4479 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4480 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4481 STRIP_NOPS (tem);
4482 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4485 if (TREE_CODE_CLASS (code) != tcc_comparison)
4486 return 0;
4488 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4490 for neither. In real mathematics, we cannot assume open-ended ranges are
4490 the same. But, this is computer arithmetic, where numbers are finite.
4491 We can therefore make the transformation of any unbounded range with
4492 the value Z, Z being greater than any representable number. This permits
4493 us to treat unbounded ranges as equal. */
4494 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4495 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4496 switch (code)
4498 case EQ_EXPR:
4499 result = sgn0 == sgn1;
4500 break;
4501 case NE_EXPR:
4502 result = sgn0 != sgn1;
4503 break;
4504 case LT_EXPR:
4505 result = sgn0 < sgn1;
4506 break;
4507 case LE_EXPR:
4508 result = sgn0 <= sgn1;
4509 break;
4510 case GT_EXPR:
4511 result = sgn0 > sgn1;
4512 break;
4513 case GE_EXPR:
4514 result = sgn0 >= sgn1;
4515 break;
4516 default:
4517 gcc_unreachable ();
4520 return constant_boolean_node (result, type);
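/* Sketch of the infinity handling above (illustrative only): a missing
   upper bound acts like +Z, greater than every representable value, so
   range_binop (GT_EXPR, type, NULL_TREE, 1, c, 0) folds to true for
   any constant C, and two missing upper bounds compare equal.  */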
4523 /* Given EXP, a logical expression, set the range it is testing into
4524 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4525 actually being tested. *PLOW and *PHIGH will be made of the same
4526 type as the returned expression. If EXP is not a comparison, we
4527 will most likely not be returning a useful value and range. Set
4528 *STRICT_OVERFLOW_P to true if the return value is only valid
4529 because signed overflow is undefined; otherwise, do not change
4530 *STRICT_OVERFLOW_P. */
4532 tree
4533 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4534 bool *strict_overflow_p)
4536 enum tree_code code;
4537 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4538 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4539 int in_p, n_in_p;
4540 tree low, high, n_low, n_high;
4541 location_t loc = EXPR_LOCATION (exp);
4543 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4544 and see if we can refine the range. Some of the cases below may not
4545 happen, but it doesn't seem worth worrying about this. We "continue"
4546 the outer loop when we've changed something; otherwise we "break"
4547 the switch, which will "break" the while. */
4549 in_p = 0;
4550 low = high = build_int_cst (TREE_TYPE (exp), 0);
4552 while (1)
4554 code = TREE_CODE (exp);
4555 exp_type = TREE_TYPE (exp);
4557 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4559 if (TREE_OPERAND_LENGTH (exp) > 0)
4560 arg0 = TREE_OPERAND (exp, 0);
4561 if (TREE_CODE_CLASS (code) == tcc_comparison
4562 || TREE_CODE_CLASS (code) == tcc_unary
4563 || TREE_CODE_CLASS (code) == tcc_binary)
4564 arg0_type = TREE_TYPE (arg0);
4565 if (TREE_CODE_CLASS (code) == tcc_binary
4566 || TREE_CODE_CLASS (code) == tcc_comparison
4567 || (TREE_CODE_CLASS (code) == tcc_expression
4568 && TREE_OPERAND_LENGTH (exp) > 1))
4569 arg1 = TREE_OPERAND (exp, 1);
4572 switch (code)
4574 case TRUTH_NOT_EXPR:
4575 in_p = ! in_p, exp = arg0;
4576 continue;
4578 case EQ_EXPR: case NE_EXPR:
4579 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4580 /* We can only do something if the range is testing for zero
4581 and if the second operand is an integer constant. Note that
4582 saying something is "in" the range we make is done by
4583 complementing IN_P, since it is set up for the initial case of
4584 being not equal to zero; "out" is leaving it alone.
4585 if (low == 0 || high == 0
4586 || ! integer_zerop (low) || ! integer_zerop (high)
4587 || TREE_CODE (arg1) != INTEGER_CST)
4588 break;
4590 switch (code)
4592 case NE_EXPR: /* - [c, c] */
4593 low = high = arg1;
4594 break;
4595 case EQ_EXPR: /* + [c, c] */
4596 in_p = ! in_p, low = high = arg1;
4597 break;
4598 case GT_EXPR: /* - [-, c] */
4599 low = 0, high = arg1;
4600 break;
4601 case GE_EXPR: /* + [c, -] */
4602 in_p = ! in_p, low = arg1, high = 0;
4603 break;
4604 case LT_EXPR: /* - [c, -] */
4605 low = arg1, high = 0;
4606 break;
4607 case LE_EXPR: /* + [-, c] */
4608 in_p = ! in_p, low = 0, high = arg1;
4609 break;
4610 default:
4611 gcc_unreachable ();
4614 /* If this is an unsigned comparison, we also know that EXP is
4615 greater than or equal to zero. We base the range tests we make
4616 on that fact, so we record it here so we can parse existing
4617 range tests. We test arg0_type since often the return type
4618 of, e.g. EQ_EXPR, is boolean. */
4619 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4621 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4622 in_p, low, high, 1,
4623 build_int_cst (arg0_type, 0),
4624 NULL_TREE))
4625 break;
4627 in_p = n_in_p, low = n_low, high = n_high;
4629 /* If the high bound is missing, but we have a nonzero low
4630 bound, reverse the range so it goes from zero to the low bound
4631 minus 1. */
4632 if (high == 0 && low && ! integer_zerop (low))
4634 in_p = ! in_p;
4635 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4636 integer_one_node, 0);
4637 low = build_int_cst (arg0_type, 0);
4641 exp = arg0;
4642 continue;
4644 case NEGATE_EXPR:
4645 /* (-x) IN [a,b] -> x in [-b, -a] */
4646 n_low = range_binop (MINUS_EXPR, exp_type,
4647 build_int_cst (exp_type, 0),
4648 0, high, 1);
4649 n_high = range_binop (MINUS_EXPR, exp_type,
4650 build_int_cst (exp_type, 0),
4651 0, low, 0);
4652 low = n_low, high = n_high;
4653 exp = arg0;
4654 continue;
4656 case BIT_NOT_EXPR:
4657 /* ~ X -> -X - 1 */
4658 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4659 build_int_cst (exp_type, 1));
4660 SET_EXPR_LOCATION (exp, loc);
4661 continue;
4663 case PLUS_EXPR: case MINUS_EXPR:
4664 if (TREE_CODE (arg1) != INTEGER_CST)
4665 break;
4667 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4668 move a constant to the other side. */
4669 if (!TYPE_UNSIGNED (arg0_type)
4670 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4671 break;
4673 /* If EXP is signed, any overflow in the computation is undefined,
4674 so we don't worry about it so long as our computations on
4675 the bounds don't overflow. For unsigned, overflow is defined
4676 and this is exactly the right thing. */
4677 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4678 arg0_type, low, 0, arg1, 0);
4679 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4680 arg0_type, high, 1, arg1, 0);
4681 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4682 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4683 break;
4685 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4686 *strict_overflow_p = true;
4688 /* Check for an unsigned range which has wrapped around the maximum
4689 value thus making n_high < n_low, and normalize it. */
4690 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4692 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4693 integer_one_node, 0);
4694 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4695 integer_one_node, 0);
4697 /* If the range is of the form +/- [ x+1, x ], we won't
4698 be able to normalize it. But then, it represents the
4699 whole range or the empty set, so make it
4700 +/- [ -, - ]. */
4701 if (tree_int_cst_equal (n_low, low)
4702 && tree_int_cst_equal (n_high, high))
4703 low = high = 0;
4704 else
4705 in_p = ! in_p;
4707 else
4708 low = n_low, high = n_high;
4710 exp = arg0;
4711 continue;
4713 CASE_CONVERT: case NON_LVALUE_EXPR:
4714 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4715 break;
4717 if (! INTEGRAL_TYPE_P (arg0_type)
4718 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4719 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4720 break;
4722 n_low = low, n_high = high;
4724 if (n_low != 0)
4725 n_low = fold_convert_loc (loc, arg0_type, n_low);
4727 if (n_high != 0)
4728 n_high = fold_convert_loc (loc, arg0_type, n_high);
4731 /* If we're converting arg0 from an unsigned type, to exp,
4732 a signed type, we will be doing the comparison as unsigned.
4733 The tests above have already verified that LOW and HIGH
4734 are both positive.
4736 So we have to ensure that we will handle large unsigned
4737 values the same way that the current signed bounds treat
4738 negative values. */
4740 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4742 tree high_positive;
4743 tree equiv_type;
4744 /* For fixed-point modes, we need to pass the saturating flag
4745 as the 2nd parameter. */
4746 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4747 equiv_type = lang_hooks.types.type_for_mode
4748 (TYPE_MODE (arg0_type),
4749 TYPE_SATURATING (arg0_type));
4750 else
4751 equiv_type = lang_hooks.types.type_for_mode
4752 (TYPE_MODE (arg0_type), 1);
4754 /* A range without an upper bound is, naturally, unbounded.
4755 Since convert would have cropped a very large value, use
4756 the max value for the destination type. */
4757 high_positive
4758 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4759 : TYPE_MAX_VALUE (arg0_type);
4761 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4762 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4763 fold_convert_loc (loc, arg0_type,
4764 high_positive),
4765 build_int_cst (arg0_type, 1));
4767 /* If the low bound is specified, "and" the range with the
4768 range for which the original unsigned value will be
4769 positive. */
4770 if (low != 0)
4772 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4773 1, n_low, n_high, 1,
4774 fold_convert_loc (loc, arg0_type,
4775 integer_zero_node),
4776 high_positive))
4777 break;
4779 in_p = (n_in_p == in_p);
4781 else
4783 /* Otherwise, "or" the range with the range of the input
4784 that will be interpreted as negative. */
4785 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4786 0, n_low, n_high, 1,
4787 fold_convert_loc (loc, arg0_type,
4788 integer_zero_node),
4789 high_positive))
4790 break;
4792 in_p = (in_p != n_in_p);
4796 exp = arg0;
4797 low = n_low, high = n_high;
4798 continue;
4800 default:
4801 break;
4804 break;
4807 /* If EXP is a constant, we can evaluate whether this is true or false. */
4808 if (TREE_CODE (exp) == INTEGER_CST)
4810 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4811 exp, 0, low, 0))
4812 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4813 exp, 1, high, 1)));
4814 low = high = 0;
4815 exp = 0;
4818 *pin_p = in_p, *plow = low, *phigh = high;
4819 return exp;
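/* Example of a decoded range (a sketch, not from the original sources):
   for EXP = (x > 10), the GT_EXPR case records "- [-, 10]", i.e. x is
   outside the range from the type minimum to 10; the function returns x
   with *PIN_P == 0, *PLOW == NULL and *PHIGH == 10.  */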
4822 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4823 type, TYPE, return an expression to test if EXP is in (or out of, depending
4824 on IN_P) the range. Return 0 if the test couldn't be created. */
4826 tree
4827 build_range_check (location_t loc, tree type, tree exp, int in_p,
4828 tree low, tree high)
4830 tree etype = TREE_TYPE (exp), value;
4832 #ifdef HAVE_canonicalize_funcptr_for_compare
4833 /* Disable this optimization for function pointer expressions
4834 on targets that require function pointer canonicalization. */
4835 if (HAVE_canonicalize_funcptr_for_compare
4836 && TREE_CODE (etype) == POINTER_TYPE
4837 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4838 return NULL_TREE;
4839 #endif
4841 if (! in_p)
4843 value = build_range_check (loc, type, exp, 1, low, high);
4844 if (value != 0)
4845 return invert_truthvalue_loc (loc, value);
4847 return 0;
4850 if (low == 0 && high == 0)
4851 return build_int_cst (type, 1);
4853 if (low == 0)
4854 return fold_build2_loc (loc, LE_EXPR, type, exp,
4855 fold_convert_loc (loc, etype, high));
4857 if (high == 0)
4858 return fold_build2_loc (loc, GE_EXPR, type, exp,
4859 fold_convert_loc (loc, etype, low));
4861 if (operand_equal_p (low, high, 0))
4862 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4863 fold_convert_loc (loc, etype, low));
4865 if (integer_zerop (low))
4867 if (! TYPE_UNSIGNED (etype))
4869 etype = unsigned_type_for (etype);
4870 high = fold_convert_loc (loc, etype, high);
4871 exp = fold_convert_loc (loc, etype, exp);
4873 return build_range_check (loc, type, exp, 1, 0, high);
4876 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4877 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4879 unsigned HOST_WIDE_INT lo;
4880 HOST_WIDE_INT hi;
4881 int prec;
4883 prec = TYPE_PRECISION (etype);
4884 if (prec <= HOST_BITS_PER_WIDE_INT)
4886 hi = 0;
4887 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4889 else
4891 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4892 lo = (unsigned HOST_WIDE_INT) -1;
4895 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4897 if (TYPE_UNSIGNED (etype))
4899 tree signed_etype = signed_type_for (etype);
4900 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4901 etype
4902 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4903 else
4904 etype = signed_etype;
4905 exp = fold_convert_loc (loc, etype, exp);
4907 return fold_build2_loc (loc, GT_EXPR, type, exp,
4908 build_int_cst (etype, 0));
4912 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4913 This requires wrap-around arithmetic for the type of the expression.
4914 First make sure that arithmetic in this type is valid, then make sure
4915 that it wraps around. */
4916 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4917 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4918 TYPE_UNSIGNED (etype));
4920 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4922 tree utype, minv, maxv;
4924 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4925 for the type in question, as we rely on this here. */
4926 utype = unsigned_type_for (etype);
4927 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4928 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4929 integer_one_node, 1);
4930 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4932 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4933 minv, 1, maxv, 1)))
4934 etype = utype;
4935 else
4936 return 0;
4939 high = fold_convert_loc (loc, etype, high);
4940 low = fold_convert_loc (loc, etype, low);
4941 exp = fold_convert_loc (loc, etype, exp);
4943 value = const_binop (MINUS_EXPR, high, low, 0);
4946 if (POINTER_TYPE_P (etype))
4948 if (value != 0 && !TREE_OVERFLOW (value))
4950 low = fold_convert_loc (loc, sizetype, low);
4951 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4952 return build_range_check (loc, type,
4953 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4954 etype, exp, low),
4955 1, build_int_cst (etype, 0), value);
4957 return 0;
4960 if (value != 0 && !TREE_OVERFLOW (value))
4961 return build_range_check (loc, type,
4962 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4963 1, build_int_cst (etype, 0), value);
4965 return 0;
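/* A sketch of the common output (illustrative): for in_p, low = 2 and
   high = 5 on an expression whose type wraps around, the tail of the
   function emits

     (unsigned) (exp - 2) <= 3

   by recursing with EXP - LOW and a zero low bound; the recursive
   call's integer_zerop (low) case then reduces the test to a single
   unsigned comparison against HIGH - LOW.  */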
4968 /* Return the predecessor of VAL in its type, handling the infinite case. */
4970 static tree
4971 range_predecessor (tree val)
4973 tree type = TREE_TYPE (val);
4975 if (INTEGRAL_TYPE_P (type)
4976 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4977 return 0;
4978 else
4979 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4982 /* Return the successor of VAL in its type, handling the infinite case. */
4984 static tree
4985 range_successor (tree val)
4987 tree type = TREE_TYPE (val);
4989 if (INTEGRAL_TYPE_P (type)
4990 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4991 return 0;
4992 else
4993 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4996 /* Given two ranges, see if we can merge them into one. Return 1 if we
4997 can, 0 if we can't. Set the output range into the specified parameters. */
4999 bool
5000 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5001 tree high0, int in1_p, tree low1, tree high1)
5003 int no_overlap;
5004 int subset;
5005 int temp;
5006 tree tem;
5007 int in_p;
5008 tree low, high;
5009 int lowequal = ((low0 == 0 && low1 == 0)
5010 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5011 low0, 0, low1, 0)));
5012 int highequal = ((high0 == 0 && high1 == 0)
5013 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5014 high0, 1, high1, 1)));
5016 /* Make range 0 be the range that starts first, or ends last if they
5017 start at the same value. Swap them if it isn't. */
5018 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5019 low0, 0, low1, 0))
5020 || (lowequal
5021 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5022 high1, 1, high0, 1))))
5024 temp = in0_p, in0_p = in1_p, in1_p = temp;
5025 tem = low0, low0 = low1, low1 = tem;
5026 tem = high0, high0 = high1, high1 = tem;
5029 /* Now flag two cases, whether the ranges are disjoint or whether the
5030 second range is totally subsumed in the first. Note that the tests
5031 below are simplified by the ones above. */
5032 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5033 high0, 1, low1, 0));
5034 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5035 high1, 1, high0, 1));
5037 /* We now have four cases, depending on whether we are including or
5038 excluding the two ranges. */
5039 if (in0_p && in1_p)
5041 /* If they don't overlap, the result is false. If the second range
5042 is a subset it is the result. Otherwise, the range is from the start
5043 of the second to the end of the first. */
5044 if (no_overlap)
5045 in_p = 0, low = high = 0;
5046 else if (subset)
5047 in_p = 1, low = low1, high = high1;
5048 else
5049 in_p = 1, low = low1, high = high0;
5052 else if (in0_p && ! in1_p)
5054 /* If they don't overlap, the result is the first range. If they are
5055 equal, the result is false. If the second range is a subset of the
5056 first, and the ranges begin at the same place, we go from just after
5057 the end of the second range to the end of the first. If the second
5058 range is not a subset of the first, or if it is a subset and both
5059 ranges end at the same place, the range starts at the start of the
5060 first range and ends just before the second range.
5061 Otherwise, we can't describe this as a single range. */
5062 if (no_overlap)
5063 in_p = 1, low = low0, high = high0;
5064 else if (lowequal && highequal)
5065 in_p = 0, low = high = 0;
5066 else if (subset && lowequal)
5068 low = range_successor (high1);
5069 high = high0;
5070 in_p = 1;
5071 if (low == 0)
5073 /* We are in the weird situation where high0 > high1 but
5074 high1 has no successor. Punt. */
5075 return 0;
5078 else if (! subset || highequal)
5080 low = low0;
5081 high = range_predecessor (low1);
5082 in_p = 1;
5083 if (high == 0)
5085 /* low0 < low1 but low1 has no predecessor. Punt. */
5086 return 0;
5089 else
5090 return 0;
5093 else if (! in0_p && in1_p)
5095 /* If they don't overlap, the result is the second range. If the second
5096 is a subset of the first, the result is false. Otherwise,
5097 the range starts just after the first range and ends at the
5098 end of the second. */
5099 if (no_overlap)
5100 in_p = 1, low = low1, high = high1;
5101 else if (subset || highequal)
5102 in_p = 0, low = high = 0;
5103 else
5105 low = range_successor (high0);
5106 high = high1;
5107 in_p = 1;
5108 if (low == 0)
5110 /* high1 > high0 but high0 has no successor. Punt. */
5111 return 0;
5116 else
5118 /* The case where we are excluding both ranges. Here the complex case
5119 is if they don't overlap. In that case, the only time we have a
5120 range is if they are adjacent. If the second is a subset of the
5121 first, the result is the first. Otherwise, the range to exclude
5122 starts at the beginning of the first range and ends at the end of the
5123 second. */
5124 if (no_overlap)
5126 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5127 range_successor (high0),
5128 1, low1, 0)))
5129 in_p = 0, low = low0, high = high1;
5130 else
5132 /* Canonicalize - [min, x] into - [-, x]. */
5133 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5134 switch (TREE_CODE (TREE_TYPE (low0)))
5136 case ENUMERAL_TYPE:
5137 if (TYPE_PRECISION (TREE_TYPE (low0))
5138 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5139 break;
5140 /* FALLTHROUGH */
5141 case INTEGER_TYPE:
5142 if (tree_int_cst_equal (low0,
5143 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5144 low0 = 0;
5145 break;
5146 case POINTER_TYPE:
5147 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5148 && integer_zerop (low0))
5149 low0 = 0;
5150 break;
5151 default:
5152 break;
5155 /* Canonicalize - [x, max] into - [x, -]. */
5156 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5157 switch (TREE_CODE (TREE_TYPE (high1)))
5159 case ENUMERAL_TYPE:
5160 if (TYPE_PRECISION (TREE_TYPE (high1))
5161 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5162 break;
5163 /* FALLTHROUGH */
5164 case INTEGER_TYPE:
5165 if (tree_int_cst_equal (high1,
5166 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5167 high1 = 0;
5168 break;
5169 case POINTER_TYPE:
5170 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5171 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5172 high1, 1,
5173 integer_one_node, 1)))
5174 high1 = 0;
5175 break;
5176 default:
5177 break;
5180 /* The ranges might be also adjacent between the maximum and
5181 minimum values of the given type. For
5182 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5183 return + [x + 1, y - 1]. */
5184 if (low0 == 0 && high1 == 0)
5186 low = range_successor (high0);
5187 high = range_predecessor (low1);
5188 if (low == 0 || high == 0)
5189 return 0;
5191 in_p = 1;
5193 else
5194 return 0;
5197 else if (subset)
5198 in_p = 0, low = low0, high = high0;
5199 else
5200 in_p = 0, low = low0, high = high1;
5203 *pin_p = in_p, *plow = low, *phigh = high;
5204 return 1;
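/* Worked sketch (not part of the original sources): merging the two
   included ranges + [2, 5] and + [4, 9] for an AND gives + [4, 5]:
   the ranges overlap, neither subsumes the other, so the result runs
   from the start of the second to the end of the first.  For an OR of
   - [2, 5] and - [6, 9] (both excluded and adjacent), the canonical
   answer is - [2, 9].  */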
5208 /* Subroutine of fold, looking inside expressions of the form
5209 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5210 of the COND_EXPR. This function is being used also to optimize
5211 A op B ? C : A, by reversing the comparison first.
5213 Return a folded expression whose code is not a COND_EXPR
5214 anymore, or NULL_TREE if no folding opportunity is found. */
5216 static tree
5217 fold_cond_expr_with_comparison (location_t loc, tree type,
5218 tree arg0, tree arg1, tree arg2)
5220 enum tree_code comp_code = TREE_CODE (arg0);
5221 tree arg00 = TREE_OPERAND (arg0, 0);
5222 tree arg01 = TREE_OPERAND (arg0, 1);
5223 tree arg1_type = TREE_TYPE (arg1);
5224 tree tem;
5226 STRIP_NOPS (arg1);
5227 STRIP_NOPS (arg2);
5229 /* If we have A op 0 ? A : -A, consider applying the following
5230 transformations:
5232 A == 0? A : -A same as -A
5233 A != 0? A : -A same as A
5234 A >= 0? A : -A same as abs (A)
5235 A > 0? A : -A same as abs (A)
5236 A <= 0? A : -A same as -abs (A)
5237 A < 0? A : -A same as -abs (A)
5239 None of these transformations work for modes with signed
5240 zeros. If A is +/-0, the first two transformations will
5241 change the sign of the result (from +0 to -0, or vice
5242 versa). The last four will fix the sign of the result,
5243 even though the original expressions could be positive or
5244 negative, depending on the sign of A.
5246 Note that all these transformations are correct if A is
5247 NaN, since the two alternatives (A and -A) are also NaNs. */
5248 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5249 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5250 ? real_zerop (arg01)
5251 : integer_zerop (arg01))
5252 && ((TREE_CODE (arg2) == NEGATE_EXPR
5253 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5254 /* In the case that A is of the form X-Y, '-A' (arg2) may
5255 have already been folded to Y-X, check for that. */
5256 || (TREE_CODE (arg1) == MINUS_EXPR
5257 && TREE_CODE (arg2) == MINUS_EXPR
5258 && operand_equal_p (TREE_OPERAND (arg1, 0),
5259 TREE_OPERAND (arg2, 1), 0)
5260 && operand_equal_p (TREE_OPERAND (arg1, 1),
5261 TREE_OPERAND (arg2, 0), 0))))
5262 switch (comp_code)
5264 case EQ_EXPR:
5265 case UNEQ_EXPR:
5266 tem = fold_convert_loc (loc, arg1_type, arg1);
5267 return pedantic_non_lvalue_loc (loc,
5268 fold_convert_loc (loc, type,
5269 negate_expr (tem)));
5270 case NE_EXPR:
5271 case LTGT_EXPR:
5272 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5273 case UNGE_EXPR:
5274 case UNGT_EXPR:
5275 if (flag_trapping_math)
5276 break;
5277 /* Fall through. */
5278 case GE_EXPR:
5279 case GT_EXPR:
5280 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5281 arg1 = fold_convert_loc (loc, signed_type_for
5282 (TREE_TYPE (arg1)), arg1);
5283 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5284 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5285 case UNLE_EXPR:
5286 case UNLT_EXPR:
5287 if (flag_trapping_math)
5288 break;
5289 case LE_EXPR:
5290 case LT_EXPR:
5291 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5292 arg1 = fold_convert_loc (loc, signed_type_for
5293 (TREE_TYPE (arg1)), arg1);
5294 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5295 return negate_expr (fold_convert_loc (loc, type, tem));
5296 default:
5297 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5298 break;
5301 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5302 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5303 both transformations are correct when A is NaN: A != 0
5304 is then true, and A == 0 is false. */
5306 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5307 && integer_zerop (arg01) && integer_zerop (arg2))
5309 if (comp_code == NE_EXPR)
5310 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5311 else if (comp_code == EQ_EXPR)
5312 return build_int_cst (type, 0);
5315 /* Try some transformations of A op B ? A : B.
5317 A == B? A : B same as B
5318 A != B? A : B same as A
5319 A >= B? A : B same as max (A, B)
5320 A > B? A : B same as max (B, A)
5321 A <= B? A : B same as min (A, B)
5322 A < B? A : B same as min (B, A)
5324 As above, these transformations don't work in the presence
5325 of signed zeros. For example, if A and B are zeros of
5326 opposite sign, the first two transformations will change
5327 the sign of the result. In the last four, the original
5328 expressions give different results for (A=+0, B=-0) and
5329 (A=-0, B=+0), but the transformed expressions do not.
5331 The first two transformations are correct if either A or B
5332 is a NaN. In the first transformation, the condition will
5333 be false, and B will indeed be chosen. In the case of the
5334 second transformation, the condition A != B will be true,
5335 and A will be chosen.
5337 The conversions to max() and min() are not correct if B is
5338 a number and A is not. The conditions in the original
5339 expressions will be false, so all four give B. The min()
5340 and max() versions would give a NaN instead. */
5341 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5342 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5343 /* Avoid these transformations if the COND_EXPR may be used
5344 as an lvalue in the C++ front-end. PR c++/19199. */
5345 && (in_gimple_form
5346 || (strcmp (lang_hooks.name, "GNU C++") != 0
5347 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5348 || ! maybe_lvalue_p (arg1)
5349 || ! maybe_lvalue_p (arg2)))
5351 tree comp_op0 = arg00;
5352 tree comp_op1 = arg01;
5353 tree comp_type = TREE_TYPE (comp_op0);
5355 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5356 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5358 comp_type = type;
5359 comp_op0 = arg1;
5360 comp_op1 = arg2;
5363 switch (comp_code)
5365 case EQ_EXPR:
5366 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5367 case NE_EXPR:
5368 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5369 case LE_EXPR:
5370 case LT_EXPR:
5371 case UNLE_EXPR:
5372 case UNLT_EXPR:
5373 /* In C++ a ?: expression can be an lvalue, so put the
5374 operand which will be used if they are equal first
5375 so that we can convert this back to the
5376 corresponding COND_EXPR. */
5377 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5379 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5380 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5381 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5382 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5383 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5384 comp_op1, comp_op0);
5385 return pedantic_non_lvalue_loc (loc,
5386 fold_convert_loc (loc, type, tem));
5388 break;
5389 case GE_EXPR:
5390 case GT_EXPR:
5391 case UNGE_EXPR:
5392 case UNGT_EXPR:
5393 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5395 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5396 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5397 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5398 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5399 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5400 comp_op1, comp_op0);
5401 return pedantic_non_lvalue_loc (loc,
5402 fold_convert_loc (loc, type, tem));
5404 break;
5405 case UNEQ_EXPR:
5406 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5407 return pedantic_non_lvalue_loc (loc,
5408 fold_convert_loc (loc, type, arg2));
5409 break;
5410 case LTGT_EXPR:
5411 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5412 return pedantic_non_lvalue_loc (loc,
5413 fold_convert_loc (loc, type, arg1));
5414 break;
5415 default:
5416 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5417 break;
5421 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5422 we might still be able to simplify this. For example,
5423 if C1 is one less or one more than C2, this might have started
5424 out as a MIN or MAX and been transformed by this function.
5425 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5427 if (INTEGRAL_TYPE_P (type)
5428 && TREE_CODE (arg01) == INTEGER_CST
5429 && TREE_CODE (arg2) == INTEGER_CST)
5430 switch (comp_code)
5432 case EQ_EXPR:
5433 if (TREE_CODE (arg1) == INTEGER_CST)
5434 break;
5435 /* We can replace A with C1 in this case. */
5436 arg1 = fold_convert_loc (loc, type, arg01);
5437 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5439 case LT_EXPR:
5440 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5441 MIN_EXPR, to preserve the signedness of the comparison. */
5442 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5443 OEP_ONLY_CONST)
5444 && operand_equal_p (arg01,
5445 const_binop (PLUS_EXPR, arg2,
5446 build_int_cst (type, 1), 0),
5447 OEP_ONLY_CONST))
5449 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5450 fold_convert_loc (loc, TREE_TYPE (arg00),
5451 arg2));
5452 return pedantic_non_lvalue_loc (loc,
5453 fold_convert_loc (loc, type, tem));
5455 break;
5457 case LE_EXPR:
5458 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5459 as above. */
5460 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5461 OEP_ONLY_CONST)
5462 && operand_equal_p (arg01,
5463 const_binop (MINUS_EXPR, arg2,
5464 build_int_cst (type, 1), 0),
5465 OEP_ONLY_CONST))
5467 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5468 fold_convert_loc (loc, TREE_TYPE (arg00),
5469 arg2));
5470 return pedantic_non_lvalue_loc (loc,
5471 fold_convert_loc (loc, type, tem));
5473 break;
5475 case GT_EXPR:
5476 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5477 MAX_EXPR, to preserve the signedness of the comparison. */
5478 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5479 OEP_ONLY_CONST)
5480 && operand_equal_p (arg01,
5481 const_binop (MINUS_EXPR, arg2,
5482 build_int_cst (type, 1), 0),
5483 OEP_ONLY_CONST))
5485 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5486 fold_convert_loc (loc, TREE_TYPE (arg00),
5487 arg2));
5488 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5490 break;
5492 case GE_EXPR:
5493 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5494 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5495 OEP_ONLY_CONST)
5496 && operand_equal_p (arg01,
5497 const_binop (PLUS_EXPR, arg2,
5498 build_int_cst (type, 1), 0),
5499 OEP_ONLY_CONST))
5501 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5502 fold_convert_loc (loc, TREE_TYPE (arg00),
5503 arg2));
5504 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5506 break;
5507 case NE_EXPR:
5508 break;
5509 default:
5510 gcc_unreachable ();
5513 return NULL_TREE;
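/* Hedged examples of the folds above (assuming a type that honors
   neither signed zeros nor NaNs):

     x > 0 ? x : -x    ->   ABS_EXPR <x>
     x < 0 ? x : -x    ->   -ABS_EXPR <x>
     x >= y ? x : y    ->   MAX_EXPR <x, y>

   and for constants, x < C + 1 ? x : C folds back to MIN_EXPR <x, C>
   via the C1/C2 cases at the end.  */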
5518 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5519 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5520 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5521 false) >= 2)
5522 #endif
5524 /* EXP is some logical combination of boolean tests. See if we can
5525 merge it into some range test. Return the new tree if so. */
5527 static tree
5528 fold_range_test (location_t loc, enum tree_code code, tree type,
5529 tree op0, tree op1)
5531 int or_op = (code == TRUTH_ORIF_EXPR
5532 || code == TRUTH_OR_EXPR);
5533 int in0_p, in1_p, in_p;
5534 tree low0, low1, low, high0, high1, high;
5535 bool strict_overflow_p = false;
5536 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5537 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5538 tree tem;
5539 const char * const warnmsg = G_("assuming signed overflow does not occur "
5540 "when simplifying range test");
5542 /* If this is an OR operation, invert both sides; we will invert
5543 again at the end. */
5544 if (or_op)
5545 in0_p = ! in0_p, in1_p = ! in1_p;
5547 /* If both expressions are the same, if we can merge the ranges, and we
5548 can build the range test, return it or it inverted. If one of the
5549 ranges is always true or always false, consider it to be the same
5550 expression as the other. */
5551 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5552 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5553 in1_p, low1, high1)
5554 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5555 lhs != 0 ? lhs
5556 : rhs != 0 ? rhs : integer_zero_node,
5557 in_p, low, high))))
5559 if (strict_overflow_p)
5560 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5561 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5564 /* On machines where the branch cost is expensive, if this is a
5565 short-circuited branch and the underlying object on both sides
5566 is the same, make a non-short-circuit operation. */
5567 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5568 && lhs != 0 && rhs != 0
5569 && (code == TRUTH_ANDIF_EXPR
5570 || code == TRUTH_ORIF_EXPR)
5571 && operand_equal_p (lhs, rhs, 0))
5573 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5574 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5575 which cases we can't do this. */
5576 if (simple_operand_p (lhs))
5578 tem = build2 (code == TRUTH_ANDIF_EXPR
5579 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5580 type, op0, op1);
5581 SET_EXPR_LOCATION (tem, loc);
5582 return tem;
5585 else if (lang_hooks.decls.global_bindings_p () == 0
5586 && ! CONTAINS_PLACEHOLDER_P (lhs))
5588 tree common = save_expr (lhs);
5590 if (0 != (lhs = build_range_check (loc, type, common,
5591 or_op ? ! in0_p : in0_p,
5592 low0, high0))
5593 && (0 != (rhs = build_range_check (loc, type, common,
5594 or_op ? ! in1_p : in1_p,
5595 low1, high1))))
5597 if (strict_overflow_p)
5598 fold_overflow_warning (warnmsg,
5599 WARN_STRICT_OVERFLOW_COMPARISON);
5600 tem = build2 (code == TRUTH_ANDIF_EXPR
5601 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5602 type, lhs, rhs);
5603 SET_EXPR_LOCATION (tem, loc);
5604 return tem;
5609 return 0;
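/* Illustrative end-to-end example (a sketch, not in the sources): for

     a == 2 || a == 3

   both operands decode to single-point ranges, which after the OR
   inversion merge into one excluded range; building the range check
   and re-inverting it leaves

     (unsigned) (a - 2) <= 1

   replacing two compares and a branch with one comparison.  */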
5612 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5613 bit value. Arrange things so the extra bits will be set to zero if and
5614 only if C is sign-extended to its full width. If MASK is nonzero,
5615 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5617 static tree
5618 unextend (tree c, int p, int unsignedp, tree mask)
5620 tree type = TREE_TYPE (c);
5621 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5622 tree temp;
5624 if (p == modesize || unsignedp)
5625 return c;
5627 /* We work by getting just the sign bit into the low-order bit, then
5628 into the high-order bit, then sign-extend. We then XOR that value
5629 with C. */
5630 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5631 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5633 /* We must use a signed type in order to get an arithmetic right shift.
5634 However, we must also avoid introducing accidental overflows, so that
5635 a subsequent call to integer_zerop will work. Hence we must
5636 do the type conversion here. At this point, the constant is either
5637 zero or one, and the conversion to a signed type can never overflow.
5638 We could get an overflow if this conversion is done anywhere else. */
5639 if (TYPE_UNSIGNED (type))
5640 temp = fold_convert (signed_type_for (type), temp);
5642 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5643 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5644 if (mask != 0)
5645 temp = const_binop (BIT_AND_EXPR, temp,
5646 fold_convert (TREE_TYPE (c), mask),
5648 /* If necessary, convert the type back to match the type of C. */
5649 if (TYPE_UNSIGNED (type))
5650 temp = fold_convert (type, temp);
5652 return fold_convert (type,
5653 const_binop (BIT_XOR_EXPR, c, temp, 0));
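/* A worked example of unextend, assuming P == 8 in a 32-bit mode and
   C == 0x80 (values are illustrative only): the sign bit 1 is moved to
   bit 31, arithmetically shifted back down to give 0xffffff00, and
   XORed with C to yield 0xffffff80 -- exactly C sign-extended from 8
   bits.  When the sign bit of C is clear, TEMP is zero and C comes
   back unchanged.  */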
5656 /* Find ways of folding logical expressions of LHS and RHS:
5657 Try to merge two comparisons to the same innermost item.
5658 Look for range tests like "ch >= '0' && ch <= '9'".
5659 Look for combinations of simple terms on machines with expensive branches
5660 and evaluate the RHS unconditionally.
5662 For example, if we have p->a == 2 && p->b == 4 and we can make an
5663 object large enough to span both A and B, we can do this with a comparison
5664 against the object ANDed with a mask.
5666 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5667 operations to do this with one comparison.
5669 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5670 function and the one above.
5672 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5673 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5675 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5676 two operands.
5678 We return the simplified tree or 0 if no optimization is possible. */
5680 static tree
5681 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5682 tree lhs, tree rhs)
5684 /* If this is the "or" of two comparisons, we can do something if
5685 the comparisons are NE_EXPR. If this is the "and", we can do something
5686 if the comparisons are EQ_EXPR. I.e.,
5687 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5689 WANTED_CODE is this operation code. For single bit fields, we can
5690 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5691 comparison for one-bit fields. */
5693 enum tree_code wanted_code;
5694 enum tree_code lcode, rcode;
5695 tree ll_arg, lr_arg, rl_arg, rr_arg;
5696 tree ll_inner, lr_inner, rl_inner, rr_inner;
5697 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5698 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5699 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5700 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5701 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5702 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5703 enum machine_mode lnmode, rnmode;
5704 tree ll_mask, lr_mask, rl_mask, rr_mask;
5705 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5706 tree l_const, r_const;
5707 tree lntype, rntype, result;
5708 HOST_WIDE_INT first_bit, end_bit;
5709 int volatilep;
5710 tree orig_lhs = lhs, orig_rhs = rhs;
5711 enum tree_code orig_code = code;
5713 /* Start by getting the comparison codes. Fail if anything is volatile.
5714 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5715 it were surrounded with a NE_EXPR. */
5717 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5718 return 0;
5720 lcode = TREE_CODE (lhs);
5721 rcode = TREE_CODE (rhs);
5723 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5725 lhs = build2 (NE_EXPR, truth_type, lhs,
5726 build_int_cst (TREE_TYPE (lhs), 0));
5727 lcode = NE_EXPR;
5730 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5732 rhs = build2 (NE_EXPR, truth_type, rhs,
5733 build_int_cst (TREE_TYPE (rhs), 0));
5734 rcode = NE_EXPR;
5737 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5738 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5739 return 0;
5741 ll_arg = TREE_OPERAND (lhs, 0);
5742 lr_arg = TREE_OPERAND (lhs, 1);
5743 rl_arg = TREE_OPERAND (rhs, 0);
5744 rr_arg = TREE_OPERAND (rhs, 1);
5746 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5747 if (simple_operand_p (ll_arg)
5748 && simple_operand_p (lr_arg))
5750 tree result;
5751 if (operand_equal_p (ll_arg, rl_arg, 0)
5752 && operand_equal_p (lr_arg, rr_arg, 0))
5754 result = combine_comparisons (loc, code, lcode, rcode,
5755 truth_type, ll_arg, lr_arg);
5756 if (result)
5757 return result;
5759 else if (operand_equal_p (ll_arg, rr_arg, 0)
5760 && operand_equal_p (lr_arg, rl_arg, 0))
5762 result = combine_comparisons (loc, code, lcode,
5763 swap_tree_comparison (rcode),
5764 truth_type, ll_arg, lr_arg);
5765 if (result)
5766 return result;
5770 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5771 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5773 /* If the RHS can be evaluated unconditionally and its operands are
5774 simple, it wins to evaluate the RHS unconditionally on machines
5775 with expensive branches. In this case, this isn't a comparison
5776 that can be merged. Avoid doing this if the RHS is a floating-point
5777 comparison since those can trap. */
5779 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5780 false) >= 2
5781 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5782 && simple_operand_p (rl_arg)
5783 && simple_operand_p (rr_arg))
5785 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5786 if (code == TRUTH_OR_EXPR
5787 && lcode == NE_EXPR && integer_zerop (lr_arg)
5788 && rcode == NE_EXPR && integer_zerop (rr_arg)
5789 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5790 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5792 result = build2 (NE_EXPR, truth_type,
5793 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5794 ll_arg, rl_arg),
5795 build_int_cst (TREE_TYPE (ll_arg), 0));
5796 goto fold_truthop_exit;
5799 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5800 if (code == TRUTH_AND_EXPR
5801 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5802 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5803 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5804 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5806 result = build2 (EQ_EXPR, truth_type,
5807 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5808 ll_arg, rl_arg),
5809 build_int_cst (TREE_TYPE (ll_arg), 0));
5810 goto fold_truthop_exit;
5813 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5815 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5817 result = build2 (code, truth_type, lhs, rhs);
5818 goto fold_truthop_exit;
5820 return NULL_TREE;
5824 /* See if the comparisons can be merged. Then get all the parameters for
5825 each side. */
5827 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5828 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5829 return 0;
5831 volatilep = 0;
5832 ll_inner = decode_field_reference (loc, ll_arg,
5833 &ll_bitsize, &ll_bitpos, &ll_mode,
5834 &ll_unsignedp, &volatilep, &ll_mask,
5835 &ll_and_mask);
5836 lr_inner = decode_field_reference (loc, lr_arg,
5837 &lr_bitsize, &lr_bitpos, &lr_mode,
5838 &lr_unsignedp, &volatilep, &lr_mask,
5839 &lr_and_mask);
5840 rl_inner = decode_field_reference (loc, rl_arg,
5841 &rl_bitsize, &rl_bitpos, &rl_mode,
5842 &rl_unsignedp, &volatilep, &rl_mask,
5843 &rl_and_mask);
5844 rr_inner = decode_field_reference (loc, rr_arg,
5845 &rr_bitsize, &rr_bitpos, &rr_mode,
5846 &rr_unsignedp, &volatilep, &rr_mask,
5847 &rr_and_mask);
5849 /* The inner operation on the lhs of each comparison must be the
5850 same if we are to be able to do anything.
5851 Then see if we have constants. If not, the same must be true for
5852 the rhs's. */
5853 if (volatilep || ll_inner == 0 || rl_inner == 0
5854 || ! operand_equal_p (ll_inner, rl_inner, 0))
5855 return 0;
5857 if (TREE_CODE (lr_arg) == INTEGER_CST
5858 && TREE_CODE (rr_arg) == INTEGER_CST)
5859 l_const = lr_arg, r_const = rr_arg;
5860 else if (lr_inner == 0 || rr_inner == 0
5861 || ! operand_equal_p (lr_inner, rr_inner, 0))
5862 return 0;
5863 else
5864 l_const = r_const = 0;
5866 /* If either comparison code is not correct for our logical operation,
5867 fail. However, we can convert a one-bit comparison against zero into
5868 the opposite comparison against that bit being set in the field. */
5870 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5871 if (lcode != wanted_code)
5873 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5875 /* Make the left operand unsigned, since we are only interested
5876 in the value of one bit. Otherwise we are doing the wrong
5877 thing below. */
5878 ll_unsignedp = 1;
5879 l_const = ll_mask;
5881 else
5882 return 0;
5885 /* This is analogous to the code for l_const above. */
5886 if (rcode != wanted_code)
5888 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5890 rl_unsignedp = 1;
5891 r_const = rl_mask;
5893 else
5894 return 0;
5897 /* See if we can find a mode that contains both fields being compared on
5898 the left. If we can't, fail. Otherwise, update all constants and masks
5899 to be relative to a field of that size. */
5900 first_bit = MIN (ll_bitpos, rl_bitpos);
5901 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5902 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5903 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5904 volatilep);
5905 if (lnmode == VOIDmode)
5906 return 0;
5908 lnbitsize = GET_MODE_BITSIZE (lnmode);
5909 lnbitpos = first_bit & ~ (lnbitsize - 1);
5910 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5911 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5913 if (BYTES_BIG_ENDIAN)
5915 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5916 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5919 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5920 size_int (xll_bitpos), 0);
5921 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5922 size_int (xrl_bitpos), 0);
5924 if (l_const)
5926 l_const = fold_convert_loc (loc, lntype, l_const);
5927 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5928 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5929 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5930 fold_build1_loc (loc, BIT_NOT_EXPR,
5931 lntype, ll_mask),
5932 0)))
5934 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5936 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5939 if (r_const)
5941 r_const = fold_convert_loc (loc, lntype, r_const);
5942 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5943 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5944 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5945 fold_build1_loc (loc, BIT_NOT_EXPR,
5946 lntype, rl_mask),
5947 0)))
5949 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5951 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5955 /* If the right sides are not constant, do the same for them.  Also,
5956 disallow this optimization if a size or signedness mismatch occurs
5957 between the left and right sides. */
5958 if (l_const == 0)
5960 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5961 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5962 /* Make sure the two fields on the right
5963 correspond to the left without being swapped. */
5964 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5965 return 0;
5967 first_bit = MIN (lr_bitpos, rr_bitpos);
5968 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5969 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5970 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5971 volatilep);
5972 if (rnmode == VOIDmode)
5973 return 0;
5975 rnbitsize = GET_MODE_BITSIZE (rnmode);
5976 rnbitpos = first_bit & ~ (rnbitsize - 1);
5977 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5978 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5980 if (BYTES_BIG_ENDIAN)
5982 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5983 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5986 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5987 rntype, lr_mask),
5988 size_int (xlr_bitpos), 0);
5989 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5990 rntype, rr_mask),
5991 size_int (xrr_bitpos), 0);
5993 /* Make a mask that corresponds to both fields being compared.
5994 Do this for both items being compared. If the operands are the
5995 same size and the bits being compared are in the same position
5996 then we can do this by masking both and comparing the masked
5997 results. */
5998 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5999 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6000 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6002 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6003 ll_unsignedp || rl_unsignedp);
6004 if (! all_ones_mask_p (ll_mask, lnbitsize))
6005 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6007 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6008 lr_unsignedp || rr_unsignedp);
6009 if (! all_ones_mask_p (lr_mask, rnbitsize))
6010 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6012 result = build2 (wanted_code, truth_type, lhs, rhs);
6013 goto fold_truthop_exit;
6016 /* There is still another way we can do something: If both pairs of
6017 fields being compared are adjacent, we may be able to make a wider
6018 field containing them both.
6020 Note that we still must mask the lhs/rhs expressions. Furthermore,
6021 the mask must be shifted to account for the shift done by
6022 make_bit_field_ref. */
6023 if ((ll_bitsize + ll_bitpos == rl_bitpos
6024 && lr_bitsize + lr_bitpos == rr_bitpos)
6025 || (ll_bitpos == rl_bitpos + rl_bitsize
6026 && lr_bitpos == rr_bitpos + rr_bitsize))
6028 tree type;
6030 lhs = make_bit_field_ref (loc, ll_inner, lntype,
6031 ll_bitsize + rl_bitsize,
6032 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6033 rhs = make_bit_field_ref (loc, lr_inner, rntype,
6034 lr_bitsize + rr_bitsize,
6035 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6037 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6038 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6039 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6040 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6042 /* Convert to the smaller type before masking out unwanted bits. */
6043 type = lntype;
6044 if (lntype != rntype)
6046 if (lnbitsize > rnbitsize)
6048 lhs = fold_convert_loc (loc, rntype, lhs);
6049 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6050 type = rntype;
6052 else if (lnbitsize < rnbitsize)
6054 rhs = fold_convert_loc (loc, lntype, rhs);
6055 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6056 type = lntype;
6060 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6061 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6063 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6064 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6066 result = build2 (wanted_code, truth_type, lhs, rhs);
6067 goto fold_truthop_exit;
6070 return 0;
6073 /* Handle the case of comparisons with constants. If there is something in
6074 common between the masks, those bits of the constants must be the same.
6075 If not, the condition is always false. Test for this to avoid generating
6076 incorrect code below. */
6077 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6078 if (! integer_zerop (result)
6079 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6080 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6082 if (wanted_code == NE_EXPR)
6084 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6085 return constant_boolean_node (true, truth_type);
6087 else
6089 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6090 return constant_boolean_node (false, truth_type);
6094 /* Construct the expression we will return. First get the component
6095 reference we will make. Unless the mask is all ones the width of
6096 that field, perform the mask operation. Then compare with the
6097 merged constant. */
6098 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6099 ll_unsignedp || rl_unsignedp);
6101 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6102 if (! all_ones_mask_p (ll_mask, lnbitsize))
6104 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6105 SET_EXPR_LOCATION (result, loc);
6108 result = build2 (wanted_code, truth_type, result,
6109 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
6111 fold_truthop_exit:
6112 SET_EXPR_LOCATION (result, loc);
6113 return result;
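/* A hedged sketch of the constant case above, for a hypothetical
   struct with two adjacent 8-bit fields A and B packed into one
   16-bit word: the test

     p->a == 2 && p->b == 4

   loads the containing word once and compares it against the merged
   constant, roughly (little-endian, illustrative values only)

     (*(unsigned short *) p) == ((4 << 8) | 2)

   i.e. one bit-field reference, one optional BIT_AND_EXPR mask, and a
   single EQ_EXPR against l_const | r_const.  */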
6116 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
6117 constant. */
6119 static tree
6120 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6121 tree op0, tree op1)
6123 tree arg0 = op0;
6124 enum tree_code op_code;
6125 tree comp_const;
6126 tree minmax_const;
6127 int consts_equal, consts_lt;
6128 tree inner;
6130 STRIP_SIGN_NOPS (arg0);
6132 op_code = TREE_CODE (arg0);
6133 minmax_const = TREE_OPERAND (arg0, 1);
6134 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6135 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6136 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6137 inner = TREE_OPERAND (arg0, 0);
6139 /* If something does not permit us to optimize, fail and return NULL_TREE. */
6140 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6141 || TREE_CODE (comp_const) != INTEGER_CST
6142 || TREE_OVERFLOW (comp_const)
6143 || TREE_CODE (minmax_const) != INTEGER_CST
6144 || TREE_OVERFLOW (minmax_const))
6145 return NULL_TREE;
6147 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6148 and GT_EXPR, doing the rest with recursive calls using logical
6149 simplifications. */
6150 switch (code)
6152 case NE_EXPR: case LT_EXPR: case LE_EXPR:
6154 tree tem
6155 = optimize_minmax_comparison (loc,
6156 invert_tree_comparison (code, false),
6157 type, op0, op1);
6158 if (tem)
6159 return invert_truthvalue_loc (loc, tem);
6160 return NULL_TREE;
6163 case GE_EXPR:
6164 return
6165 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6166 optimize_minmax_comparison
6167 (loc, EQ_EXPR, type, arg0, comp_const),
6168 optimize_minmax_comparison
6169 (loc, GT_EXPR, type, arg0, comp_const));
6171 case EQ_EXPR:
6172 if (op_code == MAX_EXPR && consts_equal)
6173 /* MAX (X, 0) == 0 -> X <= 0 */
6174 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6176 else if (op_code == MAX_EXPR && consts_lt)
6177 /* MAX (X, 0) == 5 -> X == 5 */
6178 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6180 else if (op_code == MAX_EXPR)
6181 /* MAX (X, 0) == -1 -> false */
6182 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6184 else if (consts_equal)
6185 /* MIN (X, 0) == 0 -> X >= 0 */
6186 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6188 else if (consts_lt)
6189 /* MIN (X, 0) == 5 -> false */
6190 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6192 else
6193 /* MIN (X, 0) == -1 -> X == -1 */
6194 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6196 case GT_EXPR:
6197 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6198 /* MAX (X, 0) > 0 -> X > 0
6199 MAX (X, 0) > 5 -> X > 5 */
6200 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6202 else if (op_code == MAX_EXPR)
6203 /* MAX (X, 0) > -1 -> true */
6204 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6206 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6207 /* MIN (X, 0) > 0 -> false
6208 MIN (X, 0) > 5 -> false */
6209 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6211 else
6212 /* MIN (X, 0) > -1 -> X > -1 */
6213 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6215 default:
6216 return NULL_TREE;
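/* For the codes not handled directly above, the recursion at the top
   inverts first; e.g. (X is a hypothetical operand)

     MAX (X, 0) <= 5  ->  ! (MAX (X, 0) > 5)  ->  ! (X > 5)  ->  X <= 5

   using the GT_EXPR case and invert_truthvalue_loc.  */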
6220 /* T is an integer expression that is being multiplied by, divided by, or
6221 reduced modulo a constant C (CODE says which operation and what kind of
6222 divide or modulus).  See if we can eliminate that operation by folding it with
6223 other operations already in T. WIDE_TYPE, if non-null, is a type that
6224 should be used for the computation if wider than our type.
6226 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6227 (X * 2) + (Y * 4). We must, however, be assured that either the original
6228 expression would not overflow or that overflow is undefined for the type
6229 in the language in question.
6231 If we return a non-null expression, it is an equivalent form of the
6232 original computation, but need not be in the original type.
6234 We set *STRICT_OVERFLOW_P to true if the return value depends on
6235 signed overflow being undefined. Otherwise we do not change
6236 *STRICT_OVERFLOW_P. */
6238 static tree
6239 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6240 bool *strict_overflow_p)
6242 /* To avoid exponential search depth, refuse to allow recursion past
6243 three levels. Beyond that (1) it's highly unlikely that we'll find
6244 something interesting and (2) we've probably processed it before
6245 when we built the inner expression. */
6247 static int depth;
6248 tree ret;
6250 if (depth > 3)
6251 return NULL;
6253 depth++;
6254 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6255 depth--;
6257 return ret;
6260 static tree
6261 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6262 bool *strict_overflow_p)
6264 tree type = TREE_TYPE (t);
6265 enum tree_code tcode = TREE_CODE (t);
6266 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6267 > GET_MODE_SIZE (TYPE_MODE (type)))
6268 ? wide_type : type);
6269 tree t1, t2;
6270 int same_p = tcode == code;
6271 tree op0 = NULL_TREE, op1 = NULL_TREE;
6272 bool sub_strict_overflow_p;
6274 /* Don't deal with constants of zero here; they confuse the code below. */
6275 if (integer_zerop (c))
6276 return NULL_TREE;
6278 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6279 op0 = TREE_OPERAND (t, 0);
6281 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6282 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6284 /* Note that we need not handle conditional operations here since fold
6285 already handles those cases. So just do arithmetic here. */
6286 switch (tcode)
6288 case INTEGER_CST:
6289 /* For a constant, we can always simplify if we are a multiply
6290 or (for divide and modulus) if it is a multiple of our constant. */
6291 if (code == MULT_EXPR
6292 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6293 return const_binop (code, fold_convert (ctype, t),
6294 fold_convert (ctype, c), 0);
6295 break;
6297 CASE_CONVERT: case NON_LVALUE_EXPR:
6298 /* If op0 is an expression ... */
6299 if ((COMPARISON_CLASS_P (op0)
6300 || UNARY_CLASS_P (op0)
6301 || BINARY_CLASS_P (op0)
6302 || VL_EXP_CLASS_P (op0)
6303 || EXPRESSION_CLASS_P (op0))
6304 /* ... and has wrapping overflow, and its type is smaller
6305 than ctype, then we cannot pass through as widening. */
6306 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6307 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6308 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6309 && (TYPE_PRECISION (ctype)
6310 > TYPE_PRECISION (TREE_TYPE (op0))))
6311 /* ... or this is a truncation (t is narrower than op0),
6312 then we cannot pass through this narrowing. */
6313 || (TYPE_PRECISION (type)
6314 < TYPE_PRECISION (TREE_TYPE (op0)))
6315 /* ... or signedness changes for division or modulus,
6316 then we cannot pass through this conversion. */
6317 || (code != MULT_EXPR
6318 && (TYPE_UNSIGNED (ctype)
6319 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6320 /* ... or has undefined overflow while the converted to
6321 type has not, we cannot do the operation in the inner type
6322 as that would introduce undefined overflow. */
6323 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6324 && !TYPE_OVERFLOW_UNDEFINED (type))))
6325 break;
6327 /* Pass the constant down and see if we can make a simplification. If
6328 we can, replace this expression with the inner simplification for
6329 possible later conversion to our or some other type. */
6330 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6331 && TREE_CODE (t2) == INTEGER_CST
6332 && !TREE_OVERFLOW (t2)
6333 && (0 != (t1 = extract_muldiv (op0, t2, code,
6334 code == MULT_EXPR
6335 ? ctype : NULL_TREE,
6336 strict_overflow_p))))
6337 return t1;
6338 break;
6340 case ABS_EXPR:
6341 /* If widening the type changes it from signed to unsigned, then we
6342 must avoid building ABS_EXPR itself as unsigned. */
6343 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6345 tree cstype = (*signed_type_for) (ctype);
6346 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6347 != 0)
6349 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6350 return fold_convert (ctype, t1);
6352 break;
6354 /* If the constant is negative, we cannot simplify this. */
6355 if (tree_int_cst_sgn (c) == -1)
6356 break;
6357 /* FALLTHROUGH */
6358 case NEGATE_EXPR:
6359 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6360 != 0)
6361 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6362 break;
6364 case MIN_EXPR: case MAX_EXPR:
6365 /* If widening the type changes the signedness, then we can't perform
6366 this optimization as that changes the result. */
6367 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6368 break;
6370 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6371 sub_strict_overflow_p = false;
6372 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6373 &sub_strict_overflow_p)) != 0
6374 && (t2 = extract_muldiv (op1, c, code, wide_type,
6375 &sub_strict_overflow_p)) != 0)
6377 if (tree_int_cst_sgn (c) < 0)
6378 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6379 if (sub_strict_overflow_p)
6380 *strict_overflow_p = true;
6381 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6382 fold_convert (ctype, t2));
6384 break;
6386 case LSHIFT_EXPR: case RSHIFT_EXPR:
6387 /* If the second operand is constant, this is a multiplication
6388 or floor division, by a power of two, so we can treat it that
6389 way unless the multiplier or divisor overflows. Signed
6390 left-shift overflow is implementation-defined rather than
6391 undefined in C90, so do not convert signed left shift into
6392 multiplication. */
6393 if (TREE_CODE (op1) == INTEGER_CST
6394 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6395 /* const_binop may not detect overflow correctly,
6396 so check for it explicitly here. */
6397 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6398 && TREE_INT_CST_HIGH (op1) == 0
6399 && 0 != (t1 = fold_convert (ctype,
6400 const_binop (LSHIFT_EXPR,
6401 size_one_node,
6402 op1, 0)))
6403 && !TREE_OVERFLOW (t1))
6404 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6405 ? MULT_EXPR : FLOOR_DIV_EXPR,
6406 ctype,
6407 fold_convert (ctype, op0),
6408 t1),
6409 c, code, wide_type, strict_overflow_p);
6410 break;
6412 case PLUS_EXPR: case MINUS_EXPR:
6413 /* See if we can eliminate the operation on both sides. If we can, we
6414 can return a new PLUS or MINUS. If we can't, the only remaining
6415 cases where we can do anything are if the second operand is a
6416 constant. */
6417 sub_strict_overflow_p = false;
6418 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6419 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6420 if (t1 != 0 && t2 != 0
6421 && (code == MULT_EXPR
6422 /* If not multiplication, we can only do this if both operands
6423 are divisible by c. */
6424 || (multiple_of_p (ctype, op0, c)
6425 && multiple_of_p (ctype, op1, c))))
6427 if (sub_strict_overflow_p)
6428 *strict_overflow_p = true;
6429 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6430 fold_convert (ctype, t2));
6433 /* If this was a subtraction, negate OP1 and set it to be an addition.
6434 This simplifies the logic below. */
6435 if (tcode == MINUS_EXPR)
6436 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6438 if (TREE_CODE (op1) != INTEGER_CST)
6439 break;
6441 /* If either OP1 or C are negative, this optimization is not safe for
6442 some of the division and remainder types while for others we need
6443 to change the code. */
6444 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6446 if (code == CEIL_DIV_EXPR)
6447 code = FLOOR_DIV_EXPR;
6448 else if (code == FLOOR_DIV_EXPR)
6449 code = CEIL_DIV_EXPR;
6450 else if (code != MULT_EXPR
6451 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6452 break;
6455 /* If it's a multiply or a division/modulus operation of a multiple
6456 of our constant, do the operation and verify it doesn't overflow. */
6457 if (code == MULT_EXPR
6458 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6460 op1 = const_binop (code, fold_convert (ctype, op1),
6461 fold_convert (ctype, c), 0);
6462 /* We allow the constant to overflow with wrapping semantics. */
6463 if (op1 == 0
6464 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6465 break;
6467 else
6468 break;
6470 /* If we have an unsigned type that is not a sizetype, we cannot widen
6471 the operation since it will change the result if the original
6472 computation overflowed. */
6473 if (TYPE_UNSIGNED (ctype)
6474 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6475 && ctype != type)
6476 break;
6478 /* If we were able to eliminate our operation from the first side,
6479 apply our operation to the second side and reform the PLUS. */
6480 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6481 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6483 /* The last case is if we are a multiply. In that case, we can
6484 apply the distributive law to commute the multiply and addition
6485 if the multiplication of the constants doesn't overflow. */
6486 if (code == MULT_EXPR)
6487 return fold_build2 (tcode, ctype,
6488 fold_build2 (code, ctype,
6489 fold_convert (ctype, op0),
6490 fold_convert (ctype, c)),
6491 op1);
6493 break;
6495 case MULT_EXPR:
6496 /* We have a special case here if we are doing something like
6497 (C * 8) % 4 since we know that's zero. */
6498 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6499 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6500 /* If the multiplication can overflow we cannot optimize this.
6501 ??? Until we can properly mark individual operations as
6502 not overflowing we need to treat sizetype specially here, as
6503 stor-layout relies on this optimization to make
6504 DECL_FIELD_BIT_OFFSET always a constant. */
6505 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6506 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6507 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6508 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6509 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6511 *strict_overflow_p = true;
6512 return omit_one_operand (type, integer_zero_node, op0);
6515 /* ... fall through ... */
6517 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6518 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6519 /* If we can extract our operation from the LHS, do so and return a
6520 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6521 do something only if the second operand is a constant. */
6522 if (same_p
6523 && (t1 = extract_muldiv (op0, c, code, wide_type,
6524 strict_overflow_p)) != 0)
6525 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6526 fold_convert (ctype, op1));
6527 else if (tcode == MULT_EXPR && code == MULT_EXPR
6528 && (t1 = extract_muldiv (op1, c, code, wide_type,
6529 strict_overflow_p)) != 0)
6530 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6531 fold_convert (ctype, t1));
6532 else if (TREE_CODE (op1) != INTEGER_CST)
6533 return 0;
6535 /* If these are the same operation types, we can associate them
6536 assuming no overflow. */
6537 if (tcode == code
6538 && 0 != (t1 = int_const_binop (MULT_EXPR,
6539 fold_convert (ctype, op1),
6540 fold_convert (ctype, c), 1))
6541 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6542 TREE_INT_CST_HIGH (t1),
6543 (TYPE_UNSIGNED (ctype)
6544 && tcode != MULT_EXPR) ? -1 : 1,
6545 TREE_OVERFLOW (t1)))
6546 && !TREE_OVERFLOW (t1))
6547 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6549 /* If these operations "cancel" each other, we have the main
6550 optimizations of this pass, which occur when either constant is a
6551 multiple of the other, in which case we replace this with either an
6552 operation of CODE or TCODE.
6554 If we have an unsigned type that is not a sizetype, we cannot do
6555 this since it will change the result if the original computation
6556 overflowed. */
6557 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6558 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6559 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6560 || (tcode == MULT_EXPR
6561 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6562 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6563 && code != MULT_EXPR)))
6565 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6567 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6568 *strict_overflow_p = true;
6569 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6570 fold_convert (ctype,
6571 const_binop (TRUNC_DIV_EXPR,
6572 op1, c, 0)));
6574 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6576 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6577 *strict_overflow_p = true;
6578 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6579 fold_convert (ctype,
6580 const_binop (TRUNC_DIV_EXPR,
6581 c, op1, 0)));
6584 break;
6586 default:
6587 break;
6590 return 0;
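/* Two hedged examples of the cases above (X is a hypothetical
   operand):

     (X * 12) / 4  ->  X * 3       via the MULT_EXPR/division "cancel"
                                   case, since 12 % 4 == 0;
     (X + 8) * 4   ->  X * 4 + 32  via the PLUS_EXPR case and the
                                   distributive law.

   The first is valid only when overflow is undefined for the type (or
   the type is a sizetype) and records that via *STRICT_OVERFLOW_P.  */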
6593 /* Return a node which has the indicated constant VALUE (either 0 or
6594 1), and is of the indicated TYPE. */
6596 tree
6597 constant_boolean_node (int value, tree type)
6599 if (type == integer_type_node)
6600 return value ? integer_one_node : integer_zero_node;
6601 else if (type == boolean_type_node)
6602 return value ? boolean_true_node : boolean_false_node;
6603 else
6604 return build_int_cst (type, value);
6608 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6609 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6610 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6611 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6612 COND is the first argument to CODE; otherwise (as in the example
6613 given here), it is the second argument. TYPE is the type of the
6614 original expression. Return NULL_TREE if no simplification is
6615 possible. */
6617 static tree
6618 fold_binary_op_with_conditional_arg (location_t loc,
6619 enum tree_code code,
6620 tree type, tree op0, tree op1,
6621 tree cond, tree arg, int cond_first_p)
6623 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6624 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6625 tree test, true_value, false_value;
6626 tree lhs = NULL_TREE;
6627 tree rhs = NULL_TREE;
6629 /* This transformation is only worthwhile if we don't have to wrap
6630 arg in a SAVE_EXPR, and the operation can be simplified on at least
6631 one of the branches once it is pushed inside the COND_EXPR. */
6632 if (!TREE_CONSTANT (arg))
6633 return NULL_TREE;
6635 if (TREE_CODE (cond) == COND_EXPR)
6637 test = TREE_OPERAND (cond, 0);
6638 true_value = TREE_OPERAND (cond, 1);
6639 false_value = TREE_OPERAND (cond, 2);
6640 /* If this operand is an expression that throws, then it does not make
6641 sense to try to perform a logical or arithmetic operation
6642 involving it. */
6643 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6644 lhs = true_value;
6645 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6646 rhs = false_value;
6648 else
6650 tree testtype = TREE_TYPE (cond);
6651 test = cond;
6652 true_value = constant_boolean_node (true, testtype);
6653 false_value = constant_boolean_node (false, testtype);
6656 arg = fold_convert_loc (loc, arg_type, arg);
6657 if (lhs == 0)
6659 true_value = fold_convert_loc (loc, cond_type, true_value);
6660 if (cond_first_p)
6661 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6662 else
6663 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6665 if (rhs == 0)
6667 false_value = fold_convert_loc (loc, cond_type, false_value);
6668 if (cond_first_p)
6669 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6670 else
6671 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6674 test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6675 return fold_convert_loc (loc, type, test);
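/* For example (hedged, with hypothetical operands): when ARG is the
   constant 1 and COND is `b ? x : y', the sum `1 + (b ? x : y)' is
   rebuilt as `b ? (1 + x) : (1 + y)', letting fold simplify each arm;
   the TREE_CONSTANT test above prevents duplicating a non-constant
   ARG into both arms.  */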
6679 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6681 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6682 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6683 ADDEND is the same as X.
6685 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6686 and finite. The problematic cases are when X is zero, and its mode
6687 has signed zeros. In the case of rounding towards -infinity,
6688 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6689 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6691 bool
6692 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6694 if (!real_zerop (addend))
6695 return false;
6697 /* Don't allow the fold with -fsignaling-nans. */
6698 if (HONOR_SNANS (TYPE_MODE (type)))
6699 return false;
6701 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6702 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6703 return true;
6705 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6706 if (TREE_CODE (addend) == REAL_CST
6707 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6708 negate = !negate;
6710 /* The mode has signed zeros, and we have to honor their sign.
6711 In this situation, there is only one case we can return true for.
6712 X - 0 is the same as X unless rounding towards -infinity is
6713 supported. */
6714 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
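/* Concretely (an illustrative summary, assuming IEEE semantics):
   `X + 0.0' cannot be folded to X when signed zeros are honored,
   since (-0.0) + 0.0 is +0.0; `X - 0.0' can be, provided rounding
   towards -infinity is not honored (there 0.0 - 0.0 yields -0.0).  */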
6717 /* Subroutine of fold() that checks comparisons of built-in math
6718 functions against real constants.
6720 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6721 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6722 is the type of the result and ARG0 and ARG1 are the operands of the
6723 comparison. ARG1 must be a TREE_REAL_CST.
6725 The function returns the constant folded tree if a simplification
6726 can be made, and NULL_TREE otherwise. */
6728 static tree
6729 fold_mathfn_compare (location_t loc,
6730 enum built_in_function fcode, enum tree_code code,
6731 tree type, tree arg0, tree arg1)
6733 REAL_VALUE_TYPE c;
6735 if (BUILTIN_SQRT_P (fcode))
6737 tree arg = CALL_EXPR_ARG (arg0, 0);
6738 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6740 c = TREE_REAL_CST (arg1);
6741 if (REAL_VALUE_NEGATIVE (c))
6743 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6744 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6745 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6747 /* sqrt(x) > y is always true, if y is negative and we
6748 don't care about NaNs, i.e. negative values of x. */
6749 if (code == NE_EXPR || !HONOR_NANS (mode))
6750 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6752 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6753 return fold_build2_loc (loc, GE_EXPR, type, arg,
6754 build_real (TREE_TYPE (arg), dconst0));
6756 else if (code == GT_EXPR || code == GE_EXPR)
6758 REAL_VALUE_TYPE c2;
6760 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6761 real_convert (&c2, mode, &c2);
6763 if (REAL_VALUE_ISINF (c2))
6765 /* sqrt(x) > y is x == +Inf, when y is very large. */
6766 if (HONOR_INFINITIES (mode))
6767 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6768 build_real (TREE_TYPE (arg), c2));
6770 /* sqrt(x) > y is always false, when y is very large
6771 and we don't care about infinities. */
6772 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6775 /* sqrt(x) > c is the same as x > c*c. */
6776 return fold_build2_loc (loc, code, type, arg,
6777 build_real (TREE_TYPE (arg), c2));
6779 else if (code == LT_EXPR || code == LE_EXPR)
6781 REAL_VALUE_TYPE c2;
6783 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6784 real_convert (&c2, mode, &c2);
6786 if (REAL_VALUE_ISINF (c2))
6788 /* sqrt(x) < y is always true, when y is a very large
6789 value and we don't care about NaNs or Infinities. */
6790 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6791 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6793 /* sqrt(x) < y is x != +Inf when y is very large and we
6794 don't care about NaNs. */
6795 if (! HONOR_NANS (mode))
6796 return fold_build2_loc (loc, NE_EXPR, type, arg,
6797 build_real (TREE_TYPE (arg), c2));
6799 /* sqrt(x) < y is x >= 0 when y is very large and we
6800 don't care about Infinities. */
6801 if (! HONOR_INFINITIES (mode))
6802 return fold_build2_loc (loc, GE_EXPR, type, arg,
6803 build_real (TREE_TYPE (arg), dconst0));
6805 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6806 if (lang_hooks.decls.global_bindings_p () != 0
6807 || CONTAINS_PLACEHOLDER_P (arg))
6808 return NULL_TREE;
6810 arg = save_expr (arg);
6811 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6812 fold_build2_loc (loc, GE_EXPR, type, arg,
6813 build_real (TREE_TYPE (arg),
6814 dconst0)),
6815 fold_build2_loc (loc, NE_EXPR, type, arg,
6816 build_real (TREE_TYPE (arg),
6817 c2)));
6820 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6821 if (! HONOR_NANS (mode))
6822 return fold_build2_loc (loc, code, type, arg,
6823 build_real (TREE_TYPE (arg), c2));
6825 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6826 if (lang_hooks.decls.global_bindings_p () == 0
6827 && ! CONTAINS_PLACEHOLDER_P (arg))
6829 arg = save_expr (arg);
6830 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6831 fold_build2_loc (loc, GE_EXPR, type, arg,
6832 build_real (TREE_TYPE (arg),
6833 dconst0)),
6834 fold_build2_loc (loc, code, type, arg,
6835 build_real (TREE_TYPE (arg),
6836 c2)));
6841 return NULL_TREE;
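/* A hedged example of the sqrt cases above (X hypothetical):
   `sqrt (X) > 4.0' folds to `X > 16.0' (safe even with NaNs, since
   both are false when X is a NaN), while `sqrt (X) < 4.0' folds to
   `X < 16.0' only when NaNs are ignored, and otherwise to
   `X >= 0.0 && X < 16.0' via the save_expr path.  */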
6844 /* Subroutine of fold() that optimizes comparisons against Infinities,
6845 either +Inf or -Inf.
6847 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6848 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6849 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6851 The function returns the constant folded tree if a simplification
6852 can be made, and NULL_TREE otherwise. */
6854 static tree
6855 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6856 tree arg0, tree arg1)
6858 enum machine_mode mode;
6859 REAL_VALUE_TYPE max;
6860 tree temp;
6861 bool neg;
6863 mode = TYPE_MODE (TREE_TYPE (arg0));
6865 /* For negative infinity swap the sense of the comparison. */
6866 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6867 if (neg)
6868 code = swap_tree_comparison (code);
6870 switch (code)
6872 case GT_EXPR:
6873 /* x > +Inf is always false, if we ignore sNaNs. */
6874 if (HONOR_SNANS (mode))
6875 return NULL_TREE;
6876 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6878 case LE_EXPR:
6879 /* x <= +Inf is always true, if we don't care about NaNs. */
6880 if (! HONOR_NANS (mode))
6881 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6883 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6884 if (lang_hooks.decls.global_bindings_p () == 0
6885 && ! CONTAINS_PLACEHOLDER_P (arg0))
6887 arg0 = save_expr (arg0);
6888 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6890 break;
6892 case EQ_EXPR:
6893 case GE_EXPR:
6894 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6895 real_maxval (&max, neg, mode);
6896 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6897 arg0, build_real (TREE_TYPE (arg0), max));
6899 case LT_EXPR:
6900 /* x < +Inf is always equal to x <= DBL_MAX. */
6901 real_maxval (&max, neg, mode);
6902 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6903 arg0, build_real (TREE_TYPE (arg0), max));
6905 case NE_EXPR:
6906 /* x != +Inf is always equal to !(x > DBL_MAX). */
6907 real_maxval (&max, neg, mode);
6908 if (! HONOR_NANS (mode))
6909 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6910 arg0, build_real (TREE_TYPE (arg0), max));
6912 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6913 arg0, build_real (TREE_TYPE (arg0), max));
6914 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6916 default:
6917 break;
6920 return NULL_TREE;
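/* E.g. (hedged, for a hypothetical double X): `X < __builtin_inf ()'
   folds to `X <= DBL_MAX', and `X >= __builtin_inf ()' to
   `X > DBL_MAX'.  Comparisons against -Inf go through the swap at the
   top, so `X > -__builtin_inf ()' folds to `X >= -DBL_MAX'.  */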
6923 /* Subroutine of fold() that optimizes comparisons of a division by
6924 a nonzero integer constant against an integer constant, i.e.
6925 X/C1 op C2.
6927 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6928 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6929 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6931 The function returns the constant folded tree if a simplification
6932 can be made, and NULL_TREE otherwise. */
6934 static tree
6935 fold_div_compare (location_t loc,
6936 enum tree_code code, tree type, tree arg0, tree arg1)
6938 tree prod, tmp, hi, lo;
6939 tree arg00 = TREE_OPERAND (arg0, 0);
6940 tree arg01 = TREE_OPERAND (arg0, 1);
6941 unsigned HOST_WIDE_INT lpart;
6942 HOST_WIDE_INT hpart;
6943 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6944 bool neg_overflow;
6945 int overflow;
6947 /* We have to do this the hard way to detect unsigned overflow.
6948 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6949 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6950 TREE_INT_CST_HIGH (arg01),
6951 TREE_INT_CST_LOW (arg1),
6952 TREE_INT_CST_HIGH (arg1),
6953 &lpart, &hpart, unsigned_p);
6954 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6955 -1, overflow);
6956 neg_overflow = false;
6958 if (unsigned_p)
6960 tmp = int_const_binop (MINUS_EXPR, arg01,
6961 build_int_cst (TREE_TYPE (arg01), 1), 0);
6962 lo = prod;
6964 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6965 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6966 TREE_INT_CST_HIGH (prod),
6967 TREE_INT_CST_LOW (tmp),
6968 TREE_INT_CST_HIGH (tmp),
6969 &lpart, &hpart, unsigned_p);
6970 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6971 -1, overflow | TREE_OVERFLOW (prod));
6973 else if (tree_int_cst_sgn (arg01) >= 0)
6975 tmp = int_const_binop (MINUS_EXPR, arg01,
6976 build_int_cst (TREE_TYPE (arg01), 1), 0);
6977 switch (tree_int_cst_sgn (arg1))
6979 case -1:
6980 neg_overflow = true;
6981 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6982 hi = prod;
6983 break;
6985 case 0:
6986 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6987 hi = tmp;
6988 break;
6990 case 1:
6991 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6992 lo = prod;
6993 break;
6995 default:
6996 gcc_unreachable ();
6999 else
7001 /* A negative divisor reverses the relational operators. */
7002 code = swap_tree_comparison (code);
7004 tmp = int_const_binop (PLUS_EXPR, arg01,
7005 build_int_cst (TREE_TYPE (arg01), 1), 0);
7006 switch (tree_int_cst_sgn (arg1))
7008 case -1:
7009 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7010 lo = prod;
7011 break;
7013 case 0:
7014 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7015 lo = tmp;
7016 break;
7018 case 1:
7019 neg_overflow = true;
7020 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7021 hi = prod;
7022 break;
7024 default:
7025 gcc_unreachable ();
7029 switch (code)
7031 case EQ_EXPR:
7032 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7033 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7034 if (TREE_OVERFLOW (hi))
7035 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7036 if (TREE_OVERFLOW (lo))
7037 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7038 return build_range_check (loc, type, arg00, 1, lo, hi);
7040 case NE_EXPR:
7041 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7042 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7043 if (TREE_OVERFLOW (hi))
7044 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7045 if (TREE_OVERFLOW (lo))
7046 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7047 return build_range_check (loc, type, arg00, 0, lo, hi);
7049 case LT_EXPR:
7050 if (TREE_OVERFLOW (lo))
7052 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7053 return omit_one_operand_loc (loc, type, tmp, arg00);
7055 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7057 case LE_EXPR:
7058 if (TREE_OVERFLOW (hi))
7060 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7061 return omit_one_operand_loc (loc, type, tmp, arg00);
7063 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7065 case GT_EXPR:
7066 if (TREE_OVERFLOW (hi))
7068 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7069 return omit_one_operand_loc (loc, type, tmp, arg00);
7071 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7073 case GE_EXPR:
7074 if (TREE_OVERFLOW (lo))
7076 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7077 return omit_one_operand_loc (loc, type, tmp, arg00);
7079 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7081 default:
7082 break;
7085 return NULL_TREE;
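/* A worked (hedged) example for a hypothetical unsigned X:
   `X / 4 == 2' holds exactly for X in [8, 11], so it folds to the
   range check built above with lo == 8 and hi == 11; similarly
   `X / 4 > 2' folds to `X > 11'.  The TREE_OVERFLOW tests catch
   bounds that wrap around the type's range.  */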
7089 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7090 equality/inequality test, then return a simplified form of the test
7091 using a sign test.  Otherwise return NULL.  TYPE is the desired
7092 result type. */
7094 static tree
7095 fold_single_bit_test_into_sign_test (location_t loc,
7096 enum tree_code code, tree arg0, tree arg1,
7097 tree result_type)
7099 /* If this is testing a single bit, we can optimize the test. */
7100 if ((code == NE_EXPR || code == EQ_EXPR)
7101 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7102 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7104 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7105 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7106 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7108 if (arg00 != NULL_TREE
7109 /* This is only a win if casting to a signed type is cheap,
7110 i.e. when arg00's type is not a partial mode. */
7111 && TYPE_PRECISION (TREE_TYPE (arg00))
7112 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7114 tree stype = signed_type_for (TREE_TYPE (arg00));
7115 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7116 result_type,
7117 fold_convert_loc (loc, stype, arg00),
7118 build_int_cst (stype, 0));
7122 return NULL_TREE;
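/* E.g. (hedged) for a hypothetical 32-bit int X:
   `(X & 0x80000000) != 0' tests exactly the sign bit, so it folds to
   `X < 0', and the == 0 form folds to `X >= 0'.  */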
7125 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7126 equality/inequality test, then return a simplified form of
7127 the test using shifts and logical operations. Otherwise return
7128 NULL. TYPE is the desired result type. */
7130 tree
7131 fold_single_bit_test (location_t loc, enum tree_code code,
7132 tree arg0, tree arg1, tree result_type)
7134 /* If this is testing a single bit, we can optimize the test. */
7135 if ((code == NE_EXPR || code == EQ_EXPR)
7136 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7137 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7139 tree inner = TREE_OPERAND (arg0, 0);
7140 tree type = TREE_TYPE (arg0);
7141 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7142 enum machine_mode operand_mode = TYPE_MODE (type);
7143 int ops_unsigned;
7144 tree signed_type, unsigned_type, intermediate_type;
7145 tree tem, one;
7147 /* First, see if we can fold the single bit test into a sign-bit
7148 test. */
7149 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7150 result_type);
7151 if (tem)
7152 return tem;
7154 /* Otherwise we have (A & C) != 0 where C is a single bit,
7155 convert that into ((A >> C2) & 1), where C2 = log2(C).
7156 Similarly for (A & C) == 0. */
7158 /* If INNER is a right shift by a constant and it plus BITNUM does
7159 not overflow, adjust BITNUM and INNER. */
7160 if (TREE_CODE (inner) == RSHIFT_EXPR
7161 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7162 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7163 && bitnum < TYPE_PRECISION (type)
7164 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7165 bitnum - TYPE_PRECISION (type)))
7167 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7168 inner = TREE_OPERAND (inner, 0);
7171 /* If we are going to be able to omit the AND below, we must do our
7172 operations as unsigned. If we must use the AND, we have a choice.
7173 Normally unsigned is faster, but for some machines signed is. */
7174 #ifdef LOAD_EXTEND_OP
7175 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7176 && !flag_syntax_only) ? 0 : 1;
7177 #else
7178 ops_unsigned = 1;
7179 #endif
7181 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7182 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7183 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7184 inner = fold_convert_loc (loc, intermediate_type, inner);
7186 if (bitnum != 0)
7187 inner = build2 (RSHIFT_EXPR, intermediate_type,
7188 inner, size_int (bitnum));
7190 one = build_int_cst (intermediate_type, 1);
7192 if (code == EQ_EXPR)
7193 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7195 /* Put the AND last so it can combine with more things. */
7196 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7198 /* Make sure to return the proper type. */
7199 inner = fold_convert_loc (loc, result_type, inner);
7201 return inner;
7203 return NULL_TREE;
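/* A hedged sketch of the shift form above, for a hypothetical X:
   `(X & 8) != 0' becomes `((unsigned) X >> 3) & 1', and the == 0
   variant XORs in 1 before the AND, giving
   `(((unsigned) X >> 3) ^ 1) & 1'.  */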
7206 /* Check whether we are allowed to reorder operands arg0 and arg1,
7207 such that the evaluation of arg1 occurs before arg0. */
7209 static bool
7210 reorder_operands_p (const_tree arg0, const_tree arg1)
7212 if (! flag_evaluation_order)
7213 return true;
7214 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7215 return true;
7216 return ! TREE_SIDE_EFFECTS (arg0)
7217 && ! TREE_SIDE_EFFECTS (arg1);
7220 /* Test whether it is preferable to swap two operands, ARG0 and
7221 ARG1, for example because ARG0 is an integer constant and ARG1
7222 isn't. If REORDER is true, only recommend swapping if we can
7223 evaluate the operands in reverse order. */
7225 bool
7226 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7228 STRIP_SIGN_NOPS (arg0);
7229 STRIP_SIGN_NOPS (arg1);
7231 if (TREE_CODE (arg1) == INTEGER_CST)
7232 return 0;
7233 if (TREE_CODE (arg0) == INTEGER_CST)
7234 return 1;
7236 if (TREE_CODE (arg1) == REAL_CST)
7237 return 0;
7238 if (TREE_CODE (arg0) == REAL_CST)
7239 return 1;
7241 if (TREE_CODE (arg1) == FIXED_CST)
7242 return 0;
7243 if (TREE_CODE (arg0) == FIXED_CST)
7244 return 1;
7246 if (TREE_CODE (arg1) == COMPLEX_CST)
7247 return 0;
7248 if (TREE_CODE (arg0) == COMPLEX_CST)
7249 return 1;
7251 if (TREE_CONSTANT (arg1))
7252 return 0;
7253 if (TREE_CONSTANT (arg0))
7254 return 1;
7256 if (optimize_function_for_size_p (cfun))
7257 return 0;
7259 if (reorder && flag_evaluation_order
7260 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7261 return 0;
7263 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7264 for commutative and comparison operators. Ensuring a canonical
7265 form allows the optimizers to find additional redundancies without
7266 having to explicitly check for both orderings. */
7267 if (TREE_CODE (arg0) == SSA_NAME
7268 && TREE_CODE (arg1) == SSA_NAME
7269 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7270 return 1;
7272 /* Put SSA_NAMEs last. */
7273 if (TREE_CODE (arg1) == SSA_NAME)
7274 return 0;
7275 if (TREE_CODE (arg0) == SSA_NAME)
7276 return 1;
7278 /* Put variables last. */
7279 if (DECL_P (arg1))
7280 return 0;
7281 if (DECL_P (arg0))
7282 return 1;
7284 return 0;
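/* E.g. for `5 < X' the INTEGER_CST test above returns 1, and the
   callers of this predicate elsewhere in fold then canonicalize the
   tree to `X > 5' by swapping the operands and the comparison code.
   (Illustrative only.)  */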
7287 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7288 ARG0 is extended to a wider type. */
7290 static tree
7291 fold_widened_comparison (location_t loc, enum tree_code code,
7292 tree type, tree arg0, tree arg1)
7294 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7295 tree arg1_unw;
7296 tree shorter_type, outer_type;
7297 tree min, max;
7298 bool above, below;
7300 if (arg0_unw == arg0)
7301 return NULL_TREE;
7302 shorter_type = TREE_TYPE (arg0_unw);
7304 #ifdef HAVE_canonicalize_funcptr_for_compare
7305 /* Disable this optimization if we're casting a function pointer
7306 type on targets that require function pointer canonicalization. */
7307 if (HAVE_canonicalize_funcptr_for_compare
7308 && TREE_CODE (shorter_type) == POINTER_TYPE
7309 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7310 return NULL_TREE;
7311 #endif
7313 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7314 return NULL_TREE;
7316 arg1_unw = get_unwidened (arg1, NULL_TREE);
7318 /* If possible, express the comparison in the shorter mode. */
7319 if ((code == EQ_EXPR || code == NE_EXPR
7320 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7321 && (TREE_TYPE (arg1_unw) == shorter_type
7322 || ((TYPE_PRECISION (shorter_type)
7323 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7324 && (TYPE_UNSIGNED (shorter_type)
7325 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7326 || (TREE_CODE (arg1_unw) == INTEGER_CST
7327 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7328 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7329 && int_fits_type_p (arg1_unw, shorter_type))))
7330 return fold_build2_loc (loc, code, type, arg0_unw,
7331 fold_convert_loc (loc, shorter_type, arg1_unw));
7333 if (TREE_CODE (arg1_unw) != INTEGER_CST
7334 || TREE_CODE (shorter_type) != INTEGER_TYPE
7335 || !int_fits_type_p (arg1_unw, shorter_type))
7336 return NULL_TREE;
7338 /* If we are comparing with an integer that does not fit into the range
7339 of the shorter type, the result is known. */
7340 outer_type = TREE_TYPE (arg1_unw);
7341 min = lower_bound_in_type (outer_type, shorter_type);
7342 max = upper_bound_in_type (outer_type, shorter_type);
7344 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7345 max, arg1_unw));
7346 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7347 arg1_unw, min));
7349 switch (code)
7351 case EQ_EXPR:
7352 if (above || below)
7353 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7354 break;
7356 case NE_EXPR:
7357 if (above || below)
7358 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7359 break;
7361 case LT_EXPR:
7362 case LE_EXPR:
7363 if (above)
7364 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7365 else if (below)
7366 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7368 case GT_EXPR:
7369 case GE_EXPR:
7370 if (above)
7371 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7372 else if (below)
7373 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7375 default:
7376 break;
7379 return NULL_TREE;
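/* For illustration (hypothetical operands): given
     short s;
   the comparison (int) s == 100000 folds to 0 because 100000 lies
   above the range of short, while (int) s < 100000 folds to 1; in
   both cases s is kept for its side effects via
   omit_one_operand_loc.  */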
7382 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7383 ARG0 just the signedness is changed. */
7385 static tree
7386 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7387 tree arg0, tree arg1)
7389 tree arg0_inner;
7390 tree inner_type, outer_type;
7392 if (!CONVERT_EXPR_P (arg0))
7393 return NULL_TREE;
7395 outer_type = TREE_TYPE (arg0);
7396 arg0_inner = TREE_OPERAND (arg0, 0);
7397 inner_type = TREE_TYPE (arg0_inner);
7399 #ifdef HAVE_canonicalize_funcptr_for_compare
7400 /* Disable this optimization if we're casting a function pointer
7401 type on targets that require function pointer canonicalization. */
7402 if (HAVE_canonicalize_funcptr_for_compare
7403 && TREE_CODE (inner_type) == POINTER_TYPE
7404 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7405 return NULL_TREE;
7406 #endif
7408 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7409 return NULL_TREE;
7411 if (TREE_CODE (arg1) != INTEGER_CST
7412 && !(CONVERT_EXPR_P (arg1)
7413 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7414 return NULL_TREE;
7416 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7417 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7418 && code != NE_EXPR
7419 && code != EQ_EXPR)
7420 return NULL_TREE;
7422 if (TREE_CODE (arg1) == INTEGER_CST)
7423 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7424 TREE_INT_CST_HIGH (arg1), 0,
7425 TREE_OVERFLOW (arg1));
7426 else
7427 arg1 = fold_convert_loc (loc, inner_type, arg1);
7429 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7432 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7433 the step of the array. Reconstructs s and delta in the case of s *
7434 delta being an integer constant (and thus already folded). ADDR is
7435 the address. MULT is the multiplicative expression. If the
7436 function succeeds, the new address expression is returned.
7437 Otherwise NULL_TREE is returned. LOC is the location of the
7438 resulting expression. */
7440 static tree
7441 try_move_mult_to_index (location_t loc, tree addr, tree op1)
7443 tree s, delta, step;
7444 tree ref = TREE_OPERAND (addr, 0), pref;
7445 tree ret, pos;
7446 tree itype;
7447 bool mdim = false;
7449 /* Strip the nops that might be added when converting op1 to sizetype. */
7450 STRIP_NOPS (op1);
7452 /* Canonicalize op1 into a possibly non-constant delta
7453 and an INTEGER_CST s. */
7454 if (TREE_CODE (op1) == MULT_EXPR)
7456 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7458 STRIP_NOPS (arg0);
7459 STRIP_NOPS (arg1);
7461 if (TREE_CODE (arg0) == INTEGER_CST)
7463 s = arg0;
7464 delta = arg1;
7466 else if (TREE_CODE (arg1) == INTEGER_CST)
7468 s = arg1;
7469 delta = arg0;
7471 else
7472 return NULL_TREE;
7474 else if (TREE_CODE (op1) == INTEGER_CST)
7476 delta = op1;
7477 s = NULL_TREE;
7479 else
7481 /* Pretend op1 is delta * 1. */
7482 delta = op1;
7483 s = integer_one_node;
7486 for (;; ref = TREE_OPERAND (ref, 0))
7488 if (TREE_CODE (ref) == ARRAY_REF)
7490 /* Remember if this was a multi-dimensional array. */
7491 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7492 mdim = true;
7494 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7495 if (! itype)
7496 continue;
7498 step = array_ref_element_size (ref);
7499 if (TREE_CODE (step) != INTEGER_CST)
7500 continue;
7502 if (s)
7504 if (! tree_int_cst_equal (step, s))
7505 continue;
7507 else
7509 /* Check whether delta is a multiple of step. */
7510 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7511 if (! tmp)
7512 continue;
7513 delta = tmp;
7516 /* Only fold here if we can verify we do not overflow one
7517 dimension of a multi-dimensional array. */
7518 if (mdim)
7520 tree tmp;
7522 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7523 || !INTEGRAL_TYPE_P (itype)
7524 || !TYPE_MAX_VALUE (itype)
7525 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7526 continue;
7528 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7529 fold_convert_loc (loc, itype,
7530 TREE_OPERAND (ref, 1)),
7531 fold_convert_loc (loc, itype, delta));
7532 if (!tmp
7533 || TREE_CODE (tmp) != INTEGER_CST
7534 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7535 continue;
7538 break;
7540 else
7541 mdim = false;
7543 if (!handled_component_p (ref))
7544 return NULL_TREE;
7547 /* We found a suitable array reference. Copy everything up to it,
7548 and replace the index. */
7550 pref = TREE_OPERAND (addr, 0);
7551 ret = copy_node (pref);
7552 SET_EXPR_LOCATION (ret, loc);
7553 pos = ret;
7555 while (pref != ref)
7557 pref = TREE_OPERAND (pref, 0);
7558 TREE_OPERAND (pos, 0) = copy_node (pref);
7559 pos = TREE_OPERAND (pos, 0);
7562 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7563 fold_convert_loc (loc, itype,
7564 TREE_OPERAND (pos, 1)),
7565 fold_convert_loc (loc, itype, delta));
7567 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
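/* For illustration (hypothetical operands): with
     int a[10];
   and sizeof (int) == 4, the address &a[2] p+ d * 4 is rewritten by
   the code above into &a[2 + d], folding the multiplication back
   into the array index.  */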
7571 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7572 means A >= Y && A != MAX, but in this case we know that
7573 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7575 static tree
7576 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7578 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7580 if (TREE_CODE (bound) == LT_EXPR)
7581 a = TREE_OPERAND (bound, 0);
7582 else if (TREE_CODE (bound) == GT_EXPR)
7583 a = TREE_OPERAND (bound, 1);
7584 else
7585 return NULL_TREE;
7587 typea = TREE_TYPE (a);
7588 if (!INTEGRAL_TYPE_P (typea)
7589 && !POINTER_TYPE_P (typea))
7590 return NULL_TREE;
7592 if (TREE_CODE (ineq) == LT_EXPR)
7594 a1 = TREE_OPERAND (ineq, 1);
7595 y = TREE_OPERAND (ineq, 0);
7597 else if (TREE_CODE (ineq) == GT_EXPR)
7599 a1 = TREE_OPERAND (ineq, 0);
7600 y = TREE_OPERAND (ineq, 1);
7602 else
7603 return NULL_TREE;
7605 if (TREE_TYPE (a1) != typea)
7606 return NULL_TREE;
7608 if (POINTER_TYPE_P (typea))
7610 /* Convert the pointers to integers before taking the difference. */
7611 tree ta = fold_convert_loc (loc, ssizetype, a);
7612 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7613 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7615 else
7616 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7618 if (!diff || !integer_onep (diff))
7619 return NULL_TREE;
7621 return fold_build2_loc (loc, GE_EXPR, type, a, y);
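/* For illustration (hypothetical operands): for unsigned a, x, y,
     a < x && a + 1 > y
   has its second test rewritten to a >= y; the bound a < x
   guarantees a != UINT_MAX, so a + 1 cannot wrap around.  */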
7624 /* Fold a sum or difference of at least one multiplication.
7625 Returns the folded tree or NULL if no simplification could be made. */
7627 static tree
7628 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7629 tree arg0, tree arg1)
7631 tree arg00, arg01, arg10, arg11;
7632 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7634 /* (A * C) +- (B * C) -> (A+-B) * C.
7635 (A * C) +- A -> A * (C+-1).
7636 We are most concerned about the case where C is a constant,
7637 but other combinations show up during loop reduction. Since
7638 it is not difficult, try all four possibilities. */
7640 if (TREE_CODE (arg0) == MULT_EXPR)
7642 arg00 = TREE_OPERAND (arg0, 0);
7643 arg01 = TREE_OPERAND (arg0, 1);
7645 else if (TREE_CODE (arg0) == INTEGER_CST)
7647 arg00 = build_one_cst (type);
7648 arg01 = arg0;
7650 else
7652 /* We cannot generate constant 1 for fract. */
7653 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7654 return NULL_TREE;
7655 arg00 = arg0;
7656 arg01 = build_one_cst (type);
7658 if (TREE_CODE (arg1) == MULT_EXPR)
7660 arg10 = TREE_OPERAND (arg1, 0);
7661 arg11 = TREE_OPERAND (arg1, 1);
7663 else if (TREE_CODE (arg1) == INTEGER_CST)
7665 arg10 = build_one_cst (type);
7666 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7667 the purpose of this canonicalization. */
7668 if (TREE_INT_CST_HIGH (arg1) == -1
7669 && negate_expr_p (arg1)
7670 && code == PLUS_EXPR)
7672 arg11 = negate_expr (arg1);
7673 code = MINUS_EXPR;
7675 else
7676 arg11 = arg1;
7678 else
7680 /* We cannot generate constant 1 for fract. */
7681 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7682 return NULL_TREE;
7683 arg10 = arg1;
7684 arg11 = build_one_cst (type);
7686 same = NULL_TREE;
7688 if (operand_equal_p (arg01, arg11, 0))
7689 same = arg01, alt0 = arg00, alt1 = arg10;
7690 else if (operand_equal_p (arg00, arg10, 0))
7691 same = arg00, alt0 = arg01, alt1 = arg11;
7692 else if (operand_equal_p (arg00, arg11, 0))
7693 same = arg00, alt0 = arg01, alt1 = arg10;
7694 else if (operand_equal_p (arg01, arg10, 0))
7695 same = arg01, alt0 = arg00, alt1 = arg11;
7697 /* No identical multiplicands; see if we can find a common
7698 power-of-two factor in non-power-of-two multiplies. This
7699 can help in multi-dimensional array access. */
7700 else if (host_integerp (arg01, 0)
7701 && host_integerp (arg11, 0))
7703 HOST_WIDE_INT int01, int11, tmp;
7704 bool swap = false;
7705 tree maybe_same;
7706 int01 = TREE_INT_CST_LOW (arg01);
7707 int11 = TREE_INT_CST_LOW (arg11);
7709 /* Move min of absolute values to int11. */
7710 if ((int01 >= 0 ? int01 : -int01)
7711 < (int11 >= 0 ? int11 : -int11))
7713 tmp = int01, int01 = int11, int11 = tmp;
7714 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7715 maybe_same = arg01;
7716 swap = true;
7718 else
7719 maybe_same = arg11;
7721 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7722 /* The remainder should not be a constant, otherwise we
7723 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7724 increase the number of multiplications needed. */
7725 && TREE_CODE (arg10) != INTEGER_CST)
7727 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7728 build_int_cst (TREE_TYPE (arg00),
7729 int01 / int11));
7730 alt1 = arg10;
7731 same = maybe_same;
7732 if (swap)
7733 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7737 if (same)
7738 return fold_build2_loc (loc, MULT_EXPR, type,
7739 fold_build2_loc (loc, code, type,
7740 fold_convert_loc (loc, type, alt0),
7741 fold_convert_loc (loc, type, alt1)),
7742 fold_convert_loc (loc, type, same));
7744 return NULL_TREE;
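/* For illustration (hypothetical operands), the folds above give:
     a * 3 + b * 3   ->  (a + b) * 3      (identical multiplicands)
     a * 7 + a       ->  a * 8            (the A * (C+-1) case)
     i * 12 + j * 4  ->  (i * 3 + j) * 4  (common power-of-two
                                           factor).  */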
7747 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7748 specified by EXPR into the buffer PTR of length LEN bytes.
7749 Return the number of bytes placed in the buffer, or zero
7750 upon failure. */
7752 static int
7753 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7755 tree type = TREE_TYPE (expr);
7756 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7757 int byte, offset, word, words;
7758 unsigned char value;
7760 if (total_bytes > len)
7761 return 0;
7762 words = total_bytes / UNITS_PER_WORD;
7764 for (byte = 0; byte < total_bytes; byte++)
7766 int bitpos = byte * BITS_PER_UNIT;
7767 if (bitpos < HOST_BITS_PER_WIDE_INT)
7768 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7769 else
7770 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7771 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7773 if (total_bytes > UNITS_PER_WORD)
7775 word = byte / UNITS_PER_WORD;
7776 if (WORDS_BIG_ENDIAN)
7777 word = (words - 1) - word;
7778 offset = word * UNITS_PER_WORD;
7779 if (BYTES_BIG_ENDIAN)
7780 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7781 else
7782 offset += byte % UNITS_PER_WORD;
7784 else
7785 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7786 ptr[offset] = value;
7788 return total_bytes;
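/* For illustration (hypothetical host/target): on a little-endian
   target with 32-bit int, the INTEGER_CST 0x01020304 is encoded
   into PTR as the bytes 04 03 02 01; a big-endian target stores
   01 02 03 04 instead.  */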
7792 /* Subroutine of native_encode_expr. Encode the REAL_CST
7793 specified by EXPR into the buffer PTR of length LEN bytes.
7794 Return the number of bytes placed in the buffer, or zero
7795 upon failure. */
7797 static int
7798 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7800 tree type = TREE_TYPE (expr);
7801 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7802 int byte, offset, word, words, bitpos;
7803 unsigned char value;
7805 /* There are always 32 bits in each long, no matter the size of
7806 the host's long. We handle floating point representations with
7807 up to 192 bits. */
7808 long tmp[6];
7810 if (total_bytes > len)
7811 return 0;
7812 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7814 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7816 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7817 bitpos += BITS_PER_UNIT)
7819 byte = (bitpos / BITS_PER_UNIT) & 3;
7820 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7822 if (UNITS_PER_WORD < 4)
7824 word = byte / UNITS_PER_WORD;
7825 if (WORDS_BIG_ENDIAN)
7826 word = (words - 1) - word;
7827 offset = word * UNITS_PER_WORD;
7828 if (BYTES_BIG_ENDIAN)
7829 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7830 else
7831 offset += byte % UNITS_PER_WORD;
7833 else
7834 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7835 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7837 return total_bytes;
7840 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7841 specified by EXPR into the buffer PTR of length LEN bytes.
7842 Return the number of bytes placed in the buffer, or zero
7843 upon failure. */
7845 static int
7846 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7848 int rsize, isize;
7849 tree part;
7851 part = TREE_REALPART (expr);
7852 rsize = native_encode_expr (part, ptr, len);
7853 if (rsize == 0)
7854 return 0;
7855 part = TREE_IMAGPART (expr);
7856 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7857 if (isize != rsize)
7858 return 0;
7859 return rsize + isize;
7863 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7864 specified by EXPR into the buffer PTR of length LEN bytes.
7865 Return the number of bytes placed in the buffer, or zero
7866 upon failure. */
7868 static int
7869 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7871 int i, size, offset, count;
7872 tree itype, elem, elements;
7874 offset = 0;
7875 elements = TREE_VECTOR_CST_ELTS (expr);
7876 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7877 itype = TREE_TYPE (TREE_TYPE (expr));
7878 size = GET_MODE_SIZE (TYPE_MODE (itype));
7879 for (i = 0; i < count; i++)
7881 if (elements)
7883 elem = TREE_VALUE (elements);
7884 elements = TREE_CHAIN (elements);
7886 else
7887 elem = NULL_TREE;
7889 if (elem)
7891 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7892 return 0;
7894 else
7896 if (offset + size > len)
7897 return 0;
7898 memset (ptr+offset, 0, size);
7900 offset += size;
7902 return offset;
7906 /* Subroutine of native_encode_expr. Encode the STRING_CST
7907 specified by EXPR into the buffer PTR of length LEN bytes.
7908 Return the number of bytes placed in the buffer, or zero
7909 upon failure. */
7911 static int
7912 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7914 tree type = TREE_TYPE (expr);
7915 HOST_WIDE_INT total_bytes;
7917 if (TREE_CODE (type) != ARRAY_TYPE
7918 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7919 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7920 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7921 return 0;
7922 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7923 if (total_bytes > len)
7924 return 0;
7925 if (TREE_STRING_LENGTH (expr) < total_bytes)
7927 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7928 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7929 total_bytes - TREE_STRING_LENGTH (expr));
7931 else
7932 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7933 return total_bytes;
7937 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7938 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7939 buffer PTR of length LEN bytes. Return the number of bytes
7940 placed in the buffer, or zero upon failure. */
7942 int
7943 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7945 switch (TREE_CODE (expr))
7947 case INTEGER_CST:
7948 return native_encode_int (expr, ptr, len);
7950 case REAL_CST:
7951 return native_encode_real (expr, ptr, len);
7953 case COMPLEX_CST:
7954 return native_encode_complex (expr, ptr, len);
7956 case VECTOR_CST:
7957 return native_encode_vector (expr, ptr, len);
7959 case STRING_CST:
7960 return native_encode_string (expr, ptr, len);
7962 default:
7963 return 0;
7968 /* Subroutine of native_interpret_expr. Interpret the contents of
7969 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7970 If the buffer cannot be interpreted, return NULL_TREE. */
7972 static tree
7973 native_interpret_int (tree type, const unsigned char *ptr, int len)
7975 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7976 int byte, offset, word, words;
7977 unsigned char value;
7978 unsigned HOST_WIDE_INT lo = 0;
7979 HOST_WIDE_INT hi = 0;
7981 if (total_bytes > len)
7982 return NULL_TREE;
7983 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7984 return NULL_TREE;
7985 words = total_bytes / UNITS_PER_WORD;
7987 for (byte = 0; byte < total_bytes; byte++)
7989 int bitpos = byte * BITS_PER_UNIT;
7990 if (total_bytes > UNITS_PER_WORD)
7992 word = byte / UNITS_PER_WORD;
7993 if (WORDS_BIG_ENDIAN)
7994 word = (words - 1) - word;
7995 offset = word * UNITS_PER_WORD;
7996 if (BYTES_BIG_ENDIAN)
7997 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7998 else
7999 offset += byte % UNITS_PER_WORD;
8001 else
8002 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8003 value = ptr[offset];
8005 if (bitpos < HOST_BITS_PER_WIDE_INT)
8006 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
8007 else
8008 hi |= (unsigned HOST_WIDE_INT) value
8009 << (bitpos - HOST_BITS_PER_WIDE_INT);
8012 return build_int_cst_wide_type (type, lo, hi);
8016 /* Subroutine of native_interpret_expr. Interpret the contents of
8017 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8018 If the buffer cannot be interpreted, return NULL_TREE. */
8020 static tree
8021 native_interpret_real (tree type, const unsigned char *ptr, int len)
8023 enum machine_mode mode = TYPE_MODE (type);
8024 int total_bytes = GET_MODE_SIZE (mode);
8025 int byte, offset, word, words, bitpos;
8026 unsigned char value;
8027 /* There are always 32 bits in each long, no matter the size of
8028 the host's long. We handle floating point representations with
8029 up to 192 bits. */
8030 REAL_VALUE_TYPE r;
8031 long tmp[6];
8033 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8034 if (total_bytes > len || total_bytes > 24)
8035 return NULL_TREE;
8036 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8038 memset (tmp, 0, sizeof (tmp));
8039 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8040 bitpos += BITS_PER_UNIT)
8042 byte = (bitpos / BITS_PER_UNIT) & 3;
8043 if (UNITS_PER_WORD < 4)
8045 word = byte / UNITS_PER_WORD;
8046 if (WORDS_BIG_ENDIAN)
8047 word = (words - 1) - word;
8048 offset = word * UNITS_PER_WORD;
8049 if (BYTES_BIG_ENDIAN)
8050 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8051 else
8052 offset += byte % UNITS_PER_WORD;
8054 else
8055 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
8056 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8058 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8061 real_from_target (&r, tmp, mode);
8062 return build_real (type, r);
8066 /* Subroutine of native_interpret_expr. Interpret the contents of
8067 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8068 If the buffer cannot be interpreted, return NULL_TREE. */
8070 static tree
8071 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8073 tree etype, rpart, ipart;
8074 int size;
8076 etype = TREE_TYPE (type);
8077 size = GET_MODE_SIZE (TYPE_MODE (etype));
8078 if (size * 2 > len)
8079 return NULL_TREE;
8080 rpart = native_interpret_expr (etype, ptr, size);
8081 if (!rpart)
8082 return NULL_TREE;
8083 ipart = native_interpret_expr (etype, ptr+size, size);
8084 if (!ipart)
8085 return NULL_TREE;
8086 return build_complex (type, rpart, ipart);
8090 /* Subroutine of native_interpret_expr. Interpret the contents of
8091 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8092 If the buffer cannot be interpreted, return NULL_TREE. */
8094 static tree
8095 native_interpret_vector (tree type, const unsigned char *ptr, int len)
8097 tree etype, elem, elements;
8098 int i, size, count;
8100 etype = TREE_TYPE (type);
8101 size = GET_MODE_SIZE (TYPE_MODE (etype));
8102 count = TYPE_VECTOR_SUBPARTS (type);
8103 if (size * count > len)
8104 return NULL_TREE;
8106 elements = NULL_TREE;
8107 for (i = count - 1; i >= 0; i--)
8109 elem = native_interpret_expr (etype, ptr+(i*size), size);
8110 if (!elem)
8111 return NULL_TREE;
8112 elements = tree_cons (NULL_TREE, elem, elements);
8114 return build_vector (type, elements);
8118 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8119 the buffer PTR of length LEN as a constant of type TYPE. For
8120 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8121 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8122 return NULL_TREE. */
8124 tree
8125 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8127 switch (TREE_CODE (type))
8129 case INTEGER_TYPE:
8130 case ENUMERAL_TYPE:
8131 case BOOLEAN_TYPE:
8132 return native_interpret_int (type, ptr, len);
8134 case REAL_TYPE:
8135 return native_interpret_real (type, ptr, len);
8137 case COMPLEX_TYPE:
8138 return native_interpret_complex (type, ptr, len);
8140 case VECTOR_TYPE:
8141 return native_interpret_vector (type, ptr, len);
8143 default:
8144 return NULL_TREE;
8149 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8150 TYPE at compile-time. If we're unable to perform the conversion
8151 return NULL_TREE. */
8153 static tree
8154 fold_view_convert_expr (tree type, tree expr)
8156 /* We support up to 512-bit values (for V8DFmode). */
8157 unsigned char buffer[64];
8158 int len;
8160 /* Check that the host and target are sane. */
8161 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8162 return NULL_TREE;
8164 len = native_encode_expr (expr, buffer, sizeof (buffer));
8165 if (len == 0)
8166 return NULL_TREE;
8168 return native_interpret_expr (type, buffer, len);
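/* For illustration (hypothetical operands): on a target where float
   is IEEE single precision, VIEW_CONVERT_EXPR<int>(1.0f) is folded
   at compile time by encoding the REAL_CST into BUFFER and
   reinterpreting the bytes, yielding 0x3f800000.  */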
8171 /* Build an expression for the address of T. Folds away INDIRECT_REF
8172 to avoid confusing the gimplify process. */
8174 tree
8175 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8177 /* The size of the object is not relevant when talking about its address. */
8178 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8179 t = TREE_OPERAND (t, 0);
8181 /* Note: doesn't apply to ALIGN_INDIRECT_REF. */
8182 if (TREE_CODE (t) == INDIRECT_REF
8183 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
8185 t = TREE_OPERAND (t, 0);
8187 if (TREE_TYPE (t) != ptrtype)
8189 t = build1 (NOP_EXPR, ptrtype, t);
8190 SET_EXPR_LOCATION (t, loc);
8193 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8195 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8197 if (TREE_TYPE (t) != ptrtype)
8198 t = fold_convert_loc (loc, ptrtype, t);
8200 else
8202 t = build1 (ADDR_EXPR, ptrtype, t);
8203 SET_EXPR_LOCATION (t, loc);
8206 return t;
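/* For illustration (hypothetical operands): with int *p, building
   the address of *p yields p itself (wrapped in a NOP_EXPR when
   PTRTYPE differs) rather than an ADDR_EXPR of an INDIRECT_REF.  */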
8209 /* Build an expression for the address of T. */
8211 tree
8212 build_fold_addr_expr_loc (location_t loc, tree t)
8214 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8216 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8219 /* Fold a unary expression of code CODE and type TYPE with operand
8220 OP0. Return the folded expression if folding is successful.
8221 Otherwise, return NULL_TREE. */
8223 tree
8224 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8226 tree tem;
8227 tree arg0;
8228 enum tree_code_class kind = TREE_CODE_CLASS (code);
8230 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8231 && TREE_CODE_LENGTH (code) == 1);
8233 arg0 = op0;
8234 if (arg0)
8236 if (CONVERT_EXPR_CODE_P (code)
8237 || code == FLOAT_EXPR || code == ABS_EXPR)
8239 /* Don't use STRIP_NOPS, because signedness of argument type
8240 matters. */
8241 STRIP_SIGN_NOPS (arg0);
8243 else
8245 /* Strip any conversions that don't change the mode. This
8246 is safe for every expression, except for a comparison
8247 expression because its signedness is derived from its
8248 operands.
8250 Note that this is done as an internal manipulation within
8251 the constant folder, in order to find the simplest
8252 representation of the arguments so that their form can be
8253 studied. In any case, the appropriate type conversions
8254 should be put back in the tree that will get out of the
8255 constant folder. */
8256 STRIP_NOPS (arg0);
8260 if (TREE_CODE_CLASS (code) == tcc_unary)
8262 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8263 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8264 fold_build1_loc (loc, code, type,
8265 fold_convert_loc (loc, TREE_TYPE (op0),
8266 TREE_OPERAND (arg0, 1))));
8267 else if (TREE_CODE (arg0) == COND_EXPR)
8269 tree arg01 = TREE_OPERAND (arg0, 1);
8270 tree arg02 = TREE_OPERAND (arg0, 2);
8271 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8272 arg01 = fold_build1_loc (loc, code, type,
8273 fold_convert_loc (loc,
8274 TREE_TYPE (op0), arg01));
8275 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8276 arg02 = fold_build1_loc (loc, code, type,
8277 fold_convert_loc (loc,
8278 TREE_TYPE (op0), arg02));
8279 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8280 arg01, arg02);
8282 /* If this was a conversion, and all we did was to move it
8283 inside the COND_EXPR, bring it back out. But leave it if
8284 it is a conversion from integer to integer and the
8285 result precision is no wider than a word since such a
8286 conversion is cheap and may be optimized away by combine,
8287 while it couldn't if it were outside the COND_EXPR. Then return
8288 so we don't get into an infinite recursion loop taking the
8289 conversion out and then back in. */
8291 if ((CONVERT_EXPR_CODE_P (code)
8292 || code == NON_LVALUE_EXPR)
8293 && TREE_CODE (tem) == COND_EXPR
8294 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8295 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8296 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8297 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8298 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8299 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8300 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8301 && (INTEGRAL_TYPE_P
8302 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8303 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8304 || flag_syntax_only))
8306 tem = build1 (code, type,
8307 build3 (COND_EXPR,
8308 TREE_TYPE (TREE_OPERAND
8309 (TREE_OPERAND (tem, 1), 0)),
8310 TREE_OPERAND (tem, 0),
8311 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8312 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8313 SET_EXPR_LOCATION (tem, loc);
8315 return tem;
8317 else if (COMPARISON_CLASS_P (arg0))
8319 if (TREE_CODE (type) == BOOLEAN_TYPE)
8321 arg0 = copy_node (arg0);
8322 TREE_TYPE (arg0) = type;
8323 return arg0;
8325 else if (TREE_CODE (type) != INTEGER_TYPE)
8326 return fold_build3_loc (loc, COND_EXPR, type, arg0,
8327 fold_build1_loc (loc, code, type,
8328 integer_one_node),
8329 fold_build1_loc (loc, code, type,
8330 integer_zero_node));
8334 switch (code)
8336 case PAREN_EXPR:
8337 /* Re-association barriers around constants and other re-association
8338 barriers can be removed. */
8339 if (CONSTANT_CLASS_P (op0)
8340 || TREE_CODE (op0) == PAREN_EXPR)
8341 return fold_convert_loc (loc, type, op0);
8342 return NULL_TREE;
8344 CASE_CONVERT:
8345 case FLOAT_EXPR:
8346 case FIX_TRUNC_EXPR:
8347 if (TREE_TYPE (op0) == type)
8348 return op0;
8350 /* If we have (type) (a CMP b) and type is an integral type, return
8351 new expression involving the new type. */
8352 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8353 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8354 TREE_OPERAND (op0, 1));
8356 /* Handle cases of two conversions in a row. */
8357 if (CONVERT_EXPR_P (op0))
8359 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8360 tree inter_type = TREE_TYPE (op0);
8361 int inside_int = INTEGRAL_TYPE_P (inside_type);
8362 int inside_ptr = POINTER_TYPE_P (inside_type);
8363 int inside_float = FLOAT_TYPE_P (inside_type);
8364 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8365 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8366 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8367 int inter_int = INTEGRAL_TYPE_P (inter_type);
8368 int inter_ptr = POINTER_TYPE_P (inter_type);
8369 int inter_float = FLOAT_TYPE_P (inter_type);
8370 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8371 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8372 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8373 int final_int = INTEGRAL_TYPE_P (type);
8374 int final_ptr = POINTER_TYPE_P (type);
8375 int final_float = FLOAT_TYPE_P (type);
8376 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8377 unsigned int final_prec = TYPE_PRECISION (type);
8378 int final_unsignedp = TYPE_UNSIGNED (type);
8380 /* In addition to the cases of two conversions in a row
8381 handled below, if we are converting something to its own
8382 type via an object of identical or wider precision, neither
8383 conversion is needed. */
8384 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8385 && (((inter_int || inter_ptr) && final_int)
8386 || (inter_float && final_float))
8387 && inter_prec >= final_prec)
8388 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8390 /* Likewise, if the intermediate and initial types are either both
8391 float or both integer, we don't need the middle conversion if the
8392 former is wider than the latter and doesn't change the signedness
8393 (for integers). Avoid this if the final type is a pointer since
8394 then we sometimes need the middle conversion. Likewise if the
8395 final type has a precision not equal to the size of its mode. */
8396 if (((inter_int && inside_int)
8397 || (inter_float && inside_float)
8398 || (inter_vec && inside_vec))
8399 && inter_prec >= inside_prec
8400 && (inter_float || inter_vec
8401 || inter_unsignedp == inside_unsignedp)
8402 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8403 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8404 && ! final_ptr
8405 && (! final_vec || inter_prec == inside_prec))
8406 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8408 /* If we have a sign-extension of a zero-extended value, we can
8409 replace that by a single zero-extension. */
8410 if (inside_int && inter_int && final_int
8411 && inside_prec < inter_prec && inter_prec < final_prec
8412 && inside_unsignedp && !inter_unsignedp)
8413 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8415 /* Two conversions in a row are not needed unless:
8416 - some conversion is floating-point (overstrict for now), or
8417 - some conversion is a vector (overstrict for now), or
8418 - the intermediate type is narrower than both initial and
8419 final, or
8420 - the intermediate type and innermost type differ in signedness,
8421 and the outermost type is wider than the intermediate, or
8422 - the initial type is a pointer type and the precisions of the
8423 intermediate and final types differ, or
8424 - the final type is a pointer type and the precisions of the
8425 initial and intermediate types differ. */
8426 if (! inside_float && ! inter_float && ! final_float
8427 && ! inside_vec && ! inter_vec && ! final_vec
8428 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8429 && ! (inside_int && inter_int
8430 && inter_unsignedp != inside_unsignedp
8431 && inter_prec < final_prec)
8432 && ((inter_unsignedp && inter_prec > inside_prec)
8433 == (final_unsignedp && final_prec > inter_prec))
8434 && ! (inside_ptr && inter_prec != final_prec)
8435 && ! (final_ptr && inside_prec != inter_prec)
8436 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8437 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8438 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
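/* For illustration (hypothetical operands), the rules above fold:
     (int) (long) i   with int i             ->  i
       (the intermediate widening is dropped), and
     (long) (int) us  with unsigned short us ->  (long) us
       (a sign-extension of a zero-extended value becomes a single
        zero-extension).  */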
8441 /* Handle (T *)&A.B.C for A being of type T and B and C
8442 living at offset zero. This occurs frequently in
8443 C++ upcasting and then accessing the base. */
8444 if (TREE_CODE (op0) == ADDR_EXPR
8445 && POINTER_TYPE_P (type)
8446 && handled_component_p (TREE_OPERAND (op0, 0)))
8448 HOST_WIDE_INT bitsize, bitpos;
8449 tree offset;
8450 enum machine_mode mode;
8451 int unsignedp, volatilep;
8452 tree base = TREE_OPERAND (op0, 0);
8453 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8454 &mode, &unsignedp, &volatilep, false);
8455 /* If the reference was to a (constant) zero offset, we can use
8456 the address of the base if it has the same base type
8457 as the result type. */
8458 if (! offset && bitpos == 0
8459 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8460 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8461 return fold_convert_loc (loc, type,
8462 build_fold_addr_expr_loc (loc, base));
8465 if (TREE_CODE (op0) == MODIFY_EXPR
8466 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8467 /* Detect assigning a bitfield. */
8468 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8469 && DECL_BIT_FIELD
8470 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8472 /* Don't leave an assignment inside a conversion
8473 unless assigning a bitfield. */
8474 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8475 /* First do the assignment, then return converted constant. */
8476 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8477 TREE_NO_WARNING (tem) = 1;
8478 TREE_USED (tem) = 1;
8479 SET_EXPR_LOCATION (tem, loc);
8480 return tem;
8483 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8484 constant (if x has signed type, the sign bit cannot be set
8485 in c). This folds extension into the BIT_AND_EXPR.
8486 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8487 very likely don't have maximal range for their precision and this
8488 transformation effectively doesn't preserve non-maximal ranges. */
8489 if (TREE_CODE (type) == INTEGER_TYPE
8490 && TREE_CODE (op0) == BIT_AND_EXPR
8491 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8493 tree and_expr = op0;
8494 tree and0 = TREE_OPERAND (and_expr, 0);
8495 tree and1 = TREE_OPERAND (and_expr, 1);
8496 int change = 0;
8498 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8499 || (TYPE_PRECISION (type)
8500 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8501 change = 1;
8502 else if (TYPE_PRECISION (TREE_TYPE (and1))
8503 <= HOST_BITS_PER_WIDE_INT
8504 && host_integerp (and1, 1))
8506 unsigned HOST_WIDE_INT cst;
8508 cst = tree_low_cst (and1, 1);
8509 cst &= (HOST_WIDE_INT) -1
8510 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8511 change = (cst == 0);
8512 #ifdef LOAD_EXTEND_OP
8513 if (change
8514 && !flag_syntax_only
8515 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8516 == ZERO_EXTEND))
8518 tree uns = unsigned_type_for (TREE_TYPE (and0));
8519 and0 = fold_convert_loc (loc, uns, and0);
8520 and1 = fold_convert_loc (loc, uns, and1);
8522 #endif
8524 if (change)
8526 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8527 TREE_INT_CST_HIGH (and1), 0,
8528 TREE_OVERFLOW (and1));
8529 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8530 fold_convert_loc (loc, type, and0), tem);
8534 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8535 when one of the new casts will fold away. Conservatively we assume
8536 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8537 if (POINTER_TYPE_P (type)
8538 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8539 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8540 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8541 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8543 tree arg00 = TREE_OPERAND (arg0, 0);
8544 tree arg01 = TREE_OPERAND (arg0, 1);
8546 return fold_build2_loc (loc,
8547 TREE_CODE (arg0), type,
8548 fold_convert_loc (loc, type, arg00),
8549 fold_convert_loc (loc, sizetype, arg01));
8552 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8553 of the same precision, and X is an integer type not narrower than
8554 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8555 if (INTEGRAL_TYPE_P (type)
8556 && TREE_CODE (op0) == BIT_NOT_EXPR
8557 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8558 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8559 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8561 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8562 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8563 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8564 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8565 fold_convert_loc (loc, type, tem));
8568 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8569 type of X and Y (integer types only). */
8570 if (INTEGRAL_TYPE_P (type)
8571 && TREE_CODE (op0) == MULT_EXPR
8572 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8573 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8575 /* Be careful not to introduce new overflows. */
8576 tree mult_type;
8577 if (TYPE_OVERFLOW_WRAPS (type))
8578 mult_type = type;
8579 else
8580 mult_type = unsigned_type_for (type);
8582 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8584 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8585 fold_convert_loc (loc, mult_type,
8586 TREE_OPERAND (op0, 0)),
8587 fold_convert_loc (loc, mult_type,
8588 TREE_OPERAND (op0, 1)));
8589 return fold_convert_loc (loc, type, tem);
8593 tem = fold_convert_const (code, type, op0);
8594 return tem ? tem : NULL_TREE;
8596 case FIXED_CONVERT_EXPR:
8597 tem = fold_convert_const (code, type, arg0);
8598 return tem ? tem : NULL_TREE;
8600 case VIEW_CONVERT_EXPR:
8601 if (TREE_TYPE (op0) == type)
8602 return op0;
8603 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8604 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8605 type, TREE_OPERAND (op0, 0));
8607 /* For integral conversions with the same precision or pointer
8608 conversions use a NOP_EXPR instead. */
8609 if ((INTEGRAL_TYPE_P (type)
8610 || POINTER_TYPE_P (type))
8611 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8612 || POINTER_TYPE_P (TREE_TYPE (op0)))
8613 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8614 return fold_convert_loc (loc, type, op0);
8616 /* Strip inner integral conversions that do not change the precision. */
8617 if (CONVERT_EXPR_P (op0)
8618 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8619 || POINTER_TYPE_P (TREE_TYPE (op0)))
8620 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8621 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8622 && (TYPE_PRECISION (TREE_TYPE (op0))
8623 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8624 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8625 type, TREE_OPERAND (op0, 0));
8627 return fold_view_convert_expr (type, op0);
8629 case NEGATE_EXPR:
8630 tem = fold_negate_expr (loc, arg0);
8631 if (tem)
8632 return fold_convert_loc (loc, type, tem);
8633 return NULL_TREE;
8635 case ABS_EXPR:
8636 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8637 return fold_abs_const (arg0, type);
8638 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8639 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8640 /* Convert fabs((double)float) into (double)fabsf(float). */
8641 else if (TREE_CODE (arg0) == NOP_EXPR
8642 && TREE_CODE (type) == REAL_TYPE)
8644 tree targ0 = strip_float_extensions (arg0);
8645 if (targ0 != arg0)
8646 return fold_convert_loc (loc, type,
8647 fold_build1_loc (loc, ABS_EXPR,
8648 TREE_TYPE (targ0),
8649 targ0));
8651 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8652 else if (TREE_CODE (arg0) == ABS_EXPR)
8653 return arg0;
8654 else if (tree_expr_nonnegative_p (arg0))
8655 return arg0;
8657 /* Strip sign ops from argument. */
8658 if (TREE_CODE (type) == REAL_TYPE)
8660 tem = fold_strip_sign_ops (arg0);
8661 if (tem)
8662 return fold_build1_loc (loc, ABS_EXPR, type,
8663 fold_convert_loc (loc, type, tem));
8665 return NULL_TREE;
8667 case CONJ_EXPR:
8668 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8669 return fold_convert_loc (loc, type, arg0);
8670 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8672 tree itype = TREE_TYPE (type);
8673 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8674 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8675 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8676 negate_expr (ipart));
8678 if (TREE_CODE (arg0) == COMPLEX_CST)
8680 tree itype = TREE_TYPE (type);
8681 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8682 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8683 return build_complex (type, rpart, negate_expr (ipart));
8685 if (TREE_CODE (arg0) == CONJ_EXPR)
8686 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8687 return NULL_TREE;
8689 case BIT_NOT_EXPR:
8690 if (TREE_CODE (arg0) == INTEGER_CST)
8691 return fold_not_const (arg0, type);
8692 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8693 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8694 /* Convert ~ (-A) to A - 1. */
8695 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8696 return fold_build2_loc (loc, MINUS_EXPR, type,
8697 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8698 build_int_cst (type, 1));
8699 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8700 else if (INTEGRAL_TYPE_P (type)
8701 && ((TREE_CODE (arg0) == MINUS_EXPR
8702 && integer_onep (TREE_OPERAND (arg0, 1)))
8703 || (TREE_CODE (arg0) == PLUS_EXPR
8704 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8705 return fold_build1_loc (loc, NEGATE_EXPR, type,
8706 fold_convert_loc (loc, type,
8707 TREE_OPERAND (arg0, 0)));
8708 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8709 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8710 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8711 fold_convert_loc (loc, type,
8712 TREE_OPERAND (arg0, 0)))))
8713 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8714 fold_convert_loc (loc, type,
8715 TREE_OPERAND (arg0, 1)));
8716 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8717 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8718 fold_convert_loc (loc, type,
8719 TREE_OPERAND (arg0, 1)))))
8720 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8721 fold_convert_loc (loc, type,
8722 TREE_OPERAND (arg0, 0)), tem);
8723 /* Perform BIT_NOT_EXPR on each element individually. */
8724 else if (TREE_CODE (arg0) == VECTOR_CST)
8726 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8727 int count = TYPE_VECTOR_SUBPARTS (type), i;
8729 for (i = 0; i < count; i++)
8731 if (elements)
8733 elem = TREE_VALUE (elements);
8734 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8735 if (elem == NULL_TREE)
8736 break;
8737 elements = TREE_CHAIN (elements);
8739 else
8740 elem = build_int_cst (TREE_TYPE (type), -1);
8741 list = tree_cons (NULL_TREE, elem, list);
8743 if (i == count)
8744 return build_vector (type, nreverse (list));
8747 return NULL_TREE;
8749 case TRUTH_NOT_EXPR:
8750 /* The argument to invert_truthvalue must have Boolean type. */
8751 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8752 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8754 /* Note that the operand of this must be an int
8755 and its values must be 0 or 1.
8756 ("true" is a fixed value perhaps depending on the language,
8757 but we don't handle values other than 1 correctly yet.) */
8758 tem = fold_truth_not_expr (loc, arg0);
8759 if (!tem)
8760 return NULL_TREE;
8761 return fold_convert_loc (loc, type, tem);
8763 case REALPART_EXPR:
8764 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8765 return fold_convert_loc (loc, type, arg0);
8766 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8767 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8768 TREE_OPERAND (arg0, 1));
8769 if (TREE_CODE (arg0) == COMPLEX_CST)
8770 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8771 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8773 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8774 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8775 fold_build1_loc (loc, REALPART_EXPR, itype,
8776 TREE_OPERAND (arg0, 0)),
8777 fold_build1_loc (loc, REALPART_EXPR, itype,
8778 TREE_OPERAND (arg0, 1)));
8779 return fold_convert_loc (loc, type, tem);
8781 if (TREE_CODE (arg0) == CONJ_EXPR)
8783 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8784 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8785 TREE_OPERAND (arg0, 0));
8786 return fold_convert_loc (loc, type, tem);
8788 if (TREE_CODE (arg0) == CALL_EXPR)
8790 tree fn = get_callee_fndecl (arg0);
8791 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8792 switch (DECL_FUNCTION_CODE (fn))
8794 CASE_FLT_FN (BUILT_IN_CEXPI):
8795 fn = mathfn_built_in (type, BUILT_IN_COS);
8796 if (fn)
8797 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8798 break;
8800 default:
8801 break;
8804 return NULL_TREE;
8806 case IMAGPART_EXPR:
8807 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8808 return fold_convert_loc (loc, type, integer_zero_node);
8809 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8810 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8811 TREE_OPERAND (arg0, 0));
8812 if (TREE_CODE (arg0) == COMPLEX_CST)
8813 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8814 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8816 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8817 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8818 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8819 TREE_OPERAND (arg0, 0)),
8820 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8821 TREE_OPERAND (arg0, 1)));
8822 return fold_convert_loc (loc, type, tem);
8824 if (TREE_CODE (arg0) == CONJ_EXPR)
8826 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8827 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8828 return fold_convert_loc (loc, type, negate_expr (tem));
8830 if (TREE_CODE (arg0) == CALL_EXPR)
8832 tree fn = get_callee_fndecl (arg0);
8833 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8834 switch (DECL_FUNCTION_CODE (fn))
8836 CASE_FLT_FN (BUILT_IN_CEXPI):
8837 fn = mathfn_built_in (type, BUILT_IN_SIN);
8838 if (fn)
8839 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8840 break;
8842 default:
8843 break;
8846 return NULL_TREE;
8848 default:
8849 return NULL_TREE;
8850 } /* switch (code) */
8854 /* If the operation was a conversion do _not_ mark a resulting constant
8855 with TREE_OVERFLOW if the original constant was not. These conversions
8856 have implementation defined behavior and retaining the TREE_OVERFLOW
8857 flag here would confuse later passes such as VRP. */
8858 tree
8859 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8860 tree type, tree op0)
8862 tree res = fold_unary_loc (loc, code, type, op0);
8863 if (res
8864 && TREE_CODE (res) == INTEGER_CST
8865 && TREE_CODE (op0) == INTEGER_CST
8866 && CONVERT_EXPR_CODE_P (code))
8867 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8869 return res;
8872 /* Fold a binary expression of code CODE and type TYPE with operands
8873 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8874 Return the folded expression if folding is successful. Otherwise,
8875 return NULL_TREE. */
8877 static tree
8878 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8880 enum tree_code compl_code;
8882 if (code == MIN_EXPR)
8883 compl_code = MAX_EXPR;
8884 else if (code == MAX_EXPR)
8885 compl_code = MIN_EXPR;
8886 else
8887 gcc_unreachable ();
8889 /* MIN (MAX (a, b), b) == b. */
8890 if (TREE_CODE (op0) == compl_code
8891 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8892 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8894 /* MIN (MAX (b, a), b) == b. */
8895 if (TREE_CODE (op0) == compl_code
8896 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8897 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8898 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8900 /* MIN (a, MAX (a, b)) == a. */
8901 if (TREE_CODE (op1) == compl_code
8902 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8903 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8904 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8906 /* MIN (a, MAX (b, a)) == a. */
8907 if (TREE_CODE (op1) == compl_code
8908 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8909 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8910 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8912 return NULL_TREE;
8915 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8916 by changing CODE to reduce the magnitude of constants involved in
8917 ARG0 of the comparison.
8918 Returns a canonicalized comparison tree if a simplification was
8919 possible, otherwise returns NULL_TREE.
8920 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8921 valid if signed overflow is undefined. */
8923 static tree
8924 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8925 tree arg0, tree arg1,
8926 bool *strict_overflow_p)
8928 enum tree_code code0 = TREE_CODE (arg0);
8929 tree t, cst0 = NULL_TREE;
8930 int sgn0;
8931 bool swap = false;
8933 /* Match A +- CST code arg1 and CST code arg1. We can change the
8934 first form only if overflow is undefined. */
8935 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8936 /* In principle pointers also have undefined overflow behavior,
8937 but that causes problems elsewhere. */
8938 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8939 && (code0 == MINUS_EXPR
8940 || code0 == PLUS_EXPR)
8941 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8942 || code0 == INTEGER_CST))
8943 return NULL_TREE;
8945 /* Identify the constant in arg0 and its sign. */
8946 if (code0 == INTEGER_CST)
8947 cst0 = arg0;
8948 else
8949 cst0 = TREE_OPERAND (arg0, 1);
8950 sgn0 = tree_int_cst_sgn (cst0);
8952 /* Overflowed constants and zero will cause problems. */
8953 if (integer_zerop (cst0)
8954 || TREE_OVERFLOW (cst0))
8955 return NULL_TREE;
8957 /* See if we can reduce the magnitude of the constant in
8958 arg0 by changing the comparison code. */
8959 if (code0 == INTEGER_CST)
8961 /* CST <= arg1 -> CST-1 < arg1. */
8962 if (code == LE_EXPR && sgn0 == 1)
8963 code = LT_EXPR;
8964 /* -CST < arg1 -> -CST-1 <= arg1. */
8965 else if (code == LT_EXPR && sgn0 == -1)
8966 code = LE_EXPR;
8967 /* CST > arg1 -> CST-1 >= arg1. */
8968 else if (code == GT_EXPR && sgn0 == 1)
8969 code = GE_EXPR;
8970 /* -CST >= arg1 -> -CST-1 > arg1. */
8971 else if (code == GE_EXPR && sgn0 == -1)
8972 code = GT_EXPR;
8973 else
8974 return NULL_TREE;
8975 /* arg1 code' CST' might be more canonical. */
8976 swap = true;
8978 else
8980 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8981 if (code == LT_EXPR
8982 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8983 code = LE_EXPR;
8984 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8985 else if (code == GT_EXPR
8986 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8987 code = GE_EXPR;
8988 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8989 else if (code == LE_EXPR
8990 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8991 code = LT_EXPR;
8992 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8993 else if (code == GE_EXPR
8994 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8995 code = GT_EXPR;
8996 else
8997 return NULL_TREE;
8998 *strict_overflow_p = true;
9001 /* Now build the constant reduced in magnitude. But not if that
9002 would produce one outside of its type's range. */
9003 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9004 && ((sgn0 == 1
9005 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9006 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9007 || (sgn0 == -1
9008 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9009 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9010 /* We cannot swap the comparison here as that would cause us to
9011 endlessly recurse. */
9012 return NULL_TREE;
9014 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9015 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
9016 if (code0 != INTEGER_CST)
9017 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9019 /* If swapping might yield a more canonical form, do so. */
9020 if (swap)
9021 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
9022 else
9023 return fold_build2_loc (loc, code, type, t, arg1);
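/* For illustration (hypothetical operands): with signed int a, b
   and signed overflow treated as undefined, the test
     a - 1 >= b
   is canonicalized by the code above to a - 0 > b (and thence to
   a > b), reducing the magnitude of the constant.  */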
9026 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9027 overflow further. Try to decrease the magnitude of constants involved
9028 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9029 and put sole constants at the second argument position.
9030 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9032 static tree
9033 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9034 tree arg0, tree arg1)
9036 tree t;
9037 bool strict_overflow_p;
9038 const char * const warnmsg = G_("assuming signed overflow does not occur "
9039 "when reducing constant in comparison");
9041 /* Try canonicalization by simplifying arg0. */
9042 strict_overflow_p = false;
9043 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9044 &strict_overflow_p);
9045 if (t)
9047 if (strict_overflow_p)
9048 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9049 return t;
9052 /* Try canonicalization by simplifying arg1 using the swapped
9053 comparison. */
9054 code = swap_tree_comparison (code);
9055 strict_overflow_p = false;
9056 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9057 &strict_overflow_p);
9058 if (t && strict_overflow_p)
9059 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9060 return t;
9063 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9064 space. This is used to avoid issuing overflow warnings for
9065 expressions like &p->x which cannot wrap. */
9067 static bool
9068 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
9070 unsigned HOST_WIDE_INT offset_low, total_low;
9071 HOST_WIDE_INT size, offset_high, total_high;
9073 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9074 return true;
9076 if (bitpos < 0)
9077 return true;
9079 if (offset == NULL_TREE)
9081 offset_low = 0;
9082 offset_high = 0;
9084 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9085 return true;
9086 else
9088 offset_low = TREE_INT_CST_LOW (offset);
9089 offset_high = TREE_INT_CST_HIGH (offset);
9092 if (add_double_with_sign (offset_low, offset_high,
9093 bitpos / BITS_PER_UNIT, 0,
9094 &total_low, &total_high,
9095 true))
9096 return true;
9098 if (total_high != 0)
9099 return true;
9101 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9102 if (size <= 0)
9103 return true;
9105 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9106 array. */
9107 if (TREE_CODE (base) == ADDR_EXPR)
9109 HOST_WIDE_INT base_size;
9111 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9112 if (base_size > 0 && size < base_size)
9113 size = base_size;
9116 return total_low > (unsigned HOST_WIDE_INT) size;
9119 /* Subroutine of fold_binary. This routine performs all of the
9120 transformations that are common to the equality/inequality
9121 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9122 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9123 fold_binary should call fold_binary rather than this function directly. Fold a comparison with
9124 tree code CODE and type TYPE with operands OP0 and OP1. Return
9125 the folded comparison or NULL_TREE. */
9127 static tree
9128 fold_comparison (location_t loc, enum tree_code code, tree type,
9129 tree op0, tree op1)
9131 tree arg0, arg1, tem;
9133 arg0 = op0;
9134 arg1 = op1;
9136 STRIP_SIGN_NOPS (arg0);
9137 STRIP_SIGN_NOPS (arg1);
9139 tem = fold_relational_const (code, type, arg0, arg1);
9140 if (tem != NULL_TREE)
9141 return tem;
9143 /* If one arg is a real or integer constant, put it last. */
9144 if (tree_swap_operands_p (arg0, arg1, true))
9145 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9147 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
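/* Illustrative example, not part of the original source: for signed int x,
   "x + 10 < 30" becomes "x < 30 - 10", i.e. "x < 20".  The transform is
   guarded by TYPE_OVERFLOW_UNDEFINED because x + 10 may wrap in types with
   defined (modulo) overflow, where the rewrite would be wrong.  */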
9148 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9149 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9150 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9151 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9152 && (TREE_CODE (arg1) == INTEGER_CST
9153 && !TREE_OVERFLOW (arg1)))
9155 tree const1 = TREE_OPERAND (arg0, 1);
9156 tree const2 = arg1;
9157 tree variable = TREE_OPERAND (arg0, 0);
9158 tree lhs;
9159 int lhs_add;
9160 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9162 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9163 TREE_TYPE (arg1), const2, const1);
9165 /* If the constant operation overflowed this can be
9166 simplified as a comparison against INT_MAX/INT_MIN. */
9167 if (TREE_CODE (lhs) == INTEGER_CST
9168 && TREE_OVERFLOW (lhs))
9170 int const1_sgn = tree_int_cst_sgn (const1);
9171 enum tree_code code2 = code;
9173 /* Get the sign of the constant on the lhs if the
9174 operation were VARIABLE + CONST1. */
9175 if (TREE_CODE (arg0) == MINUS_EXPR)
9176 const1_sgn = -const1_sgn;
9178 /* The sign of the constant determines if we overflowed
9179 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9180 Canonicalize to the INT_MIN overflow by swapping the comparison
9181 if necessary. */
9182 if (const1_sgn == -1)
9183 code2 = swap_tree_comparison (code);
9185 /* We can now look at the canonicalized case
9186 VARIABLE + 1 CODE2 INT_MIN
9187 and decide on the result. */
9188 if (code2 == LT_EXPR
9189 || code2 == LE_EXPR
9190 || code2 == EQ_EXPR)
9191 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9192 else if (code2 == NE_EXPR
9193 || code2 == GE_EXPR
9194 || code2 == GT_EXPR)
9195 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
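/* Illustrative example, not part of the original source: for 32-bit signed
   int x, "x - 1 > INT_MAX" tries to combine to "x > INT_MAX + 1", which
   overflows INT_MAX (const1_sgn == -1 after the MINUS_EXPR adjustment).
   Swapping gives the canonical "VARIABLE + 1 LT_EXPR INT_MIN" case, so the
   comparison folds to constant false, as expected: x - 1 can never exceed
   INT_MAX when signed overflow is undefined.  */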
9198 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9199 && (TREE_CODE (lhs) != INTEGER_CST
9200 || !TREE_OVERFLOW (lhs)))
9202 fold_overflow_warning (("assuming signed overflow does not occur "
9203 "when changing X +- C1 cmp C2 to "
9204 "X cmp C1 +- C2"),
9205 WARN_STRICT_OVERFLOW_COMPARISON);
9206 return fold_build2_loc (loc, code, type, variable, lhs);
9210 /* For comparisons of pointers we can decompose it to a compile time
9211 comparison of the base objects and the offsets into the object.
9212 This requires at least one operand being an ADDR_EXPR or a
9213 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
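/* Illustrative example, not part of the original source: for
   "struct S { int a; int b; } s;", the operands of "&s.a != &s.b" share the
   base "s" with bit positions 0 and 32, so the comparison folds to constant
   true at compile time without looking at the pointer values.  */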
9214 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9215 && (TREE_CODE (arg0) == ADDR_EXPR
9216 || TREE_CODE (arg1) == ADDR_EXPR
9217 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9218 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9220 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9221 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9222 enum machine_mode mode;
9223 int volatilep, unsignedp;
9224 bool indirect_base0 = false, indirect_base1 = false;
9226 /* Get base and offset for the access. Strip ADDR_EXPR for
9227 get_inner_reference, but put it back by stripping INDIRECT_REF
9228 off the base object if possible. indirect_baseN will be true
9229 if baseN is not an address but refers to the object itself. */
9230 base0 = arg0;
9231 if (TREE_CODE (arg0) == ADDR_EXPR)
9233 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9234 &bitsize, &bitpos0, &offset0, &mode,
9235 &unsignedp, &volatilep, false);
9236 if (TREE_CODE (base0) == INDIRECT_REF)
9237 base0 = TREE_OPERAND (base0, 0);
9238 else
9239 indirect_base0 = true;
9241 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9243 base0 = TREE_OPERAND (arg0, 0);
9244 offset0 = TREE_OPERAND (arg0, 1);
9247 base1 = arg1;
9248 if (TREE_CODE (arg1) == ADDR_EXPR)
9250 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9251 &bitsize, &bitpos1, &offset1, &mode,
9252 &unsignedp, &volatilep, false);
9253 if (TREE_CODE (base1) == INDIRECT_REF)
9254 base1 = TREE_OPERAND (base1, 0);
9255 else
9256 indirect_base1 = true;
9258 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9260 base1 = TREE_OPERAND (arg1, 0);
9261 offset1 = TREE_OPERAND (arg1, 1);
9264 /* If we have equivalent bases we might be able to simplify. */
9265 if (indirect_base0 == indirect_base1
9266 && operand_equal_p (base0, base1, 0))
9268 /* We can fold this expression to a constant if the non-constant
9269 offset parts are equal. */
9270 if ((offset0 == offset1
9271 || (offset0 && offset1
9272 && operand_equal_p (offset0, offset1, 0)))
9273 && (code == EQ_EXPR
9274 || code == NE_EXPR
9275 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9278 if (code != EQ_EXPR
9279 && code != NE_EXPR
9280 && bitpos0 != bitpos1
9281 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9282 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9283 fold_overflow_warning (("assuming pointer wraparound does not "
9284 "occur when comparing P +- C1 with "
9285 "P +- C2"),
9286 WARN_STRICT_OVERFLOW_CONDITIONAL);
9288 switch (code)
9290 case EQ_EXPR:
9291 return constant_boolean_node (bitpos0 == bitpos1, type);
9292 case NE_EXPR:
9293 return constant_boolean_node (bitpos0 != bitpos1, type);
9294 case LT_EXPR:
9295 return constant_boolean_node (bitpos0 < bitpos1, type);
9296 case LE_EXPR:
9297 return constant_boolean_node (bitpos0 <= bitpos1, type);
9298 case GE_EXPR:
9299 return constant_boolean_node (bitpos0 >= bitpos1, type);
9300 case GT_EXPR:
9301 return constant_boolean_node (bitpos0 > bitpos1, type);
9302 default:;
9305 /* We can simplify the comparison to a comparison of the variable
9306 offset parts if the constant offset parts are equal.
9307 Be careful to use signed size type here because otherwise we
9308 mess with array offsets in the wrong way. This is possible
9309 because pointer arithmetic is restricted to remain within an
9310 object and overflow on pointer differences is undefined as of
9311 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
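/* Illustrative example, not part of the original source: with "int a[100];",
   "&a[i] < &a[j]" has equal bases and equal constant bit positions, so it
   reduces to a comparison of the variable byte offsets, effectively
   "i < j" once both offsets are converted to the signed size type.  This
   assumes pointer wraparound is treated as undefined.  */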
9312 else if (bitpos0 == bitpos1
9313 && ((code == EQ_EXPR || code == NE_EXPR)
9314 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9316 tree signed_size_type_node;
9317 signed_size_type_node = signed_type_for (size_type_node);
9319 /* By converting to the signed size type we cover middle-end pointer
9320 arithmetic, which operates on unsigned pointer types of size
9321 type size, and ARRAY_REF offsets, which are properly sign or
9322 zero extended from their type in case it is narrower than
9323 size type. */
9324 if (offset0 == NULL_TREE)
9325 offset0 = build_int_cst (signed_size_type_node, 0);
9326 else
9327 offset0 = fold_convert_loc (loc, signed_size_type_node,
9328 offset0);
9329 if (offset1 == NULL_TREE)
9330 offset1 = build_int_cst (signed_size_type_node, 0);
9331 else
9332 offset1 = fold_convert_loc (loc, signed_size_type_node,
9333 offset1);
9335 if (code != EQ_EXPR
9336 && code != NE_EXPR
9337 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9338 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9339 fold_overflow_warning (("assuming pointer wraparound does not "
9340 "occur when comparing P +- C1 with "
9341 "P +- C2"),
9342 WARN_STRICT_OVERFLOW_COMPARISON);
9344 return fold_build2_loc (loc, code, type, offset0, offset1);
9347 /* For non-equal bases we can simplify if they are addresses
9348 of local binding decls or constants. */
9349 else if (indirect_base0 && indirect_base1
9350 /* We know that !operand_equal_p (base0, base1, 0)
9351 because the if condition was false. But make
9352 sure the two decls are not the same. */
9353 && base0 != base1
9354 && TREE_CODE (arg0) == ADDR_EXPR
9355 && TREE_CODE (arg1) == ADDR_EXPR
9356 && (((TREE_CODE (base0) == VAR_DECL
9357 || TREE_CODE (base0) == PARM_DECL)
9358 && (targetm.binds_local_p (base0)
9359 || CONSTANT_CLASS_P (base1)))
9360 || CONSTANT_CLASS_P (base0))
9361 && (((TREE_CODE (base1) == VAR_DECL
9362 || TREE_CODE (base1) == PARM_DECL)
9363 && (targetm.binds_local_p (base1)
9364 || CONSTANT_CLASS_P (base0)))
9365 || CONSTANT_CLASS_P (base1)))
9367 if (code == EQ_EXPR)
9368 return omit_two_operands_loc (loc, type, boolean_false_node,
9369 arg0, arg1);
9370 else if (code == NE_EXPR)
9371 return omit_two_operands_loc (loc, type, boolean_true_node,
9372 arg0, arg1);
9374 /* For equal offsets we can simplify to a comparison of the
9375 base addresses. */
9376 else if (bitpos0 == bitpos1
9377 && (indirect_base0
9378 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9379 && (indirect_base1
9380 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9381 && ((offset0 == offset1)
9382 || (offset0 && offset1
9383 && operand_equal_p (offset0, offset1, 0))))
9385 if (indirect_base0)
9386 base0 = build_fold_addr_expr_loc (loc, base0);
9387 if (indirect_base1)
9388 base1 = build_fold_addr_expr_loc (loc, base1);
9389 return fold_build2_loc (loc, code, type, base0, base1);
9393 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9394 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9395 the resulting offset is smaller in absolute value than the
9396 original one. */
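/* Illustrative example, not part of the original source: for signed int x
   and y, "x + 100 < y + 3" combines the two constants as 3 - 100 == -97
   and is rewritten to "x < y + -97", i.e. "x < y - 97".  */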
9397 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9398 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9399 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9400 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9401 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9402 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9403 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9405 tree const1 = TREE_OPERAND (arg0, 1);
9406 tree const2 = TREE_OPERAND (arg1, 1);
9407 tree variable1 = TREE_OPERAND (arg0, 0);
9408 tree variable2 = TREE_OPERAND (arg1, 0);
9409 tree cst;
9410 const char * const warnmsg = G_("assuming signed overflow does not "
9411 "occur when combining constants around "
9412 "a comparison");
9414 /* Put the constant on the side where it doesn't overflow and is
9415 of lower absolute value than before. */
9416 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9417 ? MINUS_EXPR : PLUS_EXPR,
9418 const2, const1, 0);
9419 if (!TREE_OVERFLOW (cst)
9420 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9422 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9423 return fold_build2_loc (loc, code, type,
9424 variable1,
9425 fold_build2_loc (loc,
9426 TREE_CODE (arg1), TREE_TYPE (arg1),
9427 variable2, cst));
9430 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9431 ? MINUS_EXPR : PLUS_EXPR,
9432 const1, const2, 0);
9433 if (!TREE_OVERFLOW (cst)
9434 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9436 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9437 return fold_build2_loc (loc, code, type,
9438 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9439 variable1, cst),
9440 variable2);
9444 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9445 signed arithmetic case. That form is created by the compiler
9446 often enough for folding it to be of value. One example is in
9447 computing loop trip counts after Operator Strength Reduction. */
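/* Illustrative examples, not part of the original source: for signed x,
   "x * 4 > 0" folds to "x > 0", while "x * -4 > 0" folds to "x < 0"
   because the negative multiplier swaps the comparison.  Both depend on
   signed multiplication overflow being undefined.  */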
9448 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9449 && TREE_CODE (arg0) == MULT_EXPR
9450 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9451 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9452 && integer_zerop (arg1))
9454 tree const1 = TREE_OPERAND (arg0, 1);
9455 tree const2 = arg1; /* zero */
9456 tree variable1 = TREE_OPERAND (arg0, 0);
9457 enum tree_code cmp_code = code;
9459 gcc_assert (!integer_zerop (const1));
9461 fold_overflow_warning (("assuming signed overflow does not occur when "
9462 "eliminating multiplication in comparison "
9463 "with zero"),
9464 WARN_STRICT_OVERFLOW_COMPARISON);
9466 /* If const1 is negative we swap the sense of the comparison. */
9467 if (tree_int_cst_sgn (const1) < 0)
9468 cmp_code = swap_tree_comparison (cmp_code);
9470 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9473 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
9474 if (tem)
9475 return tem;
9477 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9479 tree targ0 = strip_float_extensions (arg0);
9480 tree targ1 = strip_float_extensions (arg1);
9481 tree newtype = TREE_TYPE (targ0);
9483 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9484 newtype = TREE_TYPE (targ1);
9486 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9487 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9488 return fold_build2_loc (loc, code, type,
9489 fold_convert_loc (loc, newtype, targ0),
9490 fold_convert_loc (loc, newtype, targ1));
9492 /* (-a) CMP (-b) -> b CMP a */
9493 if (TREE_CODE (arg0) == NEGATE_EXPR
9494 && TREE_CODE (arg1) == NEGATE_EXPR)
9495 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9496 TREE_OPERAND (arg0, 0));
9498 if (TREE_CODE (arg1) == REAL_CST)
9500 REAL_VALUE_TYPE cst;
9501 cst = TREE_REAL_CST (arg1);
9503 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9504 if (TREE_CODE (arg0) == NEGATE_EXPR)
9505 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9506 TREE_OPERAND (arg0, 0),
9507 build_real (TREE_TYPE (arg1),
9508 REAL_VALUE_NEGATE (cst)));
9510 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9511 /* a CMP (-0) -> a CMP 0 */
9512 if (REAL_VALUE_MINUS_ZERO (cst))
9513 return fold_build2_loc (loc, code, type, arg0,
9514 build_real (TREE_TYPE (arg1), dconst0));
9516 /* x != NaN is always true, other ops are always false. */
9517 if (REAL_VALUE_ISNAN (cst)
9518 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9520 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9521 return omit_one_operand_loc (loc, type, tem, arg0);
9524 /* Fold comparisons against infinity. */
9525 if (REAL_VALUE_ISINF (cst)
9526 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9528 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9529 if (tem != NULL_TREE)
9530 return tem;
9534 /* If this is a comparison of a real constant with a PLUS_EXPR
9535 or a MINUS_EXPR of a real constant, we can convert it into a
9536 comparison with a revised real constant as long as no overflow
9537 occurs when unsafe_math_optimizations are enabled. */
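/* Illustrative example, not part of the original source: with
   -funsafe-math-optimizations, "x + 1.5 < 3.0" becomes "x < 1.5" since
   3.0 - 1.5 is exact.  The transform is unsafe in general because the
   rounding of x + 1.5 need not agree with the rounding of the adjusted
   constant.  */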
9538 if (flag_unsafe_math_optimizations
9539 && TREE_CODE (arg1) == REAL_CST
9540 && (TREE_CODE (arg0) == PLUS_EXPR
9541 || TREE_CODE (arg0) == MINUS_EXPR)
9542 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9543 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9544 ? MINUS_EXPR : PLUS_EXPR,
9545 arg1, TREE_OPERAND (arg0, 1), 0))
9546 && !TREE_OVERFLOW (tem))
9547 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9549 /* Likewise, we can simplify a comparison of a real constant with
9550 a MINUS_EXPR whose first operand is also a real constant, i.e.
9551 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9552 floating-point types only if -fassociative-math is set. */
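/* Illustrative example, not part of the original source: with
   -fassociative-math, "(10.0 - x) < 4.0" becomes "x > 10.0 - 4.0",
   i.e. "x > 6.0", using the swapped comparison code.  */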
9553 if (flag_associative_math
9554 && TREE_CODE (arg1) == REAL_CST
9555 && TREE_CODE (arg0) == MINUS_EXPR
9556 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9557 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9558 arg1, 0))
9559 && !TREE_OVERFLOW (tem))
9560 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9561 TREE_OPERAND (arg0, 1), tem);
9563 /* Fold comparisons against built-in math functions. */
9564 if (TREE_CODE (arg1) == REAL_CST
9565 && flag_unsafe_math_optimizations
9566 && ! flag_errno_math)
9568 enum built_in_function fcode = builtin_mathfn_code (arg0);
9570 if (fcode != END_BUILTINS)
9572 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9573 if (tem != NULL_TREE)
9574 return tem;
9579 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9580 && CONVERT_EXPR_P (arg0))
9582 /* If we are widening one operand of an integer comparison,
9583 see if the other operand is similarly being widened. Perhaps we
9584 can do the comparison in the narrower type. */
9585 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9586 if (tem)
9587 return tem;
9589 /* Or if we are changing signedness. */
9590 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9591 if (tem)
9592 return tem;
9595 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9596 constant, we can simplify it. */
9597 if (TREE_CODE (arg1) == INTEGER_CST
9598 && (TREE_CODE (arg0) == MIN_EXPR
9599 || TREE_CODE (arg0) == MAX_EXPR)
9600 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9602 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9603 if (tem)
9604 return tem;
9607 /* Simplify comparison of something with itself. (For IEEE
9608 floating-point, we can only do some of these simplifications.) */
9609 if (operand_equal_p (arg0, arg1, 0))
9611 switch (code)
9613 case EQ_EXPR:
9614 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9615 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9616 return constant_boolean_node (1, type);
9617 break;
9619 case GE_EXPR:
9620 case LE_EXPR:
9621 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9622 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9623 return constant_boolean_node (1, type);
9624 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9626 case NE_EXPR:
9627 /* For NE, we can only do this simplification if the type is integer
9628 or we don't honor IEEE floating point NaNs. */
9629 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9630 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9631 break;
9632 /* ... fall through ... */
9633 case GT_EXPR:
9634 case LT_EXPR:
9635 return constant_boolean_node (0, type);
9636 default:
9637 gcc_unreachable ();
9641 /* If we are comparing an expression that just has comparisons
9642 of two integer values, arithmetic expressions of those comparisons,
9643 and constants, we can simplify it. There are only three cases
9644 to check: the two values can either be equal, the first can be
9645 greater, or the second can be greater. Fold the expression for
9646 those three values. Since each value must be 0 or 1, we have
9647 eight possibilities, each of which corresponds to the constant 0
9648 or 1 or one of the six possible comparisons.
9650 This handles common cases like (a > b) == 0 but also handles
9651 expressions like ((x > y) - (y > x)) > 0, which supposedly
9652 occur in macroized code. */
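/* Illustrative example, not part of the original source: for
   "((x > y) - (y > x)) > 0", substituting the three orderings of x and y
   yields high_result 1, equal_result 0 and low_result 0.  That selects
   mask value 4 below, i.e. GT_EXPR, so the expression folds to "x > y".  */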
9654 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9656 tree cval1 = 0, cval2 = 0;
9657 int save_p = 0;
9659 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9660 /* Don't handle degenerate cases here; they should already
9661 have been handled anyway. */
9662 && cval1 != 0 && cval2 != 0
9663 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9664 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9665 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9666 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9667 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9668 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9669 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9671 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9672 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9674 /* We can't just pass T to eval_subst in case cval1 or cval2
9675 was the same as ARG1. */
9677 tree high_result
9678 = fold_build2_loc (loc, code, type,
9679 eval_subst (loc, arg0, cval1, maxval,
9680 cval2, minval),
9681 arg1);
9682 tree equal_result
9683 = fold_build2_loc (loc, code, type,
9684 eval_subst (loc, arg0, cval1, maxval,
9685 cval2, maxval),
9686 arg1);
9687 tree low_result
9688 = fold_build2_loc (loc, code, type,
9689 eval_subst (loc, arg0, cval1, minval,
9690 cval2, maxval),
9691 arg1);
9693 /* All three of these results should be 0 or 1. Confirm they are.
9694 Then use those values to select the proper code to use. */
9696 if (TREE_CODE (high_result) == INTEGER_CST
9697 && TREE_CODE (equal_result) == INTEGER_CST
9698 && TREE_CODE (low_result) == INTEGER_CST)
9700 /* Make a 3-bit mask with the high-order bit being the
9701 value for `>', the next for '=', and the low for '<'. */
9702 switch ((integer_onep (high_result) * 4)
9703 + (integer_onep (equal_result) * 2)
9704 + integer_onep (low_result))
9706 case 0:
9707 /* Always false. */
9708 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9709 case 1:
9710 code = LT_EXPR;
9711 break;
9712 case 2:
9713 code = EQ_EXPR;
9714 break;
9715 case 3:
9716 code = LE_EXPR;
9717 break;
9718 case 4:
9719 code = GT_EXPR;
9720 break;
9721 case 5:
9722 code = NE_EXPR;
9723 break;
9724 case 6:
9725 code = GE_EXPR;
9726 break;
9727 case 7:
9728 /* Always true. */
9729 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9732 if (save_p)
9734 tem = save_expr (build2 (code, type, cval1, cval2));
9735 SET_EXPR_LOCATION (tem, loc);
9736 return tem;
9738 return fold_build2_loc (loc, code, type, cval1, cval2);
9743 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9744 into a single range test. */
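/* Illustrative example, not part of the original source: for unsigned x,
   "x / 4 == 2" holds exactly for x in [8, 11], so fold_div_compare can
   rewrite it as a single range test of the form "x - 8 <= 3".  */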
9745 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9746 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9747 && TREE_CODE (arg1) == INTEGER_CST
9748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9749 && !integer_zerop (TREE_OPERAND (arg0, 1))
9750 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9751 && !TREE_OVERFLOW (arg1))
9753 tem = fold_div_compare (loc, code, type, arg0, arg1);
9754 if (tem != NULL_TREE)
9755 return tem;
9758 /* Fold ~X op ~Y as Y op X. */
9759 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9760 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9762 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9763 return fold_build2_loc (loc, code, type,
9764 fold_convert_loc (loc, cmp_type,
9765 TREE_OPERAND (arg1, 0)),
9766 TREE_OPERAND (arg0, 0));
9769 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9770 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9771 && TREE_CODE (arg1) == INTEGER_CST)
9773 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9774 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9775 TREE_OPERAND (arg0, 0),
9776 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9777 fold_convert_loc (loc, cmp_type, arg1)));
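/* Illustrative examples, not part of the original source: "~x < ~y" folds
   to "y < x" because bitwise complement reverses order, and "~x < 5" folds
   to "x > ~5", i.e. "x > -6" in two's complement.  */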
9780 return NULL_TREE;
9784 /* Subroutine of fold_binary. Optimize complex multiplications of the
9785 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9786 argument EXPR represents the expression "z" of type TYPE. */
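/* Illustrative derivation, not part of the original source: for
   z = a + b*i, z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b with a zero
   imaginary part, which is exactly the COMPLEX_EXPR constructed below.  */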
9788 static tree
9789 fold_mult_zconjz (location_t loc, tree type, tree expr)
9791 tree itype = TREE_TYPE (type);
9792 tree rpart, ipart, tem;
9794 if (TREE_CODE (expr) == COMPLEX_EXPR)
9796 rpart = TREE_OPERAND (expr, 0);
9797 ipart = TREE_OPERAND (expr, 1);
9799 else if (TREE_CODE (expr) == COMPLEX_CST)
9801 rpart = TREE_REALPART (expr);
9802 ipart = TREE_IMAGPART (expr);
9804 else
9806 expr = save_expr (expr);
9807 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9808 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9811 rpart = save_expr (rpart);
9812 ipart = save_expr (ipart);
9813 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9814 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9815 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9816 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9817 fold_convert_loc (loc, itype, integer_zero_node));
9821 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9822 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9823 guarantees that P and N have the same least significant log2(M) bits.
9824 N is not otherwise constrained. In particular, N is not normalized to
9825 0 <= N < M as is common. In general, the precise value of P is unknown.
9826 M is chosen as large as possible such that constant N can be determined.
9828 Returns M and sets *RESIDUE to N.
9830 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9831 account. This is not always possible due to PR 35705. */
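/* Illustrative example, not part of the original source: for
   "int a[16] __attribute__((aligned(16)));", the expression &a[1] has
   modulus M == 16 and residue N == 4: whatever address "a" ends up at,
   &a[1] is congruent to 4 modulo 16.  */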
9834 static unsigned HOST_WIDE_INT
9835 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9836 bool allow_func_align)
9838 enum tree_code code;
9840 *residue = 0;
9842 code = TREE_CODE (expr);
9843 if (code == ADDR_EXPR)
9845 expr = TREE_OPERAND (expr, 0);
9846 if (handled_component_p (expr))
9848 HOST_WIDE_INT bitsize, bitpos;
9849 tree offset;
9850 enum machine_mode mode;
9851 int unsignedp, volatilep;
9853 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9854 &mode, &unsignedp, &volatilep, false);
9855 *residue = bitpos / BITS_PER_UNIT;
9856 if (offset)
9858 if (TREE_CODE (offset) == INTEGER_CST)
9859 *residue += TREE_INT_CST_LOW (offset);
9860 else
9861 /* We don't handle more complicated offset expressions. */
9862 return 1;
9866 if (DECL_P (expr)
9867 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9868 return DECL_ALIGN_UNIT (expr);
9870 else if (code == POINTER_PLUS_EXPR)
9872 tree op0, op1;
9873 unsigned HOST_WIDE_INT modulus;
9874 enum tree_code inner_code;
9876 op0 = TREE_OPERAND (expr, 0);
9877 STRIP_NOPS (op0);
9878 modulus = get_pointer_modulus_and_residue (op0, residue,
9879 allow_func_align);
9881 op1 = TREE_OPERAND (expr, 1);
9882 STRIP_NOPS (op1);
9883 inner_code = TREE_CODE (op1);
9884 if (inner_code == INTEGER_CST)
9886 *residue += TREE_INT_CST_LOW (op1);
9887 return modulus;
9889 else if (inner_code == MULT_EXPR)
9891 op1 = TREE_OPERAND (op1, 1);
9892 if (TREE_CODE (op1) == INTEGER_CST)
9894 unsigned HOST_WIDE_INT align;
9896 /* Compute the greatest power-of-2 divisor of op1. */
9897 align = TREE_INT_CST_LOW (op1);
9898 align &= -align;
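/* Illustrative example, not part of the original source: op1 == 24
   (binary 11000) gives align == 8, since 24 & -24 isolates the lowest
   set bit, which is the greatest power of 2 dividing 24.  */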
9900 /* If align is non-zero and less than *modulus, replace
9901 *modulus with align., If align is 0, then either op1 is 0
9902 or the greatest power-of-2 divisor of op1 doesn't fit in an
9903 unsigned HOST_WIDE_INT. In either case, no additional
9904 constraint is imposed. */
9905 if (align)
9906 modulus = MIN (modulus, align);
9908 return modulus;
9913 /* If we get here, we were unable to determine anything useful about the
9914 expression. */
9915 return 1;
9919 /* Fold a binary expression of code CODE and type TYPE with operands
9920 OP0 and OP1. LOC is the location of the resulting expression.
9921 Return the folded expression if folding is successful. Otherwise,
9922 return NULL_TREE. */
9924 tree
9925 fold_binary_loc (location_t loc,
9926 enum tree_code code, tree type, tree op0, tree op1)
9928 enum tree_code_class kind = TREE_CODE_CLASS (code);
9929 tree arg0, arg1, tem;
9930 tree t1 = NULL_TREE;
9931 bool strict_overflow_p;
9933 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9934 && TREE_CODE_LENGTH (code) == 2
9935 && op0 != NULL_TREE
9936 && op1 != NULL_TREE);
9938 arg0 = op0;
9939 arg1 = op1;
9941 /* Strip any conversions that don't change the mode. This is
9942 safe for every expression, except for a comparison expression
9943 because its signedness is derived from its operands. So, in
9944 the latter case, only strip conversions that don't change the
9945 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9946 preserved.
9948 Note that this is done as an internal manipulation within the
9949 constant folder, in order to find the simplest representation
9950 of the arguments so that their form can be studied. In any
9951 case, the appropriate type conversions should be put back in
9952 the tree that will get out of the constant folder. */
9954 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9956 STRIP_SIGN_NOPS (arg0);
9957 STRIP_SIGN_NOPS (arg1);
9959 else
9961 STRIP_NOPS (arg0);
9962 STRIP_NOPS (arg1);
9965 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9966 constant but we can't do arithmetic on them. */
9967 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9968 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9969 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9970 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9971 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9972 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9974 if (kind == tcc_binary)
9976 /* Make sure type and arg0 have the same saturating flag. */
9977 gcc_assert (TYPE_SATURATING (type)
9978 == TYPE_SATURATING (TREE_TYPE (arg0)));
9979 tem = const_binop (code, arg0, arg1, 0);
9981 else if (kind == tcc_comparison)
9982 tem = fold_relational_const (code, type, arg0, arg1);
9983 else
9984 tem = NULL_TREE;
9986 if (tem != NULL_TREE)
9988 if (TREE_TYPE (tem) != type)
9989 tem = fold_convert_loc (loc, type, tem);
9990 return tem;
9994 /* If this is a commutative operation, and ARG0 is a constant, move it
9995 to ARG1 to reduce the number of tests below. */
9996 if (commutative_tree_code (code)
9997 && tree_swap_operands_p (arg0, arg1, true))
9998 return fold_build2_loc (loc, code, type, op1, op0);
10000 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10002 First check for cases where an arithmetic operation is applied to a
10003 compound, conditional, or comparison operation. Push the arithmetic
10004 operation inside the compound or conditional to see if any folding
10005 can then be done. Convert comparison to conditional for this purpose.
10006 This also optimizes non-constant cases that used to be done in
10007 expand_expr.
10009 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10010 where one of the operands is a truth value and the other is either a
10011 truth value or a BIT_AND_EXPR with the constant 1. In that case, the
10012 code below would make the expression more complex. Change it to a
10013 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10014 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
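/* Illustrative example, not part of the original source: "(a > b) & (c > d)"
   becomes the TRUTH_AND_EXPR of the two comparisons, and "(a > b) == (c > d)"
   becomes the inversion of their TRUTH_XOR_EXPR.  */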
10016 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10017 || code == EQ_EXPR || code == NE_EXPR)
10018 && ((truth_value_p (TREE_CODE (arg0))
10019 && (truth_value_p (TREE_CODE (arg1))
10020 || (TREE_CODE (arg1) == BIT_AND_EXPR
10021 && integer_onep (TREE_OPERAND (arg1, 1)))))
10022 || (truth_value_p (TREE_CODE (arg1))
10023 && (truth_value_p (TREE_CODE (arg0))
10024 || (TREE_CODE (arg0) == BIT_AND_EXPR
10025 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10027 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10028 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10029 : TRUTH_XOR_EXPR,
10030 boolean_type_node,
10031 fold_convert_loc (loc, boolean_type_node, arg0),
10032 fold_convert_loc (loc, boolean_type_node, arg1));
10034 if (code == EQ_EXPR)
10035 tem = invert_truthvalue_loc (loc, tem);
10037 return fold_convert_loc (loc, type, tem);
10040 if (TREE_CODE_CLASS (code) == tcc_binary
10041 || TREE_CODE_CLASS (code) == tcc_comparison)
10043 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10045 tem = fold_build2_loc (loc, code, type,
10046 fold_convert_loc (loc, TREE_TYPE (op0),
10047 TREE_OPERAND (arg0, 1)), op1);
10048 protected_set_expr_location (tem, loc);
10049 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10050 goto fold_binary_exit;
10052 if (TREE_CODE (arg1) == COMPOUND_EXPR
10053 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10055 tem = fold_build2_loc (loc, code, type, op0,
10056 fold_convert_loc (loc, TREE_TYPE (op1),
10057 TREE_OPERAND (arg1, 1)));
10058 protected_set_expr_location (tem, loc);
10059 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10060 goto fold_binary_exit;
10063 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10065 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10066 arg0, arg1,
10067 /*cond_first_p=*/1);
10068 if (tem != NULL_TREE)
10069 return tem;
10072 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10074 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10075 arg1, arg0,
10076 /*cond_first_p=*/0);
10077 if (tem != NULL_TREE)
10078 return tem;
10082 switch (code)
10084 case POINTER_PLUS_EXPR:
10085 /* 0 +p index -> (type)index */
10086 if (integer_zerop (arg0))
10087 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10089 /* PTR +p 0 -> PTR */
10090 if (integer_zerop (arg1))
10091 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10093 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10094 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10095 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10096 return fold_convert_loc (loc, type,
10097 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10098 fold_convert_loc (loc, sizetype,
10099 arg1),
10100 fold_convert_loc (loc, sizetype,
10101 arg0)));
10103 /* index +p PTR -> PTR +p index */
10104 if (POINTER_TYPE_P (TREE_TYPE (arg1))
10105 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10106 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10107 fold_convert_loc (loc, type, arg1),
10108 fold_convert_loc (loc, sizetype, arg0));
10110 /* (PTR +p B) +p A -> PTR +p (B + A) */
10111 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10113 tree inner;
10114 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10115 tree arg00 = TREE_OPERAND (arg0, 0);
10116 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10117 arg01, fold_convert_loc (loc, sizetype, arg1));
10118 return fold_convert_loc (loc, type,
10119 fold_build2_loc (loc, POINTER_PLUS_EXPR,
10120 TREE_TYPE (arg00),
10121 arg00, inner));
10124 /* PTR_CST +p CST -> CST1 */
10125 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10126 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10127 fold_convert_loc (loc, type, arg1));
10129 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10130 of the array. The loop optimizer sometimes produces this type of
10131 expression. */
10132 if (TREE_CODE (arg0) == ADDR_EXPR)
10134 tem = try_move_mult_to_index (loc, arg0,
10135 fold_convert_loc (loc, sizetype, arg1));
10136 if (tem)
10137 return fold_convert_loc (loc, type, tem);
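/* Illustrative example, not part of the original source: with "int a[100];",
   "&a[2] +p i * 4" can be rewritten as "&a[2 + i]" because 4 is the step of
   the array, keeping the access in array form for later optimizers.  */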
10140 return NULL_TREE;
10142 case PLUS_EXPR:
10143 /* A + (-B) -> A - B */
10144 if (TREE_CODE (arg1) == NEGATE_EXPR)
10145 return fold_build2_loc (loc, MINUS_EXPR, type,
10146 fold_convert_loc (loc, type, arg0),
10147 fold_convert_loc (loc, type,
10148 TREE_OPERAND (arg1, 0)));
10149 /* (-A) + B -> B - A */
10150 if (TREE_CODE (arg0) == NEGATE_EXPR
10151 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10152 return fold_build2_loc (loc, MINUS_EXPR, type,
10153 fold_convert_loc (loc, type, arg1),
10154 fold_convert_loc (loc, type,
10155 TREE_OPERAND (arg0, 0)));
10157 if (INTEGRAL_TYPE_P (type))
10159 /* Convert ~A + 1 to -A. */
10160 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10161 && integer_onep (arg1))
10162 return fold_build1_loc (loc, NEGATE_EXPR, type,
10163 fold_convert_loc (loc, type,
10164 TREE_OPERAND (arg0, 0)));
10166 /* ~X + X is -1. */
10167 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10168 && !TYPE_OVERFLOW_TRAPS (type))
10170 tree tem = TREE_OPERAND (arg0, 0);
10172 STRIP_NOPS (tem);
10173 if (operand_equal_p (tem, arg1, 0))
10175 t1 = build_int_cst_type (type, -1);
10176 return omit_one_operand_loc (loc, type, t1, arg1);
10180 /* X + ~X is -1. */
10181 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10182 && !TYPE_OVERFLOW_TRAPS (type))
10184 tree tem = TREE_OPERAND (arg1, 0);
10186 STRIP_NOPS (tem);
10187 if (operand_equal_p (arg0, tem, 0))
10189 t1 = build_int_cst_type (type, -1);
10190 return omit_one_operand_loc (loc, type, t1, arg0);
10194 /* X + (X / CST) * -CST is X % CST. */
10195 if (TREE_CODE (arg1) == MULT_EXPR
10196 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10197 && operand_equal_p (arg0,
10198 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10200 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10201 tree cst1 = TREE_OPERAND (arg1, 1);
10202 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10203 cst1, cst0);
10204 if (sum && integer_zerop (sum))
10205 return fold_convert_loc (loc, type,
10206 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10207 TREE_TYPE (arg0), arg0,
10208 cst0));
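/* Illustrative example, not part of the original source: "x + (x / 16) * -16"
   matches with CST == 16 and folds to "x % 16", since truncating division
   satisfies x % 16 == x - (x / 16) * 16.  */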
10212 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10213 same or one. Make sure type is not saturating.
10214 fold_plusminus_mult_expr will re-associate. */
10215 if ((TREE_CODE (arg0) == MULT_EXPR
10216 || TREE_CODE (arg1) == MULT_EXPR)
10217 && !TYPE_SATURATING (type)
10218 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10220 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10221 if (tem)
10222 return tem;
10225 if (! FLOAT_TYPE_P (type))
10227 if (integer_zerop (arg1))
10228 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10230 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10231 with a constant, and the two constants have no bits in common,
10232 we should treat this as a BIT_IOR_EXPR since this may produce more
10233 simplifications. */
10234 if (TREE_CODE (arg0) == BIT_AND_EXPR
10235 && TREE_CODE (arg1) == BIT_AND_EXPR
10236 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10237 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10238 && integer_zerop (const_binop (BIT_AND_EXPR,
10239 TREE_OPERAND (arg0, 1),
10240 TREE_OPERAND (arg1, 1), 0)))
10242 code = BIT_IOR_EXPR;
10243 goto bit_ior;
10246 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10247 (plus (plus (mult) (mult)) (foo)) so that we can
10248 take advantage of the factoring cases below. */
10249 if (((TREE_CODE (arg0) == PLUS_EXPR
10250 || TREE_CODE (arg0) == MINUS_EXPR)
10251 && TREE_CODE (arg1) == MULT_EXPR)
10252 || ((TREE_CODE (arg1) == PLUS_EXPR
10253 || TREE_CODE (arg1) == MINUS_EXPR)
10254 && TREE_CODE (arg0) == MULT_EXPR))
10256 tree parg0, parg1, parg, marg;
10257 enum tree_code pcode;
10259 if (TREE_CODE (arg1) == MULT_EXPR)
10260 parg = arg0, marg = arg1;
10261 else
10262 parg = arg1, marg = arg0;
10263 pcode = TREE_CODE (parg);
10264 parg0 = TREE_OPERAND (parg, 0);
10265 parg1 = TREE_OPERAND (parg, 1);
10266 STRIP_NOPS (parg0);
10267 STRIP_NOPS (parg1);
10269 if (TREE_CODE (parg0) == MULT_EXPR
10270 && TREE_CODE (parg1) != MULT_EXPR)
10271 return fold_build2_loc (loc, pcode, type,
10272 fold_build2_loc (loc, PLUS_EXPR, type,
10273 fold_convert_loc (loc, type,
10274 parg0),
10275 fold_convert_loc (loc, type,
10276 marg)),
10277 fold_convert_loc (loc, type, parg1));
10278 if (TREE_CODE (parg0) != MULT_EXPR
10279 && TREE_CODE (parg1) == MULT_EXPR)
10280 return
10281 fold_build2_loc (loc, PLUS_EXPR, type,
10282 fold_convert_loc (loc, type, parg0),
10283 fold_build2_loc (loc, pcode, type,
10284 fold_convert_loc (loc, type, marg),
10285 fold_convert_loc (loc, type,
10286 parg1)));
10289 else
10291 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10292 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10293 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10295 /* Likewise if the operands are reversed. */
10296 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10297 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10299 /* Convert X + -C into X - C. */
10300 if (TREE_CODE (arg1) == REAL_CST
10301 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10303 tem = fold_negate_const (arg1, type);
10304 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10305 return fold_build2_loc (loc, MINUS_EXPR, type,
10306 fold_convert_loc (loc, type, arg0),
10307 fold_convert_loc (loc, type, tem));
10310 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10311 to __complex__ ( x, y ). This is not the same for SNaNs or
10312 if signed zeros are involved. */
10313 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10314 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10315 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10317 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10318 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10319 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10320 bool arg0rz = false, arg0iz = false;
10321 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10322 || (arg0i && (arg0iz = real_zerop (arg0i))))
10324 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10325 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10326 if (arg0rz && arg1i && real_zerop (arg1i))
10328 tree rp = arg1r ? arg1r
10329 : build1 (REALPART_EXPR, rtype, arg1);
10330 tree ip = arg0i ? arg0i
10331 : build1 (IMAGPART_EXPR, rtype, arg0);
10332 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10334 else if (arg0iz && arg1r && real_zerop (arg1r))
10336 tree rp = arg0r ? arg0r
10337 : build1 (REALPART_EXPR, rtype, arg0);
10338 tree ip = arg1i ? arg1i
10339 : build1 (IMAGPART_EXPR, rtype, arg1);
10340 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10345 if (flag_unsafe_math_optimizations
10346 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10347 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10348 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10349 return tem;
10351 /* Convert x+x into x*2.0. */
10352 if (operand_equal_p (arg0, arg1, 0)
10353 && SCALAR_FLOAT_TYPE_P (type))
10354 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10355 build_real (type, dconst2));
10357 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10358 We associate floats only if the user has specified
10359 -fassociative-math. */
10360 if (flag_associative_math
10361 && TREE_CODE (arg1) == PLUS_EXPR
10362 && TREE_CODE (arg0) != MULT_EXPR)
10364 tree tree10 = TREE_OPERAND (arg1, 0);
10365 tree tree11 = TREE_OPERAND (arg1, 1);
10366 if (TREE_CODE (tree11) == MULT_EXPR
10367 && TREE_CODE (tree10) == MULT_EXPR)
10369 tree tree0;
10370 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10371 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10374 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10375 We associate floats only if the user has specified
10376 -fassociative-math. */
10377 if (flag_associative_math
10378 && TREE_CODE (arg0) == PLUS_EXPR
10379 && TREE_CODE (arg1) != MULT_EXPR)
10381 tree tree00 = TREE_OPERAND (arg0, 0);
10382 tree tree01 = TREE_OPERAND (arg0, 1);
10383 if (TREE_CODE (tree01) == MULT_EXPR
10384 && TREE_CODE (tree00) == MULT_EXPR)
10386 tree tree0;
10387 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10388 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10393 bit_rotate:
10394 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10395 is a rotate of A by C1 bits. */
10396 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10397 is a rotate of A by B bits. */
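/* Illustrative example, not part of the original source: for unsigned
   32-bit x, "(x << 3) + (x >> 29)" has shift counts summing to the
   precision, so it becomes a left rotate of x by 3; similarly
   "(x << b) + (x >> (32 - b))" becomes a rotate by b.  */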
10399 enum tree_code code0, code1;
10400 tree rtype;
10401 code0 = TREE_CODE (arg0);
10402 code1 = TREE_CODE (arg1);
10403 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10404 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10405 && operand_equal_p (TREE_OPERAND (arg0, 0),
10406 TREE_OPERAND (arg1, 0), 0)
10407 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10408 TYPE_UNSIGNED (rtype))
10409 /* Only create rotates in complete modes. Other cases are not
10410 expanded properly. */
10411 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10413 tree tree01, tree11;
10414 enum tree_code code01, code11;
10416 tree01 = TREE_OPERAND (arg0, 1);
10417 tree11 = TREE_OPERAND (arg1, 1);
10418 STRIP_NOPS (tree01);
10419 STRIP_NOPS (tree11);
10420 code01 = TREE_CODE (tree01);
10421 code11 = TREE_CODE (tree11);
10422 if (code01 == INTEGER_CST
10423 && code11 == INTEGER_CST
10424 && TREE_INT_CST_HIGH (tree01) == 0
10425 && TREE_INT_CST_HIGH (tree11) == 0
10426 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10427 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10429 tem = build2 (LROTATE_EXPR,
10430 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10431 TREE_OPERAND (arg0, 0),
10432 code0 == LSHIFT_EXPR
10433 ? tree01 : tree11);
10434 SET_EXPR_LOCATION (tem, loc);
10435 return fold_convert_loc (loc, type, tem);
10437 else if (code11 == MINUS_EXPR)
10439 tree tree110, tree111;
10440 tree110 = TREE_OPERAND (tree11, 0);
10441 tree111 = TREE_OPERAND (tree11, 1);
10442 STRIP_NOPS (tree110);
10443 STRIP_NOPS (tree111);
10444 if (TREE_CODE (tree110) == INTEGER_CST
10445 && 0 == compare_tree_int (tree110,
10446 TYPE_PRECISION
10447 (TREE_TYPE (TREE_OPERAND
10448 (arg0, 0))))
10449 && operand_equal_p (tree01, tree111, 0))
10450 return
10451 fold_convert_loc (loc, type,
10452 build2 ((code0 == LSHIFT_EXPR
10453 ? LROTATE_EXPR
10454 : RROTATE_EXPR),
10455 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10456 TREE_OPERAND (arg0, 0), tree01));
10458 else if (code01 == MINUS_EXPR)
10460 tree tree010, tree011;
10461 tree010 = TREE_OPERAND (tree01, 0);
10462 tree011 = TREE_OPERAND (tree01, 1);
10463 STRIP_NOPS (tree010);
10464 STRIP_NOPS (tree011);
10465 if (TREE_CODE (tree010) == INTEGER_CST
10466 && 0 == compare_tree_int (tree010,
10467 TYPE_PRECISION
10468 (TREE_TYPE (TREE_OPERAND
10469 (arg0, 0))))
10470 && operand_equal_p (tree11, tree011, 0))
10471 return fold_convert_loc
10472 (loc, type,
10473 build2 ((code0 != LSHIFT_EXPR
10474 ? LROTATE_EXPR
10475 : RROTATE_EXPR),
10476 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10477 TREE_OPERAND (arg0, 0), tree11));
10482 associate:
10483 /* In most languages, we can't associate operations on floats through
10484 parentheses. Rather than remember where the parentheses were, we
10485 don't associate floats at all, unless the user has specified
10486 -fassociative-math.
10487 And, we need to make sure type is not saturating. */
10489 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10490 && !TYPE_SATURATING (type))
10492 tree var0, con0, lit0, minus_lit0;
10493 tree var1, con1, lit1, minus_lit1;
10494 bool ok = true;
10496 /* Split both trees into variables, constants, and literals. Then
10497 associate each group together, the constants with literals,
10498 then the result with variables. This increases the chances of
10499 literals being recombined later and of generating relocatable
10500 expressions for the sum of a constant and literal. */
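/* Illustrative example, not part of the original source: for unsigned x
   and y, "(x + 1) + (y + 2)" splits into variables x, y and literals 1, 2;
   associating the literals first lets the sum fold to "(x + y) + 3".  */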
10501 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10502 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10503 code == MINUS_EXPR);
10505 /* With undefined overflow we can only associate constants
10506 with one variable. */
10507 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10508 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10509 && var0 && var1)
10511 tree tmp0 = var0;
10512 tree tmp1 = var1;
10514 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10515 tmp0 = TREE_OPERAND (tmp0, 0);
10516 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10517 tmp1 = TREE_OPERAND (tmp1, 0);
10518 /* The only case we can still associate with two variables
10519 is if they are the same, modulo negation. */
10520 if (!operand_equal_p (tmp0, tmp1, 0))
10521 ok = false;
10524 /* Only do something if we found more than two objects. Otherwise,
10525 nothing has changed and we risk infinite recursion. */
10526 if (ok
10527 && (2 < ((var0 != 0) + (var1 != 0)
10528 + (con0 != 0) + (con1 != 0)
10529 + (lit0 != 0) + (lit1 != 0)
10530 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10532 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10533 if (code == MINUS_EXPR)
10534 code = PLUS_EXPR;
10536 var0 = associate_trees (loc, var0, var1, code, type);
10537 con0 = associate_trees (loc, con0, con1, code, type);
10538 lit0 = associate_trees (loc, lit0, lit1, code, type);
10539 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10541 /* Preserve the MINUS_EXPR if the negative part of the literal is
10542 greater than the positive part. Otherwise, the multiplicative
10543 folding code (i.e. extract_muldiv) may be fooled in case
10544 unsigned constants are subtracted, as in the following
10545 example: ((X*2 + 4) - 8U)/2. */
10546 if (minus_lit0 && lit0)
10548 if (TREE_CODE (lit0) == INTEGER_CST
10549 && TREE_CODE (minus_lit0) == INTEGER_CST
10550 && tree_int_cst_lt (lit0, minus_lit0))
10552 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10553 MINUS_EXPR, type);
10554 lit0 = 0;
10556 else
10558 lit0 = associate_trees (loc, lit0, minus_lit0,
10559 MINUS_EXPR, type);
10560 minus_lit0 = 0;
10563 if (minus_lit0)
10565 if (con0 == 0)
10566 return
10567 fold_convert_loc (loc, type,
10568 associate_trees (loc, var0, minus_lit0,
10569 MINUS_EXPR, type));
10570 else
10572 con0 = associate_trees (loc, con0, minus_lit0,
10573 MINUS_EXPR, type);
10574 return
10575 fold_convert_loc (loc, type,
10576 associate_trees (loc, var0, con0,
10577 PLUS_EXPR, type));
10581 con0 = associate_trees (loc, con0, lit0, code, type);
10582 return
10583 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10584 code, type));
10588 return NULL_TREE;
10590 case MINUS_EXPR:
10591 /* Pointer simplifications for subtraction, simple reassociations. */
10592 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10594 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10595 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10596 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10598 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10599 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10600 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10601 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10602 return fold_build2_loc (loc, PLUS_EXPR, type,
10603 fold_build2_loc (loc, MINUS_EXPR, type,
10604 arg00, arg10),
10605 fold_build2_loc (loc, MINUS_EXPR, type,
10606 arg01, arg11));
10608 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10609 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10611 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10612 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10613 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10614 fold_convert_loc (loc, type, arg1));
10615 if (tmp)
10616 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10619 /* A - (-B) -> A + B */
10620 if (TREE_CODE (arg1) == NEGATE_EXPR)
10621 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10622 fold_convert_loc (loc, type,
10623 TREE_OPERAND (arg1, 0)));
10624 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10625 if (TREE_CODE (arg0) == NEGATE_EXPR
10626 && (FLOAT_TYPE_P (type)
10627 || INTEGRAL_TYPE_P (type))
10628 && negate_expr_p (arg1)
10629 && reorder_operands_p (arg0, arg1))
10630 return fold_build2_loc (loc, MINUS_EXPR, type,
10631 fold_convert_loc (loc, type,
10632 negate_expr (arg1)),
10633 fold_convert_loc (loc, type,
10634 TREE_OPERAND (arg0, 0)));
10635 /* Convert -A - 1 to ~A. */
10636 if (INTEGRAL_TYPE_P (type)
10637 && TREE_CODE (arg0) == NEGATE_EXPR
10638 && integer_onep (arg1)
10639 && !TYPE_OVERFLOW_TRAPS (type))
10640 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10641 fold_convert_loc (loc, type,
10642 TREE_OPERAND (arg0, 0)));
10644 /* Convert -1 - A to ~A. */
10645 if (INTEGRAL_TYPE_P (type)
10646 && integer_all_onesp (arg0))
10647 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10650 /* X - (X / CST) * CST is X % CST. */
10651 if (INTEGRAL_TYPE_P (type)
10652 && TREE_CODE (arg1) == MULT_EXPR
10653 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10654 && operand_equal_p (arg0,
10655 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10656 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10657 TREE_OPERAND (arg1, 1), 0))
10658 return
10659 fold_convert_loc (loc, type,
10660 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10661 arg0, TREE_OPERAND (arg1, 1)));
10663 if (! FLOAT_TYPE_P (type))
10665 if (integer_zerop (arg0))
10666 return negate_expr (fold_convert_loc (loc, type, arg1));
10667 if (integer_zerop (arg1))
10668 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10670 /* Fold A - (A & B) into ~B & A. */
10671 if (!TREE_SIDE_EFFECTS (arg0)
10672 && TREE_CODE (arg1) == BIT_AND_EXPR)
10674 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10676 tree arg10 = fold_convert_loc (loc, type,
10677 TREE_OPERAND (arg1, 0));
10678 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10679 fold_build1_loc (loc, BIT_NOT_EXPR,
10680 type, arg10),
10681 fold_convert_loc (loc, type, arg0));
10683 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10685 tree arg11 = fold_convert_loc (loc,
10686 type, TREE_OPERAND (arg1, 1));
10687 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10688 fold_build1_loc (loc, BIT_NOT_EXPR,
10689 type, arg11),
10690 fold_convert_loc (loc, type, arg0));
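/* Illustrative check, not part of the original source: every bit set in
   a & b is also set in a, so the subtraction "a - (a & b)" borrows nothing
   and clears exactly the bits of b that are set in a, giving "~b & a".  */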
10694 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10695 any power of 2 minus 1. */
10696 if (TREE_CODE (arg0) == BIT_AND_EXPR
10697 && TREE_CODE (arg1) == BIT_AND_EXPR
10698 && operand_equal_p (TREE_OPERAND (arg0, 0),
10699 TREE_OPERAND (arg1, 0), 0))
10701 tree mask0 = TREE_OPERAND (arg0, 1);
10702 tree mask1 = TREE_OPERAND (arg1, 1);
10703 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10705 if (operand_equal_p (tem, mask1, 0))
10707 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10708 TREE_OPERAND (arg0, 0), mask1);
10709 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10714 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10715 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10716 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10718 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10719 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10720 (-ARG1 + ARG0) reduces to -ARG1. */
10721 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10722 return negate_expr (fold_convert_loc (loc, type, arg1));
10724 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10725 __complex__ ( x, -y ). This is not the same for SNaNs or if
10726 signed zeros are involved. */
10727 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10728 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10729 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10731 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10732 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10733 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10734 bool arg0rz = false, arg0iz = false;
10735 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10736 || (arg0i && (arg0iz = real_zerop (arg0i))))
10738 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10739 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10740 if (arg0rz && arg1i && real_zerop (arg1i))
10742 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10743 arg1r ? arg1r
10744 : build1 (REALPART_EXPR, rtype, arg1));
10745 tree ip = arg0i ? arg0i
10746 : build1 (IMAGPART_EXPR, rtype, arg0);
10747 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10749 else if (arg0iz && arg1r && real_zerop (arg1r))
10751 tree rp = arg0r ? arg0r
10752 : build1 (REALPART_EXPR, rtype, arg0);
10753 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10754 arg1i ? arg1i
10755 : build1 (IMAGPART_EXPR, rtype, arg1));
10756 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10761 /* Fold &x - &x. This can happen from &x.foo - &x.
10762 This is unsafe for certain floats even in non-IEEE formats.
10763 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10764 Also note that operand_equal_p is always false if an operand
10765 is volatile. */
10767 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10768 && operand_equal_p (arg0, arg1, 0))
10769 return fold_convert_loc (loc, type, integer_zero_node);
10771 /* A - B -> A + (-B) if B is easily negatable. */
10772 if (negate_expr_p (arg1)
10773 && ((FLOAT_TYPE_P (type)
10774 /* Avoid this transformation if B is a positive REAL_CST. */
10775 && (TREE_CODE (arg1) != REAL_CST
10776 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10777 || INTEGRAL_TYPE_P (type)))
10778 return fold_build2_loc (loc, PLUS_EXPR, type,
10779 fold_convert_loc (loc, type, arg0),
10780 fold_convert_loc (loc, type,
10781 negate_expr (arg1)));
10783 /* Try folding difference of addresses. */
10785 HOST_WIDE_INT diff;
10787 if ((TREE_CODE (arg0) == ADDR_EXPR
10788 || TREE_CODE (arg1) == ADDR_EXPR)
10789 && ptr_difference_const (arg0, arg1, &diff))
10790 return build_int_cst_type (type, diff);
10793 /* Fold &a[i] - &a[j] to i-j. */
10794 if (TREE_CODE (arg0) == ADDR_EXPR
10795 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10796 && TREE_CODE (arg1) == ADDR_EXPR
10797 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10799 tree aref0 = TREE_OPERAND (arg0, 0);
10800 tree aref1 = TREE_OPERAND (arg1, 0);
10801 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10802 TREE_OPERAND (aref1, 0), 0))
10804 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10805 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10806 tree esz = array_ref_element_size (aref0);
10807 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10808 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10809 fold_convert_loc (loc, type, esz));
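/* Illustrative sketch (editorial addition): at the source level the fold
   above implements the C guarantee that &a[i] - &a[j] == i - j; the tree
   form works on byte addresses, hence the multiplication by the element
   size above.  Guarded by #if 0; the helper name is hypothetical.  */
#if 0
#include <assert.h>
static void
check_array_ref_diff (void)
{
  int a[16];
  assert (&a[9] - &a[4] == 9 - 4);
}
#endif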
10814 if (FLOAT_TYPE_P (type)
10815 && flag_unsafe_math_optimizations
10816 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10817 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10818 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10819 return tem;
10821 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10822 same or one. Make sure the type is not saturating.
10823 fold_plusminus_mult_expr will re-associate. */
10824 if ((TREE_CODE (arg0) == MULT_EXPR
10825 || TREE_CODE (arg1) == MULT_EXPR)
10826 && !TYPE_SATURATING (type)
10827 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10829 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10830 if (tem)
10831 return tem;
10834 goto associate;
10836 case MULT_EXPR:
10837 /* (-A) * (-B) -> A * B */
10838 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10839 return fold_build2_loc (loc, MULT_EXPR, type,
10840 fold_convert_loc (loc, type,
10841 TREE_OPERAND (arg0, 0)),
10842 fold_convert_loc (loc, type,
10843 negate_expr (arg1)));
10844 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10845 return fold_build2_loc (loc, MULT_EXPR, type,
10846 fold_convert_loc (loc, type,
10847 negate_expr (arg0)),
10848 fold_convert_loc (loc, type,
10849 TREE_OPERAND (arg1, 0)));
10851 if (! FLOAT_TYPE_P (type))
10853 if (integer_zerop (arg1))
10854 return omit_one_operand_loc (loc, type, arg1, arg0);
10855 if (integer_onep (arg1))
10856 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10857 /* Transform x * -1 into -x. Make sure to do the negation
10858 on the original operand with conversions not stripped
10859 because we can only strip non-sign-changing conversions. */
10860 if (integer_all_onesp (arg1))
10861 return fold_convert_loc (loc, type, negate_expr (op0));
10862 /* Transform x * -C into -x * C if x is easily negatable. */
10863 if (TREE_CODE (arg1) == INTEGER_CST
10864 && tree_int_cst_sgn (arg1) == -1
10865 && negate_expr_p (arg0)
10866 && (tem = negate_expr (arg1)) != arg1
10867 && !TREE_OVERFLOW (tem))
10868 return fold_build2_loc (loc, MULT_EXPR, type,
10869 fold_convert_loc (loc, type,
10870 negate_expr (arg0)),
10871 tem);
10873 /* (a * (1 << b)) is (a << b) */
10874 if (TREE_CODE (arg1) == LSHIFT_EXPR
10875 && integer_onep (TREE_OPERAND (arg1, 0)))
10876 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10877 TREE_OPERAND (arg1, 1));
10878 if (TREE_CODE (arg0) == LSHIFT_EXPR
10879 && integer_onep (TREE_OPERAND (arg0, 0)))
10880 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10881 TREE_OPERAND (arg0, 1));
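/* Illustrative sketch (editorial addition): a * (1 << b) == a << b for an
   in-range shift count, which is all the fold above relies on.  Assumes
   32-bit unsigned int; guarded by #if 0.  */
#if 0
#include <assert.h>
static void
check_mult_of_shifted_one (unsigned int a, unsigned int b)
{
  if (b < 32)			/* shift counts must stay in range */
    assert (a * (1u << b) == a << b);
}
#endif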
10883 /* (A + A) * C -> A * 2 * C */
10884 if (TREE_CODE (arg0) == PLUS_EXPR
10885 && TREE_CODE (arg1) == INTEGER_CST
10886 && operand_equal_p (TREE_OPERAND (arg0, 0),
10887 TREE_OPERAND (arg0, 1), 0))
10888 return fold_build2_loc (loc, MULT_EXPR, type,
10889 omit_one_operand_loc (loc, type,
10890 TREE_OPERAND (arg0, 0),
10891 TREE_OPERAND (arg0, 1)),
10892 fold_build2_loc (loc, MULT_EXPR, type,
10893 build_int_cst (type, 2) , arg1));
10895 strict_overflow_p = false;
10896 if (TREE_CODE (arg1) == INTEGER_CST
10897 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10898 &strict_overflow_p)))
10900 if (strict_overflow_p)
10901 fold_overflow_warning (("assuming signed overflow does not "
10902 "occur when simplifying "
10903 "multiplication"),
10904 WARN_STRICT_OVERFLOW_MISC);
10905 return fold_convert_loc (loc, type, tem);
10908 /* Optimize z * conj(z) for integer complex numbers. */
10909 if (TREE_CODE (arg0) == CONJ_EXPR
10910 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10911 return fold_mult_zconjz (loc, type, arg1);
10912 if (TREE_CODE (arg1) == CONJ_EXPR
10913 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10914 return fold_mult_zconjz (loc, type, arg0);
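/* Illustrative sketch (editorial addition): what fold_mult_zconjz computes
   for integer complex values, written with GCC's __complex__ extension,
   its imaginary-constant suffix, and ~ as complex conjugation.  Ignoring
   overflow, z * conj(z) is x*x + y*y with zero imaginary part.  Guarded
   by #if 0; the helper name is hypothetical.  */
#if 0
#include <assert.h>
static void
check_int_zconjz (int x, int y)
{
  __complex__ int z = x + y * 1i;
  __complex__ int p = z * ~z;
  assert (__real__ p == x * x + y * y && __imag__ p == 0);
}
#endif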
10916 else
10918 /* Maybe fold x * 0 to 0. The expressions aren't the same
10919 when x is NaN, since x * 0 is also NaN. Nor are they the
10920 same in modes with signed zeros, since multiplying a
10921 negative value by 0 gives -0, not +0. */
10922 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10923 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10924 && real_zerop (arg1))
10925 return omit_one_operand_loc (loc, type, arg1, arg0);
10926 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10927 Likewise for complex arithmetic with signed zeros. */
10928 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10929 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10930 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10931 && real_onep (arg1))
10932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10934 /* Transform x * -1.0 into -x. */
10935 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10936 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10937 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10938 && real_minus_onep (arg1))
10939 return fold_convert_loc (loc, type, negate_expr (arg0));
10941 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10942 the result for floating-point types due to rounding, so it is applied
10943 only if -fassociative-math was specified. */
10944 if (flag_associative_math
10945 && TREE_CODE (arg0) == RDIV_EXPR
10946 && TREE_CODE (arg1) == REAL_CST
10947 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10949 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10950 arg1, 0);
10951 if (tem)
10952 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10953 TREE_OPERAND (arg0, 1));
10956 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10957 if (operand_equal_p (arg0, arg1, 0))
10959 tree tem = fold_strip_sign_ops (arg0);
10960 if (tem != NULL_TREE)
10962 tem = fold_convert_loc (loc, type, tem);
10963 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10967 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10968 This is not the same for NaNs or if signed zeros are
10969 involved. */
10970 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10971 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10972 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10973 && TREE_CODE (arg1) == COMPLEX_CST
10974 && real_zerop (TREE_REALPART (arg1)))
10976 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10977 if (real_onep (TREE_IMAGPART (arg1)))
10978 return
10979 fold_build2_loc (loc, COMPLEX_EXPR, type,
10980 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10981 rtype, arg0)),
10982 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10983 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10984 return
10985 fold_build2_loc (loc, COMPLEX_EXPR, type,
10986 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10987 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10988 rtype, arg0)));
10991 /* Optimize z * conj(z) for floating point complex numbers.
10992 Guarded by flag_unsafe_math_optimizations as non-finite
10993 imaginary components don't produce scalar results. */
10994 if (flag_unsafe_math_optimizations
10995 && TREE_CODE (arg0) == CONJ_EXPR
10996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10997 return fold_mult_zconjz (loc, type, arg1);
10998 if (flag_unsafe_math_optimizations
10999 && TREE_CODE (arg1) == CONJ_EXPR
11000 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11001 return fold_mult_zconjz (loc, type, arg0);
11003 if (flag_unsafe_math_optimizations)
11005 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11006 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11008 /* Optimizations of root(...)*root(...). */
11009 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11011 tree rootfn, arg;
11012 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11013 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11015 /* Optimize sqrt(x)*sqrt(x) as x. */
11016 if (BUILTIN_SQRT_P (fcode0)
11017 && operand_equal_p (arg00, arg10, 0)
11018 && ! HONOR_SNANS (TYPE_MODE (type)))
11019 return arg00;
11021 /* Optimize root(x)*root(y) as root(x*y). */
11022 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11023 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11024 return build_call_expr_loc (loc, rootfn, 1, arg);
11027 /* Optimize expN(x)*expN(y) as expN(x+y). */
11028 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11030 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11031 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11032 CALL_EXPR_ARG (arg0, 0),
11033 CALL_EXPR_ARG (arg1, 0));
11034 return build_call_expr_loc (loc, expfn, 1, arg);
11037 /* Optimizations of pow(...)*pow(...). */
11038 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11039 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11040 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11042 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11043 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11044 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11045 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11047 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11048 if (operand_equal_p (arg01, arg11, 0))
11050 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11051 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11052 arg00, arg10);
11053 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11056 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11057 if (operand_equal_p (arg00, arg10, 0))
11059 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11060 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11061 arg01, arg11);
11062 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11066 /* Optimize tan(x)*cos(x) as sin(x). */
11067 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11068 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11069 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11070 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11071 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11072 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11073 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11074 CALL_EXPR_ARG (arg1, 0), 0))
11076 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11078 if (sinfn != NULL_TREE)
11079 return build_call_expr_loc (loc, sinfn, 1,
11080 CALL_EXPR_ARG (arg0, 0));
11083 /* Optimize x*pow(x,c) as pow(x,c+1). */
11084 if (fcode1 == BUILT_IN_POW
11085 || fcode1 == BUILT_IN_POWF
11086 || fcode1 == BUILT_IN_POWL)
11088 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11089 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11090 if (TREE_CODE (arg11) == REAL_CST
11091 && !TREE_OVERFLOW (arg11)
11092 && operand_equal_p (arg0, arg10, 0))
11094 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11095 REAL_VALUE_TYPE c;
11096 tree arg;
11098 c = TREE_REAL_CST (arg11);
11099 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11100 arg = build_real (type, c);
11101 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11105 /* Optimize pow(x,c)*x as pow(x,c+1). */
11106 if (fcode0 == BUILT_IN_POW
11107 || fcode0 == BUILT_IN_POWF
11108 || fcode0 == BUILT_IN_POWL)
11110 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11111 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11112 if (TREE_CODE (arg01) == REAL_CST
11113 && !TREE_OVERFLOW (arg01)
11114 && operand_equal_p (arg1, arg00, 0))
11116 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11117 REAL_VALUE_TYPE c;
11118 tree arg;
11120 c = TREE_REAL_CST (arg01);
11121 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11122 arg = build_real (type, c);
11123 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11127 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
11128 if (optimize_function_for_speed_p (cfun)
11129 && operand_equal_p (arg0, arg1, 0))
11131 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11133 if (powfn)
11135 tree arg = build_real (type, dconst2);
11136 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11141 goto associate;
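/* Illustrative sketch (editorial addition): the -funsafe-math-optimizations
   multiply folds above rest on identities that hold only up to rounding,
   e.g. pow(x,y)*pow(x,z) == pow(x,y+z) and tan(x)*cos(x) == sin(x).  The
   hypothetical helper returns the (normally tiny) discrepancies rather
   than asserting exact equality.  Guarded by #if 0.  */
#if 0
#include <math.h>
static void
check_unsafe_mult_folds (double x, double y, double z,
			 double *d1, double *d2)
{
  *d1 = pow (x, y) * pow (x, z) - pow (x, y + z);
  *d2 = tan (x) * cos (x) - sin (x);
}
#endif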
11143 case BIT_IOR_EXPR:
11144 bit_ior:
11145 if (integer_all_onesp (arg1))
11146 return omit_one_operand_loc (loc, type, arg1, arg0);
11147 if (integer_zerop (arg1))
11148 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11149 if (operand_equal_p (arg0, arg1, 0))
11150 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11152 /* ~X | X is -1. */
11153 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11154 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11156 t1 = fold_convert_loc (loc, type, integer_zero_node);
11157 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11158 return omit_one_operand_loc (loc, type, t1, arg1);
11161 /* X | ~X is -1. */
11162 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11163 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11165 t1 = fold_convert_loc (loc, type, integer_zero_node);
11166 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11167 return omit_one_operand_loc (loc, type, t1, arg0);
11170 /* Canonicalize (X & C1) | C2. */
11171 if (TREE_CODE (arg0) == BIT_AND_EXPR
11172 && TREE_CODE (arg1) == INTEGER_CST
11173 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11175 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
11176 int width = TYPE_PRECISION (type), w;
11177 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
11178 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11179 hi2 = TREE_INT_CST_HIGH (arg1);
11180 lo2 = TREE_INT_CST_LOW (arg1);
11182 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11183 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
11184 return omit_one_operand_loc (loc, type, arg1,
11185 TREE_OPERAND (arg0, 0));
11187 if (width > HOST_BITS_PER_WIDE_INT)
11189 mhi = (unsigned HOST_WIDE_INT) -1
11190 >> (2 * HOST_BITS_PER_WIDE_INT - width);
11191 mlo = -1;
11193 else
11195 mhi = 0;
11196 mlo = (unsigned HOST_WIDE_INT) -1
11197 >> (HOST_BITS_PER_WIDE_INT - width);
11200 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11201 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
11202 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11203 TREE_OPERAND (arg0, 0), arg1);
11205 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11206 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11207 mode which allows further optimizations. */
11208 hi1 &= mhi;
11209 lo1 &= mlo;
11210 hi2 &= mhi;
11211 lo2 &= mlo;
11212 hi3 = hi1 & ~hi2;
11213 lo3 = lo1 & ~lo2;
11214 for (w = BITS_PER_UNIT;
11215 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11216 w <<= 1)
11218 unsigned HOST_WIDE_INT mask
11219 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11220 if (((lo1 | lo2) & mask) == mask
11221 && (lo1 & ~mask) == 0 && hi1 == 0)
11223 hi3 = 0;
11224 lo3 = mask;
11225 break;
11228 if (hi3 != hi1 || lo3 != lo1)
11229 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11230 fold_build2_loc (loc, BIT_AND_EXPR, type,
11231 TREE_OPERAND (arg0, 0),
11232 build_int_cst_wide (type,
11233 lo3, hi3)),
11234 arg1);
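/* Illustrative sketch (editorial addition): the two special cases of the
   (X & C1) | C2 canonicalization above, checked on concrete masks.
   Guarded by #if 0; the helper name is hypothetical.  */
#if 0
#include <assert.h>
static void
check_and_ior_canon (unsigned int x)
{
  /* (C1 & C2) == C1: every bit C1 can contribute is already in C2,
     so (X & C1) | C2 is just C2.  */
  assert (((x & 0x0fu) | 0xffu) == 0xffu);
  /* (C1 | C2) == ~0: bits outside C2 come straight from X,
     so (X & C1) | C2 is X | C2.  */
  assert (((x & 0xff00ffffu) | 0x00ff0000u) == (x | 0x00ff0000u));
}
#endif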
11237 /* (X & Y) | Y is (X, Y). */
11238 if (TREE_CODE (arg0) == BIT_AND_EXPR
11239 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11240 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11241 /* (X & Y) | X is (Y, X). */
11242 if (TREE_CODE (arg0) == BIT_AND_EXPR
11243 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11244 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11245 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11246 /* X | (X & Y) is (Y, X). */
11247 if (TREE_CODE (arg1) == BIT_AND_EXPR
11248 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11249 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11250 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11251 /* X | (Y & X) is (Y, X). */
11252 if (TREE_CODE (arg1) == BIT_AND_EXPR
11253 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11254 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11255 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11257 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11258 if (t1 != NULL_TREE)
11259 return t1;
11261 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11263 This results in more efficient code for machines without a NAND
11264 instruction. Combine will canonicalize to the first form
11265 which will allow use of NAND instructions provided by the
11266 backend if they exist. */
11267 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11268 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11270 return
11271 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11272 build2 (BIT_AND_EXPR, type,
11273 fold_convert_loc (loc, type,
11274 TREE_OPERAND (arg0, 0)),
11275 fold_convert_loc (loc, type,
11276 TREE_OPERAND (arg1, 0))));
11279 /* See if this can be simplified into a rotate first. If that
11280 is unsuccessful continue in the association code. */
11281 goto bit_rotate;
11283 case BIT_XOR_EXPR:
11284 if (integer_zerop (arg1))
11285 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11286 if (integer_all_onesp (arg1))
11287 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11288 if (operand_equal_p (arg0, arg1, 0))
11289 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11291 /* ~X ^ X is -1. */
11292 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11293 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11295 t1 = fold_convert_loc (loc, type, integer_zero_node);
11296 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11297 return omit_one_operand_loc (loc, type, t1, arg1);
11300 /* X ^ ~X is -1. */
11301 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11302 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11304 t1 = fold_convert_loc (loc, type, integer_zero_node);
11305 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11306 return omit_one_operand_loc (loc, type, t1, arg0);
11309 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11310 with a constant, and the two constants have no bits in common,
11311 we should treat this as a BIT_IOR_EXPR since this may produce more
11312 simplifications. */
11313 if (TREE_CODE (arg0) == BIT_AND_EXPR
11314 && TREE_CODE (arg1) == BIT_AND_EXPR
11315 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11316 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11317 && integer_zerop (const_binop (BIT_AND_EXPR,
11318 TREE_OPERAND (arg0, 1),
11319 TREE_OPERAND (arg1, 1), 0)))
11321 code = BIT_IOR_EXPR;
11322 goto bit_ior;
11325 /* (X | Y) ^ X -> Y & ~X. */
11326 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11327 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11329 tree t2 = TREE_OPERAND (arg0, 1);
11330 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11331 arg1);
11332 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11333 fold_convert_loc (loc, type, t2),
11334 fold_convert_loc (loc, type, t1));
11335 return t1;
11338 /* (Y | X) ^ X -> Y & ~X. */
11339 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11340 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11342 tree t2 = TREE_OPERAND (arg0, 0);
11343 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11344 arg1);
11345 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11346 fold_convert_loc (loc, type, t2),
11347 fold_convert_loc (loc, type, t1));
11348 return t1;
11351 /* X ^ (X | Y) -> Y & ~X. */
11352 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11353 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11355 tree t2 = TREE_OPERAND (arg1, 1);
11356 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11357 arg0);
11358 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11359 fold_convert_loc (loc, type, t2),
11360 fold_convert_loc (loc, type, t1));
11361 return t1;
11364 /* X ^ (Y | X) -> Y & ~X. */
11365 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11366 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11368 tree t2 = TREE_OPERAND (arg1, 0);
11369 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11370 arg0);
11371 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11372 fold_convert_loc (loc, type, t2),
11373 fold_convert_loc (loc, type, t1));
11374 return t1;
11377 /* Convert ~X ^ ~Y to X ^ Y. */
11378 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11379 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11380 return fold_build2_loc (loc, code, type,
11381 fold_convert_loc (loc, type,
11382 TREE_OPERAND (arg0, 0)),
11383 fold_convert_loc (loc, type,
11384 TREE_OPERAND (arg1, 0)));
11386 /* Convert ~X ^ C to X ^ ~C. */
11387 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11388 && TREE_CODE (arg1) == INTEGER_CST)
11389 return fold_build2_loc (loc, code, type,
11390 fold_convert_loc (loc, type,
11391 TREE_OPERAND (arg0, 0)),
11392 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11394 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11395 if (TREE_CODE (arg0) == BIT_AND_EXPR
11396 && integer_onep (TREE_OPERAND (arg0, 1))
11397 && integer_onep (arg1))
11398 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11399 build_int_cst (TREE_TYPE (arg0), 0));
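/* Illustrative sketch (editorial addition): (X & 1) ^ 1 tests the low bit
   for zero, exactly the (X & 1) == 0 form produced above.  Guarded by
   #if 0; the helper name is hypothetical.  */
#if 0
#include <assert.h>
static void
check_low_bit_xor (unsigned int x)
{
  assert (((x & 1u) ^ 1u) == ((x & 1u) == 0));
}
#endif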
11401 /* Fold (X & Y) ^ Y as ~X & Y. */
11402 if (TREE_CODE (arg0) == BIT_AND_EXPR
11403 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11405 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11406 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11407 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11408 fold_convert_loc (loc, type, arg1));
11410 /* Fold (X & Y) ^ X as ~Y & X. */
11411 if (TREE_CODE (arg0) == BIT_AND_EXPR
11412 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11413 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11415 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11416 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11417 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11418 fold_convert_loc (loc, type, arg1));
11420 /* Fold X ^ (X & Y) as X & ~Y. */
11421 if (TREE_CODE (arg1) == BIT_AND_EXPR
11422 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11424 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11425 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11426 fold_convert_loc (loc, type, arg0),
11427 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11429 /* Fold X ^ (Y & X) as ~Y & X. */
11430 if (TREE_CODE (arg1) == BIT_AND_EXPR
11431 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11432 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11434 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11435 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11436 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11437 fold_convert_loc (loc, type, arg0));
11440 /* See if this can be simplified into a rotate first. If that
11441 is unsuccessful continue in the association code. */
11442 goto bit_rotate;
11444 case BIT_AND_EXPR:
11445 if (integer_all_onesp (arg1))
11446 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11447 if (integer_zerop (arg1))
11448 return omit_one_operand_loc (loc, type, arg1, arg0);
11449 if (operand_equal_p (arg0, arg1, 0))
11450 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11452 /* ~X & X is always zero. */
11453 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11454 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11455 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11457 /* X & ~X is always zero. */
11458 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11459 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11460 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11462 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11463 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11464 && TREE_CODE (arg1) == INTEGER_CST
11465 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11467 tree tmp1 = fold_convert_loc (loc, type, arg1);
11468 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11469 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11470 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11471 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11472 return
11473 fold_convert_loc (loc, type,
11474 fold_build2_loc (loc, BIT_IOR_EXPR,
11475 type, tmp2, tmp3));
11478 /* (X | Y) & Y is (X, Y). */
11479 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11480 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11481 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11482 /* (X | Y) & X is (Y, X). */
11483 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11484 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11485 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11486 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11487 /* X & (X | Y) is (Y, X). */
11488 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11489 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11490 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11491 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11492 /* X & (Y | X) is (Y, X). */
11493 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11494 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11495 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11496 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11498 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11499 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11500 && integer_onep (TREE_OPERAND (arg0, 1))
11501 && integer_onep (arg1))
11503 tem = TREE_OPERAND (arg0, 0);
11504 return fold_build2_loc (loc, EQ_EXPR, type,
11505 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11506 build_int_cst (TREE_TYPE (tem), 1)),
11507 build_int_cst (TREE_TYPE (tem), 0));
11509 /* Fold ~X & 1 as (X & 1) == 0. */
11510 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11511 && integer_onep (arg1))
11513 tem = TREE_OPERAND (arg0, 0);
11514 return fold_build2_loc (loc, EQ_EXPR, type,
11515 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11516 build_int_cst (TREE_TYPE (tem), 1)),
11517 build_int_cst (TREE_TYPE (tem), 0));
11520 /* Fold (X ^ Y) & Y as ~X & Y. */
11521 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11522 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11524 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11525 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11526 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11527 fold_convert_loc (loc, type, arg1));
11529 /* Fold (X ^ Y) & X as ~Y & X. */
11530 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11531 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11532 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11534 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11535 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11536 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11537 fold_convert_loc (loc, type, arg1));
11539 /* Fold X & (X ^ Y) as X & ~Y. */
11540 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11541 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11543 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11544 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11545 fold_convert_loc (loc, type, arg0),
11546 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11548 /* Fold X & (Y ^ X) as ~Y & X. */
11549 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11550 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11551 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11553 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11554 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11555 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11556 fold_convert_loc (loc, type, arg0));
11559 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11560 if (t1 != NULL_TREE)
11561 return t1;
11562 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11563 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11564 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11566 unsigned int prec
11567 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11569 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11570 && (~TREE_INT_CST_LOW (arg1)
11571 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11572 return
11573 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11576 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11578 This results in more efficient code for machines without a NOR
11579 instruction. Combine will canonicalize to the first form
11580 which will allow use of NOR instructions provided by the
11581 backend if they exist. */
11582 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11583 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11585 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11586 build2 (BIT_IOR_EXPR, type,
11587 fold_convert_loc (loc, type,
11588 TREE_OPERAND (arg0, 0)),
11589 fold_convert_loc (loc, type,
11590 TREE_OPERAND (arg1, 0))));
11593 /* If arg0 is derived from the address of an object or function, we may
11594 be able to fold this expression using the object or function's
11595 alignment. */
11596 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11598 unsigned HOST_WIDE_INT modulus, residue;
11599 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11601 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11602 integer_onep (arg1));
11604 /* This works because modulus is a power of 2. If this weren't the
11605 case, we'd have to replace it by its greatest power-of-2
11606 divisor: modulus & -modulus. */
11607 if (low < modulus)
11608 return build_int_cst (type, residue & low);
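/* Illustrative sketch (editorial addition): the alignment-based fold above
   at the C level.  A pointer known to be 16-byte aligned has residue 0
   modulo 16, so masking it with any constant below 16 is a compile-time
   constant.  Uses GCC's aligned attribute; guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>
static void
check_aligned_mask (void)
{
  static int buf[4] __attribute__ ((aligned (16)));
  assert (((uintptr_t) buf & 0xf) == 0);
}
#endif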
11611 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11612 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11613 if the new mask might be further optimized. */
11614 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11615 || TREE_CODE (arg0) == RSHIFT_EXPR)
11616 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11617 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11618 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11619 < TYPE_PRECISION (TREE_TYPE (arg0))
11620 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11621 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11623 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11624 unsigned HOST_WIDE_INT mask
11625 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11626 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11627 tree shift_type = TREE_TYPE (arg0);
11629 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11630 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11631 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11632 && TYPE_PRECISION (TREE_TYPE (arg0))
11633 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11635 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11636 tree arg00 = TREE_OPERAND (arg0, 0);
11637 /* See if more bits can be proven to be zero because of
11638 zero extension. */
11639 if (TREE_CODE (arg00) == NOP_EXPR
11640 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11642 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11643 if (TYPE_PRECISION (inner_type)
11644 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11645 && TYPE_PRECISION (inner_type) < prec)
11647 prec = TYPE_PRECISION (inner_type);
11648 /* See if we can shorten the right shift. */
11649 if (shiftc < prec)
11650 shift_type = inner_type;
11653 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11654 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11655 zerobits <<= prec - shiftc;
11656 /* For an arithmetic shift, if the sign bit could be set, zerobits
11657 can actually contain sign bits, so no transformation is
11658 possible, unless MASK masks them all away. In that
11659 case the shift needs to be converted into a logical shift. */
11660 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11661 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11663 if ((mask & zerobits) == 0)
11664 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11665 else
11666 zerobits = 0;
11670 /* ((X << 16) & 0xff00) is (X, 0). */
11671 if ((mask & zerobits) == mask)
11672 return omit_one_operand_loc (loc, type,
11673 build_int_cst (type, 0), arg0);
11675 newmask = mask | zerobits;
11676 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11678 unsigned int prec;
11680 /* Only do the transformation if NEWMASK is some integer
11681 mode's mask. */
11682 for (prec = BITS_PER_UNIT;
11683 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11684 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11685 break;
11686 if (prec < HOST_BITS_PER_WIDE_INT
11687 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11689 tree newmaskt;
11691 if (shift_type != TREE_TYPE (arg0))
11693 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11694 fold_convert_loc (loc, shift_type,
11695 TREE_OPERAND (arg0, 0)),
11696 TREE_OPERAND (arg0, 1));
11697 tem = fold_convert_loc (loc, type, tem);
11699 else
11700 tem = op0;
11701 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11702 if (!tree_int_cst_equal (newmaskt, arg1))
11703 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
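/* Illustrative sketch (editorial addition): two consequences of the
   zerobits reasoning above, assuming 32-bit unsigned int.  Guarded by
   #if 0; the helper name is hypothetical.  */
#if 0
#include <assert.h>
static void
check_shift_and_mask (unsigned int x)
{
  /* X << 16 has sixteen known-zero low bits, so the mask 0xff00 can
     never select a nonzero bit: ((X << 16) & 0xff00) is 0.  */
  assert (((x << 16) & 0xff00u) == 0);
  /* X >> 24 has 24 known-zero high bits, so a mask covering all the
     remaining bits, such as 0x1ff, masks nothing at all.  */
  assert (((x >> 24) & 0x1ffu) == (x >> 24));
}
#endif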
11708 goto associate;
11710 case RDIV_EXPR:
11711 /* Don't touch a floating-point divide by zero unless the mode
11712 of the constant can represent infinity. */
11713 if (TREE_CODE (arg1) == REAL_CST
11714 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11715 && real_zerop (arg1))
11716 return NULL_TREE;
11718 /* Optimize A / A to 1.0 if we don't care about
11719 NaNs or Infinities. Skip the transformation
11720 for non-real operands. */
11721 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11722 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11723 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11724 && operand_equal_p (arg0, arg1, 0))
11726 tree r = build_real (TREE_TYPE (arg0), dconst1);
11728 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11731 /* The complex version of the above A / A optimization. */
11732 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11733 && operand_equal_p (arg0, arg1, 0))
11735 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11736 if (! HONOR_NANS (TYPE_MODE (elem_type))
11737 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11739 tree r = build_real (elem_type, dconst1);
11740 /* omit_two_operands will call fold_convert for us. */
11741 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11745 /* (-A) / (-B) -> A / B */
11746 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11747 return fold_build2_loc (loc, RDIV_EXPR, type,
11748 TREE_OPERAND (arg0, 0),
11749 negate_expr (arg1));
11750 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11751 return fold_build2_loc (loc, RDIV_EXPR, type,
11752 negate_expr (arg0),
11753 TREE_OPERAND (arg1, 0));
11755 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11756 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11757 && real_onep (arg1))
11758 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11760 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11761 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11762 && real_minus_onep (arg1))
11763 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11764 negate_expr (arg0)));
11766 /* If ARG1 is a constant, we can convert this to a multiply by the
11767 reciprocal. This does not have the same rounding properties,
11768 so only do this if -freciprocal-math. We can actually
11769 always safely do it if ARG1 is a power of two, but it's hard to
11770 tell if it is or not in a portable manner. */
11771 if (TREE_CODE (arg1) == REAL_CST)
11773 if (flag_reciprocal_math
11774 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11775 arg1, 0)))
11776 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11777 /* Find the reciprocal if optimizing and the result is exact. */
11778 if (optimize)
11780 REAL_VALUE_TYPE r;
11781 r = TREE_REAL_CST (arg1);
11782 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11784 tem = build_real (type, r);
11785 return fold_build2_loc (loc, MULT_EXPR, type,
11786 fold_convert_loc (loc, type, arg0), tem);
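/* Illustrative sketch (editorial addition): when the divisor is a power
   of two its reciprocal is exact, so the multiply rewrite above is exact
   bit-for-bit, the case the comment calls "always safe".  Guarded by
   #if 0; the helper name is hypothetical.  */
#if 0
#include <assert.h>
static void
check_exact_reciprocal (double x)
{
  if (x == x)			/* skip NaN, where == cannot hold */
    assert (x / 4.0 == x * 0.25);
}
#endif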
11790 /* Convert A/B/C to A/(B*C). */
11791 if (flag_reciprocal_math
11792 && TREE_CODE (arg0) == RDIV_EXPR)
11793 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11794 fold_build2_loc (loc, MULT_EXPR, type,
11795 TREE_OPERAND (arg0, 1), arg1));
11797 /* Convert A/(B/C) to (A/B)*C. */
11798 if (flag_reciprocal_math
11799 && TREE_CODE (arg1) == RDIV_EXPR)
11800 return fold_build2_loc (loc, MULT_EXPR, type,
11801 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11802 TREE_OPERAND (arg1, 0)),
11803 TREE_OPERAND (arg1, 1));
11805 /* Convert C1/(X*C2) into (C1/C2)/X. */
11806 if (flag_reciprocal_math
11807 && TREE_CODE (arg1) == MULT_EXPR
11808 && TREE_CODE (arg0) == REAL_CST
11809 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11811 tree tem = const_binop (RDIV_EXPR, arg0,
11812 TREE_OPERAND (arg1, 1), 0);
11813 if (tem)
11814 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11815 TREE_OPERAND (arg1, 0));
11818 if (flag_unsafe_math_optimizations)
11820 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11821 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11823 /* Optimize sin(x)/cos(x) as tan(x). */
11824 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11825 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11826 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11827 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11828 CALL_EXPR_ARG (arg1, 0), 0))
11830 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11832 if (tanfn != NULL_TREE)
11833 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11836 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11837 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11838 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11839 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11840 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11841 CALL_EXPR_ARG (arg1, 0), 0))
11843 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11845 if (tanfn != NULL_TREE)
11847 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11848 CALL_EXPR_ARG (arg0, 0));
11849 return fold_build2_loc (loc, RDIV_EXPR, type,
11850 build_real (type, dconst1), tmp);
11854 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11855 NaNs or Infinities. */
11856 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11857 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11858 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11860 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11861 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11863 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11864 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11865 && operand_equal_p (arg00, arg01, 0))
11867 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11869 if (cosfn != NULL_TREE)
11870 return build_call_expr_loc (loc, cosfn, 1, arg00);
11874 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11875 NaNs or Infinities. */
11876 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11877 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11878 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11880 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11881 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11883 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11884 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11885 && operand_equal_p (arg00, arg01, 0))
11887 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11889 if (cosfn != NULL_TREE)
11891 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11892 return fold_build2_loc (loc, RDIV_EXPR, type,
11893 build_real (type, dconst1),
11894 tmp);
11899 /* Optimize pow(x,c)/x as pow(x,c-1). */
11900 if (fcode0 == BUILT_IN_POW
11901 || fcode0 == BUILT_IN_POWF
11902 || fcode0 == BUILT_IN_POWL)
11904 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11905 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11906 if (TREE_CODE (arg01) == REAL_CST
11907 && !TREE_OVERFLOW (arg01)
11908 && operand_equal_p (arg1, arg00, 0))
11910 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11911 REAL_VALUE_TYPE c;
11912 tree arg;
11914 c = TREE_REAL_CST (arg01);
11915 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11916 arg = build_real (type, c);
11917 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11921 /* Optimize a/root(b/c) into a*root(c/b). */
11922 if (BUILTIN_ROOT_P (fcode1))
11924 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11926 if (TREE_CODE (rootarg) == RDIV_EXPR)
11928 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11929 tree b = TREE_OPERAND (rootarg, 0);
11930 tree c = TREE_OPERAND (rootarg, 1);
11932 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11934 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11935 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11939 /* Optimize x/expN(y) into x*expN(-y). */
11940 if (BUILTIN_EXPONENT_P (fcode1))
11942 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11943 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11944 arg1 = build_call_expr_loc (loc,
11945 expfn, 1,
11946 fold_convert_loc (loc, type, arg));
11947 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11950 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11951 if (fcode1 == BUILT_IN_POW
11952 || fcode1 == BUILT_IN_POWF
11953 || fcode1 == BUILT_IN_POWL)
11955 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11956 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11957 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11958 tree neg11 = fold_convert_loc (loc, type,
11959 negate_expr (arg11));
11960 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11961 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
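/* Illustrative sketch (editorial addition): the x/expN(y) and x/pow(y,z)
   rewrites above are again identities only up to rounding; the
   hypothetical helper returns the (normally tiny) differences instead of
   asserting equality.  Guarded by #if 0.  */
#if 0
#include <math.h>
static void
check_unsafe_div_folds (double x, double y, double z,
			double *d1, double *d2)
{
  *d1 = x / exp (y) - x * exp (-y);
  *d2 = x / pow (y, z) - x * pow (y, -z);
}
#endif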
11964 return NULL_TREE;
11966 case TRUNC_DIV_EXPR:
11967 case FLOOR_DIV_EXPR:
11968 /* Simplify A / (B << N) where A and B are positive and B is
11969 a power of 2, to A >> (N + log2(B)). */
11970 strict_overflow_p = false;
11971 if (TREE_CODE (arg1) == LSHIFT_EXPR
11972 && (TYPE_UNSIGNED (type)
11973 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11975 tree sval = TREE_OPERAND (arg1, 0);
11976 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11978 tree sh_cnt = TREE_OPERAND (arg1, 1);
11979 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11981 if (strict_overflow_p)
11982 fold_overflow_warning (("assuming signed overflow does not "
11983 "occur when simplifying A / (B << N)"),
11984 WARN_STRICT_OVERFLOW_MISC);
11986 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11987 sh_cnt, build_int_cst (NULL_TREE, pow2));
11988 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11989 fold_convert_loc (loc, type, arg0), sh_cnt);
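/* Illustrative sketch (editorial addition): with B == 8 == 1 << 3,
   A / (8 << N) == A >> (N + 3) for unsigned A and an in-range combined
   shift, which is the rewrite performed above.  Assumes 32-bit unsigned
   int; guarded by #if 0.  */
#if 0
#include <assert.h>
static void
check_div_by_shifted_pow2 (unsigned int a, unsigned int n)
{
  if (n + 3 < 32)		/* keep both shift counts in range */
    assert (a / (8u << n) == a >> (n + 3));
}
#endif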
11993 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11994 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11995 if (INTEGRAL_TYPE_P (type)
11996 && TYPE_UNSIGNED (type)
11997 && code == FLOOR_DIV_EXPR)
11998 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12000 /* Fall through. */
12002 case ROUND_DIV_EXPR:
12003 case CEIL_DIV_EXPR:
12004 case EXACT_DIV_EXPR:
12005 if (integer_onep (arg1))
12006 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12007 if (integer_zerop (arg1))
12008 return NULL_TREE;
12009 /* X / -1 is -X. */
12010 if (!TYPE_UNSIGNED (type)
12011 && TREE_CODE (arg1) == INTEGER_CST
12012 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12013 && TREE_INT_CST_HIGH (arg1) == -1)
12014 return fold_convert_loc (loc, type, negate_expr (arg0));
12016 /* Convert -A / -B to A / B when the type is signed and overflow is
12017 undefined. */
12018 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12019 && TREE_CODE (arg0) == NEGATE_EXPR
12020 && negate_expr_p (arg1))
12022 if (INTEGRAL_TYPE_P (type))
12023 fold_overflow_warning (("assuming signed overflow does not occur "
12024 "when distributing negation across "
12025 "division"),
12026 WARN_STRICT_OVERFLOW_MISC);
12027 return fold_build2_loc (loc, code, type,
12028 fold_convert_loc (loc, type,
12029 TREE_OPERAND (arg0, 0)),
12030 fold_convert_loc (loc, type,
12031 negate_expr (arg1)));
12033 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12034 && TREE_CODE (arg1) == NEGATE_EXPR
12035 && negate_expr_p (arg0))
12037 if (INTEGRAL_TYPE_P (type))
12038 fold_overflow_warning (("assuming signed overflow does not occur "
12039 "when distributing negation across "
12040 "division"),
12041 WARN_STRICT_OVERFLOW_MISC);
12042 return fold_build2_loc (loc, code, type,
12043 fold_convert_loc (loc, type,
12044 negate_expr (arg0)),
12045 fold_convert_loc (loc, type,
12046 TREE_OPERAND (arg1, 0)));
12049 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12050 operation, EXACT_DIV_EXPR.
12052 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12053 At one time others generated faster code; it's not clear whether they do
12054 after the last round of changes to the DIV code in expmed.c. */
12055 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12056 && multiple_of_p (type, arg0, arg1))
12057 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12059 strict_overflow_p = false;
12060 if (TREE_CODE (arg1) == INTEGER_CST
12061 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12062 &strict_overflow_p)))
12064 if (strict_overflow_p)
12065 fold_overflow_warning (("assuming signed overflow does not occur "
12066 "when simplifying division"),
12067 WARN_STRICT_OVERFLOW_MISC);
12068 return fold_convert_loc (loc, type, tem);
12071 return NULL_TREE;
12073 case CEIL_MOD_EXPR:
12074 case FLOOR_MOD_EXPR:
12075 case ROUND_MOD_EXPR:
12076 case TRUNC_MOD_EXPR:
12077 /* X % 1 is always zero, but be sure to preserve any side
12078 effects in X. */
12079 if (integer_onep (arg1))
12080 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12082 /* For X % 0, return X % 0 unchanged so that we can get the
12083 proper warnings and errors. */
12084 if (integer_zerop (arg1))
12085 return NULL_TREE;
12087 /* 0 % X is always zero, but be sure to preserve any side
12088 effects in X. Place this after checking for X == 0. */
12089 if (integer_zerop (arg0))
12090 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12092 /* X % -1 is zero. */
12093 if (!TYPE_UNSIGNED (type)
12094 && TREE_CODE (arg1) == INTEGER_CST
12095 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12096 && TREE_INT_CST_HIGH (arg1) == -1)
12097 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12099 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12100 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12101 strict_overflow_p = false;
12102 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12103 && (TYPE_UNSIGNED (type)
12104 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12106 tree c = arg1;
12107 /* Also optimize A % (C << N) where C is a power of 2,
12108 to A & ((C << N) - 1). */
12109 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12110 c = TREE_OPERAND (arg1, 0);
12112 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12114 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12115 build_int_cst (TREE_TYPE (arg1), 1));
12116 if (strict_overflow_p)
12117 fold_overflow_warning (("assuming signed overflow does not "
12118 "occur when simplifying "
12119 "X % (power of two)"),
12120 WARN_STRICT_OVERFLOW_MISC);
12121 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12122 fold_convert_loc (loc, type, arg0),
12123 fold_convert_loc (loc, type, mask));
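/* Illustrative sketch (editorial addition): the unsigned X % C to
   X & (C - 1) rewrite above on a concrete power of two, including the
   A % (C << N) variant.  Guarded by #if 0; the helper name is
   hypothetical.  */
#if 0
#include <assert.h>
static void
check_mod_pow2 (unsigned int x)
{
  assert (x % 16u == (x & 15u));
  /* The A % (C << N) variant: 16 == 2 << 3.  */
  assert (x % (2u << 3) == (x & ((2u << 3) - 1)));
}
#endif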
12127 /* X % -C is the same as X % C. */
12128 if (code == TRUNC_MOD_EXPR
12129 && !TYPE_UNSIGNED (type)
12130 && TREE_CODE (arg1) == INTEGER_CST
12131 && !TREE_OVERFLOW (arg1)
12132 && TREE_INT_CST_HIGH (arg1) < 0
12133 && !TYPE_OVERFLOW_TRAPS (type)
12134 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12135 && !sign_bit_p (arg1, arg1))
12136 return fold_build2_loc (loc, code, type,
12137 fold_convert_loc (loc, type, arg0),
12138 fold_convert_loc (loc, type,
12139 negate_expr (arg1)));
12141 /* X % -Y is the same as X % Y. */
12142 if (code == TRUNC_MOD_EXPR
12143 && !TYPE_UNSIGNED (type)
12144 && TREE_CODE (arg1) == NEGATE_EXPR
12145 && !TYPE_OVERFLOW_TRAPS (type))
12146 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12147 fold_convert_loc (loc, type,
12148 TREE_OPERAND (arg1, 0)));
12150 if (TREE_CODE (arg1) == INTEGER_CST
12151 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12152 &strict_overflow_p)))
12154 if (strict_overflow_p)
12155 fold_overflow_warning (("assuming signed overflow does not occur "
12156 "when simplifying modulus"),
12157 WARN_STRICT_OVERFLOW_MISC);
12158 return fold_convert_loc (loc, type, tem);
12161 return NULL_TREE;
12163 case LROTATE_EXPR:
12164 case RROTATE_EXPR:
12165 if (integer_all_onesp (arg0))
12166 return omit_one_operand_loc (loc, type, arg0, arg1);
12167 goto shift;
12169 case RSHIFT_EXPR:
12170 /* Optimize -1 >> x for arithmetic right shifts. */
12171 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12172 && tree_expr_nonnegative_p (arg1))
12173 return omit_one_operand_loc (loc, type, arg0, arg1);
12174 /* ... fall through ... */
12176 case LSHIFT_EXPR:
12177 shift:
12178 if (integer_zerop (arg1))
12179 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12180 if (integer_zerop (arg0))
12181 return omit_one_operand_loc (loc, type, arg0, arg1);
12183 /* Since a negative shift count is not well-defined,
12184 don't try to compute it in the compiler. */
12185 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12186 return NULL_TREE;
12188 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12189 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12190 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12191 && host_integerp (TREE_OPERAND (arg0, 1), false)
12192 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12194 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12195 + TREE_INT_CST_LOW (arg1));
12197 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12198 being well defined. */
12199 if (low >= TYPE_PRECISION (type))
12201 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12202 low = low % TYPE_PRECISION (type);
12203 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12204 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12205 TREE_OPERAND (arg0, 0));
12206 else
12207 low = TYPE_PRECISION (type) - 1;
12210 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12211 build_int_cst (type, low));
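/* Illustrative sketch (editorial addition): merging shift counts as done
   above, including the clamping case where the combined count reaches the
   precision and an unsigned shift degenerates to zero.  Assumes 32-bit
   unsigned int; guarded by #if 0; the helper name is hypothetical.  */
#if 0
#include <assert.h>
static void
check_merged_shift_counts (unsigned int a)
{
  assert (((a >> 3) >> 5) == (a >> 8));
  /* 20 + 20 >= 32: for an unsigned type the fold yields constant 0.  */
  assert (((a >> 20) >> 20) == 0);
}
#endif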
12214 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12215 into x & ((unsigned)-1 >> c) for unsigned types. */
12216 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12217 || (TYPE_UNSIGNED (type)
12218 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12219 && host_integerp (arg1, false)
12220 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12221 && host_integerp (TREE_OPERAND (arg0, 1), false)
12222 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12224 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12225 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12226 tree lshift;
12227 tree arg00;
12229 if (low0 == low1)
12231 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12233 lshift = build_int_cst (type, -1);
12234 lshift = int_const_binop (code, lshift, arg1, 0);
12236 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
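/* Illustrative sketch (editorial addition): the shift pairs above just
   clear low or high bits, so each is equivalent to a single mask.
   Assumes 32-bit unsigned int; guarded by #if 0; the helper name is
   hypothetical.  */
#if 0
#include <assert.h>
static void
check_shift_pair_masks (unsigned int x)
{
  assert (((x >> 4) << 4) == (x & (~0u << 4)));
  assert (((x << 4) >> 4) == (x & (~0u >> 4)));	/* unsigned only */
}
#endif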
12240 /* Rewrite an LROTATE_EXPR by a constant into an
12241 RROTATE_EXPR by a new constant. */
12242 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12244 tree tem = build_int_cst (TREE_TYPE (arg1),
12245 TYPE_PRECISION (type));
12246 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12247 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12250 /* If we have a rotate of a bit operation with the rotate count and
12251 the second operand of the bit operation both constant,
12252 permute the two operations. */
12253 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12254 && (TREE_CODE (arg0) == BIT_AND_EXPR
12255 || TREE_CODE (arg0) == BIT_IOR_EXPR
12256 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12258 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12259 fold_build2_loc (loc, code, type,
12260 TREE_OPERAND (arg0, 0), arg1),
12261 fold_build2_loc (loc, code, type,
12262 TREE_OPERAND (arg0, 1), arg1));
12264 /* Two consecutive rotates adding up to the precision of the
12265 type can be ignored. */
12266 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12267 && TREE_CODE (arg0) == RROTATE_EXPR
12268 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12269 && TREE_INT_CST_HIGH (arg1) == 0
12270 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12271 && ((TREE_INT_CST_LOW (arg1)
12272 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12273 == (unsigned int) TYPE_PRECISION (type)))
12274 return TREE_OPERAND (arg0, 0);
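/* Illustrative sketch (editorial addition): two right rotates whose
   counts sum to the precision restore the operand, which is the
   cancellation recognized above.  Assumes 32-bit unsigned int; the rotr
   helper is hypothetical.  Guarded by #if 0.  */
#if 0
#include <assert.h>
static unsigned int
rotr (unsigned int x, unsigned int n)	/* requires 0 < n < 32 */
{
  return (x >> n) | (x << (32 - n));
}
static void
check_rotate_cancel (unsigned int x)
{
  assert (rotr (rotr (x, 12), 20) == x);
}
#endif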
12276 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12277 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12278 if the latter can be further optimized. */
12279 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12280 && TREE_CODE (arg0) == BIT_AND_EXPR
12281 && TREE_CODE (arg1) == INTEGER_CST
12282 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12284 tree mask = fold_build2_loc (loc, code, type,
12285 fold_convert_loc (loc, type,
12286 TREE_OPERAND (arg0, 1)),
12287 arg1);
12288 tree shift = fold_build2_loc (loc, code, type,
12289 fold_convert_loc (loc, type,
12290 TREE_OPERAND (arg0, 0)),
12291 arg1);
12292 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12293 if (tem)
12294 return tem;
12297 return NULL_TREE;
12299 case MIN_EXPR:
12300 if (operand_equal_p (arg0, arg1, 0))
12301 return omit_one_operand_loc (loc, type, arg0, arg1);
12302 if (INTEGRAL_TYPE_P (type)
12303 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12304 return omit_one_operand_loc (loc, type, arg1, arg0);
12305 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12306 if (tem)
12307 return tem;
12308 goto associate;
12310 case MAX_EXPR:
12311 if (operand_equal_p (arg0, arg1, 0))
12312 return omit_one_operand_loc (loc, type, arg0, arg1);
12313 if (INTEGRAL_TYPE_P (type)
12314 && TYPE_MAX_VALUE (type)
12315 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12316 return omit_one_operand_loc (loc, type, arg1, arg0);
12317 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12318 if (tem)
12319 return tem;
12320 goto associate;
12322 case TRUTH_ANDIF_EXPR:
12323 /* Note that the operands of this must be ints
12324 and their values must be 0 or 1.
12325 ("true" is a fixed value perhaps depending on the language.) */
12326 /* If first arg is constant zero, return it. */
12327 if (integer_zerop (arg0))
12328 return fold_convert_loc (loc, type, arg0);
12329 case TRUTH_AND_EXPR:
12330 /* If either arg is constant true, drop it. */
12331 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12332 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12333 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12334 /* Preserve sequence points. */
12335 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12336 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12337 /* If second arg is constant zero, result is zero, but first arg
12338 must be evaluated. */
12339 if (integer_zerop (arg1))
12340 return omit_one_operand_loc (loc, type, arg1, arg0);
12341 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12342 case will be handled here. */
12343 if (integer_zerop (arg0))
12344 return omit_one_operand_loc (loc, type, arg0, arg1);
12346 /* !X && X is always false. */
12347 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12348 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12349 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12350 /* X && !X is always false. */
12351 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12352 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12353 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12355 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12356 means A >= Y && A != MAX, but in this case we know that
12357 A < X <= MAX. */
12359 if (!TREE_SIDE_EFFECTS (arg0)
12360 && !TREE_SIDE_EFFECTS (arg1))
12362 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12363 if (tem && !operand_equal_p (tem, arg0, 0))
12364 return fold_build2_loc (loc, code, type, tem, arg1);
12366 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12367 if (tem && !operand_equal_p (tem, arg1, 0))
12368 return fold_build2_loc (loc, code, type, arg0, tem);
12371 truth_andor:
12372 /* We only do these simplifications if we are optimizing. */
12373 if (!optimize)
12374 return NULL_TREE;
12376 /* Check for things like (A || B) && (A || C). We can convert this
12377 to A || (B && C). Note that either operator can be any of the four
12378 truth and/or operations and the transformation will still be
12379 valid. Also note that we only care about order for the
12380 ANDIF and ORIF operators. If B contains side effects, this
12381 might change the truth-value of A. */
12382 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12383 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12384 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12385 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12386 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12387 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12389 tree a00 = TREE_OPERAND (arg0, 0);
12390 tree a01 = TREE_OPERAND (arg0, 1);
12391 tree a10 = TREE_OPERAND (arg1, 0);
12392 tree a11 = TREE_OPERAND (arg1, 1);
12393 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12394 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12395 && (code == TRUTH_AND_EXPR
12396 || code == TRUTH_OR_EXPR));
12398 if (operand_equal_p (a00, a10, 0))
12399 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12400 fold_build2_loc (loc, code, type, a01, a11));
12401 else if (commutative && operand_equal_p (a00, a11, 0))
12402 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12403 fold_build2_loc (loc, code, type, a01, a10));
12404 else if (commutative && operand_equal_p (a01, a10, 0))
12405 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12406 fold_build2_loc (loc, code, type, a00, a11));
12408 /* This case is tricky because we must either have commutative
12409 operators or else A10 must not have side-effects. */
12411 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12412 && operand_equal_p (a01, a11, 0))
12413 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12414 fold_build2_loc (loc, code, type, a00, a10),
12415 a01);
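/* Illustrative sketch, not part of fold-const.c: the distribution law
   used above, checked over all boolean inputs.  The TREE_SIDE_EFFECTS
   tests in the real code exist because reassociating ANDIF/ORIF
   operands can change evaluation order; pure 0/1 values have no such
   problem.  */
#if 0
#include <assert.h>

static void
demo_truth_distribute (void)
{
  int a, b, c;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      for (c = 0; c <= 1; c++)
        assert (((a || b) && (a || c)) == (a || (b && c)));
}
#endif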
12418 /* See if we can build a range comparison. */
12419 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12420 return tem;
12422 /* Check for the possibility of merging component references. If our
12423 lhs is another similar operation, try to merge its rhs with our
12424 rhs. Then try to merge our lhs and rhs. */
12425 if (TREE_CODE (arg0) == code
12426 && 0 != (tem = fold_truthop (loc, code, type,
12427 TREE_OPERAND (arg0, 1), arg1)))
12428 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12430 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12431 return tem;
12433 return NULL_TREE;
12435 case TRUTH_ORIF_EXPR:
12436 /* Note that the operands of this must be ints
12437 and their values must be 0 or true.
12438 ("true" is a fixed value perhaps depending on the language.) */
12439 /* If first arg is constant true, return it. */
12440 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12441 return fold_convert_loc (loc, type, arg0);
12442 case TRUTH_OR_EXPR:
12443 /* If either arg is constant zero, drop it. */
12444 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12445 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12446 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12447 /* Preserve sequence points. */
12448 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12449 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12450 /* If second arg is constant true, result is true, but we must
12451 evaluate first arg. */
12452 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12453 return omit_one_operand_loc (loc, type, arg1, arg0);
12454 /* Likewise for first arg, but note this only occurs here for
12455 TRUTH_OR_EXPR. */
12456 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12457 return omit_one_operand_loc (loc, type, arg0, arg1);
12459 /* !X || X is always true. */
12460 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12461 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12462 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12463 /* X || !X is always true. */
12464 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12465 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12466 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12468 goto truth_andor;
12470 case TRUTH_XOR_EXPR:
12471 /* If the second arg is constant zero, drop it. */
12472 if (integer_zerop (arg1))
12473 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12474 /* If the second arg is constant true, this is a logical inversion. */
12475 if (integer_onep (arg1))
12477 /* Only call invert_truthvalue if operand is a truth value. */
12478 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12479 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12480 else
12481 tem = invert_truthvalue_loc (loc, arg0);
12482 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12484 /* Identical arguments cancel to zero. */
12485 if (operand_equal_p (arg0, arg1, 0))
12486 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12488 /* !X ^ X is always true. */
12489 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12490 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12491 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12493 /* X ^ !X is always true. */
12494 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12495 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12496 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12498 return NULL_TREE;
12500 case EQ_EXPR:
12501 case NE_EXPR:
12502 tem = fold_comparison (loc, code, type, op0, op1);
12503 if (tem != NULL_TREE)
12504 return tem;
12506 /* bool_var != 0 becomes bool_var. */
12507 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12508 && code == NE_EXPR)
12509 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12511 /* bool_var == 1 becomes bool_var. */
12512 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12513 && code == EQ_EXPR)
12514 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12516 /* bool_var != 1 becomes !bool_var. */
12517 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12518 && code == NE_EXPR)
12519 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12520 fold_convert_loc (loc, type, arg0));
12522 /* bool_var == 0 becomes !bool_var. */
12523 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12524 && code == EQ_EXPR)
12525 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12526 fold_convert_loc (loc, type, arg0));
12528 /* If this is an equality comparison of the address of two non-weak,
12529 unaliased symbols neither of which are extern (since we do not
12530 have access to attributes for externs), then we know the result. */
12531 if (TREE_CODE (arg0) == ADDR_EXPR
12532 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12533 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12534 && ! lookup_attribute ("alias",
12535 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12536 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12537 && TREE_CODE (arg1) == ADDR_EXPR
12538 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12539 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12540 && ! lookup_attribute ("alias",
12541 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12542 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12544 /* We know that we're looking at the address of two
12545 non-weak, unaliased, static _DECL nodes.
12547 It is both wasteful and incorrect to call operand_equal_p
12548 to compare the two ADDR_EXPR nodes. It is wasteful in that
12549 all we need to do is test pointer equality for the arguments
12550 to the two ADDR_EXPR nodes. It is incorrect to use
12551 operand_equal_p as that function is NOT equivalent to a
12552 C equality test. It can in fact return false for two
12553 objects which would test as equal using the C equality
12554 operator. */
12555 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12556 return constant_boolean_node (equal
12557 ? code == EQ_EXPR : code != EQ_EXPR,
12558 type);
12561 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12562 a MINUS_EXPR of a constant, we can convert it into a comparison with
12563 a revised constant as long as no overflow occurs. */
12564 if (TREE_CODE (arg1) == INTEGER_CST
12565 && (TREE_CODE (arg0) == PLUS_EXPR
12566 || TREE_CODE (arg0) == MINUS_EXPR)
12567 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12568 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12569 ? MINUS_EXPR : PLUS_EXPR,
12570 fold_convert_loc (loc, TREE_TYPE (arg0),
12571 arg1),
12572 TREE_OPERAND (arg0, 1), 0))
12573 && !TREE_OVERFLOW (tem))
12574 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12576 /* Similarly for a NEGATE_EXPR. */
12577 if (TREE_CODE (arg0) == NEGATE_EXPR
12578 && TREE_CODE (arg1) == INTEGER_CST
12579 && 0 != (tem = negate_expr (arg1))
12580 && TREE_CODE (tem) == INTEGER_CST
12581 && !TREE_OVERFLOW (tem))
12582 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12584 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12585 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12586 && TREE_CODE (arg1) == INTEGER_CST
12587 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12588 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12589 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12590 fold_convert_loc (loc,
12591 TREE_TYPE (arg0),
12592 arg1),
12593 TREE_OPERAND (arg0, 1)));
12595 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12596 if ((TREE_CODE (arg0) == PLUS_EXPR
12597 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12598 || TREE_CODE (arg0) == MINUS_EXPR)
12599 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12600 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12601 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12603 tree val = TREE_OPERAND (arg0, 1);
12604 return omit_two_operands_loc (loc, type,
12605 fold_build2_loc (loc, code, type,
12606 val,
12607 build_int_cst (TREE_TYPE (val),
12608 0)),
12609 TREE_OPERAND (arg0, 0), arg1);
12612 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12613 if (TREE_CODE (arg0) == MINUS_EXPR
12614 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12615 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12616 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12618 return omit_two_operands_loc (loc, type,
12619 code == NE_EXPR
12620 ? boolean_true_node : boolean_false_node,
12621 TREE_OPERAND (arg0, 1), arg1);
12624 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12625 for !=. Don't do this for ordered comparisons due to overflow. */
12626 if (TREE_CODE (arg0) == MINUS_EXPR
12627 && integer_zerop (arg1))
12628 return fold_build2_loc (loc, code, type,
12629 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12631 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12632 if (TREE_CODE (arg0) == ABS_EXPR
12633 && (integer_zerop (arg1) || real_zerop (arg1)))
12634 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12636 /* If this is an EQ or NE comparison with zero and ARG0 is
12637 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12638 two operations, but the latter can be done in one less insn
12639 on machines that have only two-operand insns or on which a
12640 constant cannot be the first operand. */
12641 if (TREE_CODE (arg0) == BIT_AND_EXPR
12642 && integer_zerop (arg1))
12644 tree arg00 = TREE_OPERAND (arg0, 0);
12645 tree arg01 = TREE_OPERAND (arg0, 1);
12646 if (TREE_CODE (arg00) == LSHIFT_EXPR
12647 && integer_onep (TREE_OPERAND (arg00, 0)))
12649 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12650 arg01, TREE_OPERAND (arg00, 1));
12651 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12652 build_int_cst (TREE_TYPE (arg0), 1));
12653 return fold_build2_loc (loc, code, type,
12654 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12655 arg1);
12657 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12658 && integer_onep (TREE_OPERAND (arg01, 0)))
12660 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12661 arg00, TREE_OPERAND (arg01, 1));
12662 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12663 build_int_cst (TREE_TYPE (arg0), 1));
12664 return fold_build2_loc (loc, code, type,
12665 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12666 arg1);
12670 /* If this is an NE or EQ comparison of zero against the result of a
12671 signed MOD operation whose second operand is a power of 2, make
12672 the MOD operation unsigned since it is simpler and equivalent. */
12673 if (integer_zerop (arg1)
12674 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12675 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12676 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12677 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12678 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12679 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12681 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12682 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12683 fold_convert_loc (loc, newtype,
12684 TREE_OPERAND (arg0, 0)),
12685 fold_convert_loc (loc, newtype,
12686 TREE_OPERAND (arg0, 1)));
12688 return fold_build2_loc (loc, code, type, newmod,
12689 fold_convert_loc (loc, newtype, arg1));
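/* Illustrative sketch, not part of fold-const.c: with C99 truncating
   division, a signed modulus by a power of two is zero exactly when the
   unsigned modulus is, so the EQ/NE-against-zero test may use the
   simpler unsigned form.  */
#if 0
#include <assert.h>

static void
demo_signed_mod_pow2 (void)
{
  int x;
  for (x = -8; x <= 8; x++)
    assert ((x % 4 == 0) == ((unsigned int) x % 4u == 0));
}
#endif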
12692 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12693 C1 is a valid shift constant, and C2 is a power of two, i.e.
12694 a single bit. */
12695 if (TREE_CODE (arg0) == BIT_AND_EXPR
12696 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12697 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12698 == INTEGER_CST
12699 && integer_pow2p (TREE_OPERAND (arg0, 1))
12700 && integer_zerop (arg1))
12702 tree itype = TREE_TYPE (arg0);
12703 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12704 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12706 /* Check for a valid shift count. */
12707 if (TREE_INT_CST_HIGH (arg001) == 0
12708 && TREE_INT_CST_LOW (arg001) < prec)
12710 tree arg01 = TREE_OPERAND (arg0, 1);
12711 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12712 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12713 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12714 can be rewritten as (X & (C2 << C1)) != 0. */
12715 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12717 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12718 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12719 return fold_build2_loc (loc, code, type, tem, arg1);
12721 /* Otherwise, for signed (arithmetic) shifts,
12722 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12723 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12724 else if (!TYPE_UNSIGNED (itype))
12725 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12726 arg000, build_int_cst (itype, 0));
12727 /* Otherwise, for unsigned (logical) shifts,
12728 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12729 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12730 else
12731 return omit_one_operand_loc (loc, type,
12732 code == EQ_EXPR ? integer_one_node
12733 : integer_zero_node,
12734 arg000);
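/* Illustrative sketch, not part of fold-const.c: when C2 << C1 does not
   overflow, testing the shifted value against the mask is the same as
   masking the unshifted value with C2 << C1.  The example assumes GCC's
   arithmetic right shift for signed int.  */
#if 0
#include <assert.h>

static void
demo_shifted_bit_test (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    /* ((x >> 3) & 4) tests bit 5, i.e. x & (4 << 3).  */
    assert ((((x >> 3) & 4) != 0) == ((x & (4 << 3)) != 0));
}
#endif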
12738 /* If this is an NE comparison of zero with an AND of one, remove the
12739 comparison since the AND will give the correct value. */
12740 if (code == NE_EXPR
12741 && integer_zerop (arg1)
12742 && TREE_CODE (arg0) == BIT_AND_EXPR
12743 && integer_onep (TREE_OPERAND (arg0, 1)))
12744 return fold_convert_loc (loc, type, arg0);
12746 /* If we have (A & C) == C where C is a power of 2, convert this into
12747 (A & C) != 0. Similarly for NE_EXPR. */
12748 if (TREE_CODE (arg0) == BIT_AND_EXPR
12749 && integer_pow2p (TREE_OPERAND (arg0, 1))
12750 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12751 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12752 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12753 integer_zero_node));
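/* Illustrative sketch, not part of fold-const.c: when C has a single
   bit set, A & C is either 0 or C, so comparing it against C and
   comparing it against 0 are complementary tests.  */
#if 0
#include <assert.h>

static void
demo_single_bit_eq (void)
{
  unsigned int a;
  for (a = 0; a < 64; a++)
    assert (((a & 8u) == 8u) == ((a & 8u) != 0));
}
#endif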
12755 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12756 bit, then fold the expression into A < 0 or A >= 0. */
12757 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12758 if (tem)
12759 return tem;
12761 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12762 Similarly for NE_EXPR. */
12763 if (TREE_CODE (arg0) == BIT_AND_EXPR
12764 && TREE_CODE (arg1) == INTEGER_CST
12765 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12767 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12768 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12769 TREE_OPERAND (arg0, 1));
12770 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12771 arg1, notc);
12772 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12773 if (integer_nonzerop (dandnotc))
12774 return omit_one_operand_loc (loc, type, rslt, arg0);
12777 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12778 Similarly for NE_EXPR. */
12779 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12780 && TREE_CODE (arg1) == INTEGER_CST
12781 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12783 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12784 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12785 TREE_OPERAND (arg0, 1), notd);
12786 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12787 if (integer_nonzerop (candnotd))
12788 return omit_one_operand_loc (loc, type, rslt, arg0);
12791 /* If this is a comparison of a field, we may be able to simplify it. */
12792 if ((TREE_CODE (arg0) == COMPONENT_REF
12793 || TREE_CODE (arg0) == BIT_FIELD_REF)
12794 /* Handle the constant case even without -O
12795 to make sure the warnings are given. */
12796 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12798 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12799 if (t1)
12800 return t1;
12803 /* Optimize comparisons of strlen vs zero to a compare of the
12804 first character of the string vs zero. To wit,
12805 strlen(ptr) == 0 => *ptr == 0
12806 strlen(ptr) != 0 => *ptr != 0
12807 Other cases should reduce to one of these two (or a constant)
12808 due to the return value of strlen being unsigned. */
12809 if (TREE_CODE (arg0) == CALL_EXPR
12810 && integer_zerop (arg1))
12812 tree fndecl = get_callee_fndecl (arg0);
12814 if (fndecl
12815 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12816 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12817 && call_expr_nargs (arg0) == 1
12818 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12820 tree iref = build_fold_indirect_ref_loc (loc,
12821 CALL_EXPR_ARG (arg0, 0));
12822 return fold_build2_loc (loc, code, type, iref,
12823 build_int_cst (TREE_TYPE (iref), 0));
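/* Illustrative sketch, not part of fold-const.c: a string has length
   zero exactly when its first character is the terminating NUL, so the
   strlen call can be replaced by a single dereference.  */
#if 0
#include <assert.h>
#include <string.h>

static void
demo_strlen_zero (void)
{
  const char *s = "x", *e = "";
  assert ((strlen (s) == 0) == (*s == 0));
  assert ((strlen (e) == 0) == (*e == 0));
}
#endif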
12827 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12828 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12829 if (TREE_CODE (arg0) == RSHIFT_EXPR
12830 && integer_zerop (arg1)
12831 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12833 tree arg00 = TREE_OPERAND (arg0, 0);
12834 tree arg01 = TREE_OPERAND (arg0, 1);
12835 tree itype = TREE_TYPE (arg00);
12836 if (TREE_INT_CST_HIGH (arg01) == 0
12837 && TREE_INT_CST_LOW (arg01)
12838 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12840 if (TYPE_UNSIGNED (itype))
12842 itype = signed_type_for (itype);
12843 arg00 = fold_convert_loc (loc, itype, arg00);
12845 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12846 type, arg00, build_int_cst (itype, 0));
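/* Illustrative sketch, not part of fold-const.c: shifting a signed
   32-bit value right by 31 (GCC uses an arithmetic shift) leaves -1 for
   negative values and 0 otherwise, so the shift-and-test above is just
   a sign test.  */
#if 0
#include <assert.h>

static void
demo_sign_shift (void)
{
  int x;
  for (x = -5; x <= 5; x++)
    assert (((x >> 31) != 0) == (x < 0));
}
#endif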
12850 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12851 if (integer_zerop (arg1)
12852 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12853 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12854 TREE_OPERAND (arg0, 1));
12856 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12857 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12858 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12859 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12860 build_int_cst (TREE_TYPE (arg1), 0));
12861 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12862 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12863 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12864 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12865 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12866 build_int_cst (TREE_TYPE (arg1), 0));
12868 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12869 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12870 && TREE_CODE (arg1) == INTEGER_CST
12871 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12872 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12873 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12874 TREE_OPERAND (arg0, 1), arg1));
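/* Illustrative sketch, not part of fold-const.c: XOR is its own
   inverse, which underlies all three folds above -- cancelling a
   repeated operand and moving a constant to the other side.  */
#if 0
#include <assert.h>

static void
demo_xor_cancel (void)
{
  unsigned int x, y;
  for (x = 0; x < 8; x++)
    for (y = 0; y < 8; y++)
      {
        assert (((x ^ y) == y) == (x == 0));
        assert (((x ^ 5u) == 3u) == (x == (5u ^ 3u)));
      }
}
#endif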
12876 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12877 (X & C) == 0 when C is a single bit. */
12878 if (TREE_CODE (arg0) == BIT_AND_EXPR
12879 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12880 && integer_zerop (arg1)
12881 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12883 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12884 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12885 TREE_OPERAND (arg0, 1));
12886 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12887 type, tem, arg1);
12890 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12891 constant C is a power of two, i.e. a single bit. */
12892 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12893 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12894 && integer_zerop (arg1)
12895 && integer_pow2p (TREE_OPERAND (arg0, 1))
12896 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12897 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12899 tree arg00 = TREE_OPERAND (arg0, 0);
12900 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12901 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12904 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12905 when C is a power of two, i.e. a single bit. */
12906 if (TREE_CODE (arg0) == BIT_AND_EXPR
12907 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12908 && integer_zerop (arg1)
12909 && integer_pow2p (TREE_OPERAND (arg0, 1))
12910 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12911 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12913 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12914 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12915 arg000, TREE_OPERAND (arg0, 1));
12916 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12917 tem, build_int_cst (TREE_TYPE (tem), 0));
12920 if (integer_zerop (arg1)
12921 && tree_expr_nonzero_p (arg0))
12923 tree res = constant_boolean_node (code == NE_EXPR, type);
12924 return omit_one_operand_loc (loc, type, res, arg0);
12927 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12928 if (TREE_CODE (arg0) == NEGATE_EXPR
12929 && TREE_CODE (arg1) == NEGATE_EXPR)
12930 return fold_build2_loc (loc, code, type,
12931 TREE_OPERAND (arg0, 0),
12932 TREE_OPERAND (arg1, 0));
12934 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12935 if (TREE_CODE (arg0) == BIT_AND_EXPR
12936 && TREE_CODE (arg1) == BIT_AND_EXPR)
12938 tree arg00 = TREE_OPERAND (arg0, 0);
12939 tree arg01 = TREE_OPERAND (arg0, 1);
12940 tree arg10 = TREE_OPERAND (arg1, 0);
12941 tree arg11 = TREE_OPERAND (arg1, 1);
12942 tree itype = TREE_TYPE (arg0);
12944 if (operand_equal_p (arg01, arg11, 0))
12945 return fold_build2_loc (loc, code, type,
12946 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12947 fold_build2_loc (loc,
12948 BIT_XOR_EXPR, itype,
12949 arg00, arg10),
12950 arg01),
12951 build_int_cst (itype, 0));
12953 if (operand_equal_p (arg01, arg10, 0))
12954 return fold_build2_loc (loc, code, type,
12955 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12956 fold_build2_loc (loc,
12957 BIT_XOR_EXPR, itype,
12958 arg00, arg11),
12959 arg01),
12960 build_int_cst (itype, 0));
12962 if (operand_equal_p (arg00, arg11, 0))
12963 return fold_build2_loc (loc, code, type,
12964 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12965 fold_build2_loc (loc,
12966 BIT_XOR_EXPR, itype,
12967 arg01, arg10),
12968 arg00),
12969 build_int_cst (itype, 0));
12971 if (operand_equal_p (arg00, arg10, 0))
12972 return fold_build2_loc (loc, code, type,
12973 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12974 fold_build2_loc (loc,
12975 BIT_XOR_EXPR, itype,
12976 arg01, arg11),
12977 arg00),
12978 build_int_cst (itype, 0));
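/* Illustrative sketch, not part of fold-const.c: masked equality is
   equality of the XOR under the mask, since
   (X & C) ^ (Y & C) == (X ^ Y) & C.  */
#if 0
#include <assert.h>

static void
demo_masked_compare (void)
{
  unsigned int x, y;
  for (x = 0; x < 16; x++)
    for (y = 0; y < 16; y++)
      assert (((x & 6u) == (y & 6u)) == (((x ^ y) & 6u) == 0));
}
#endif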
12981 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12982 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12984 tree arg00 = TREE_OPERAND (arg0, 0);
12985 tree arg01 = TREE_OPERAND (arg0, 1);
12986 tree arg10 = TREE_OPERAND (arg1, 0);
12987 tree arg11 = TREE_OPERAND (arg1, 1);
12988 tree itype = TREE_TYPE (arg0);
12990 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12991 operand_equal_p guarantees no side-effects so we don't need
12992 to use omit_one_operand on Z. */
12993 if (operand_equal_p (arg01, arg11, 0))
12994 return fold_build2_loc (loc, code, type, arg00, arg10);
12995 if (operand_equal_p (arg01, arg10, 0))
12996 return fold_build2_loc (loc, code, type, arg00, arg11);
12997 if (operand_equal_p (arg00, arg11, 0))
12998 return fold_build2_loc (loc, code, type, arg01, arg10);
12999 if (operand_equal_p (arg00, arg10, 0))
13000 return fold_build2_loc (loc, code, type, arg01, arg11);
13002 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13003 if (TREE_CODE (arg01) == INTEGER_CST
13004 && TREE_CODE (arg11) == INTEGER_CST)
13005 return fold_build2_loc (loc, code, type,
13006 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13007 fold_build2_loc (loc,
13008 BIT_XOR_EXPR, itype,
13009 arg01, arg11)),
13010 arg10);
13013 /* Attempt to simplify equality/inequality comparisons of complex
13014 values. Only lower the comparison if the result is known or
13015 can be simplified to a single scalar comparison. */
13016 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13017 || TREE_CODE (arg0) == COMPLEX_CST)
13018 && (TREE_CODE (arg1) == COMPLEX_EXPR
13019 || TREE_CODE (arg1) == COMPLEX_CST))
13021 tree real0, imag0, real1, imag1;
13022 tree rcond, icond;
13024 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13026 real0 = TREE_OPERAND (arg0, 0);
13027 imag0 = TREE_OPERAND (arg0, 1);
13029 else
13031 real0 = TREE_REALPART (arg0);
13032 imag0 = TREE_IMAGPART (arg0);
13035 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13037 real1 = TREE_OPERAND (arg1, 0);
13038 imag1 = TREE_OPERAND (arg1, 1);
13040 else
13042 real1 = TREE_REALPART (arg1);
13043 imag1 = TREE_IMAGPART (arg1);
13046 rcond = fold_binary_loc (loc, code, type, real0, real1);
13047 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13049 if (integer_zerop (rcond))
13051 if (code == EQ_EXPR)
13052 return omit_two_operands_loc (loc, type, boolean_false_node,
13053 imag0, imag1);
13054 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13056 else
13058 if (code == NE_EXPR)
13059 return omit_two_operands_loc (loc, type, boolean_true_node,
13060 imag0, imag1);
13061 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13065 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13066 if (icond && TREE_CODE (icond) == INTEGER_CST)
13068 if (integer_zerop (icond))
13070 if (code == EQ_EXPR)
13071 return omit_two_operands_loc (loc, type, boolean_false_node,
13072 real0, real1);
13073 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13075 else
13077 if (code == NE_EXPR)
13078 return omit_two_operands_loc (loc, type, boolean_true_node,
13079 real0, real1);
13080 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13085 return NULL_TREE;
13087 case LT_EXPR:
13088 case GT_EXPR:
13089 case LE_EXPR:
13090 case GE_EXPR:
13091 tem = fold_comparison (loc, code, type, op0, op1);
13092 if (tem != NULL_TREE)
13093 return tem;
13095 /* Transform comparisons of the form X +- C CMP X. */
13096 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13097 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13098 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13099 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13100 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13101 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13103 tree arg01 = TREE_OPERAND (arg0, 1);
13104 enum tree_code code0 = TREE_CODE (arg0);
13105 int is_positive;
13107 if (TREE_CODE (arg01) == REAL_CST)
13108 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13109 else
13110 is_positive = tree_int_cst_sgn (arg01);
13112 /* (X - c) > X becomes false. */
13113 if (code == GT_EXPR
13114 && ((code0 == MINUS_EXPR && is_positive >= 0)
13115 || (code0 == PLUS_EXPR && is_positive <= 0)))
13117 if (TREE_CODE (arg01) == INTEGER_CST
13118 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13119 fold_overflow_warning (("assuming signed overflow does not "
13120 "occur when assuming that (X - c) > X "
13121 "is always false"),
13122 WARN_STRICT_OVERFLOW_ALL);
13123 return constant_boolean_node (0, type);
13126 /* Likewise (X + c) < X becomes false. */
13127 if (code == LT_EXPR
13128 && ((code0 == PLUS_EXPR && is_positive >= 0)
13129 || (code0 == MINUS_EXPR && is_positive <= 0)))
13131 if (TREE_CODE (arg01) == INTEGER_CST
13132 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13133 fold_overflow_warning (("assuming signed overflow does not "
13134 "occur when assuming that "
13135 "(X + c) < X is always false"),
13136 WARN_STRICT_OVERFLOW_ALL);
13137 return constant_boolean_node (0, type);
13140 /* Convert (X - c) <= X to true. */
13141 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13142 && code == LE_EXPR
13143 && ((code0 == MINUS_EXPR && is_positive >= 0)
13144 || (code0 == PLUS_EXPR && is_positive <= 0)))
13146 if (TREE_CODE (arg01) == INTEGER_CST
13147 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13148 fold_overflow_warning (("assuming signed overflow does not "
13149 "occur when assuming that "
13150 "(X - c) <= X is always true"),
13151 WARN_STRICT_OVERFLOW_ALL);
13152 return constant_boolean_node (1, type);
13155 /* Convert (X + c) >= X to true. */
13156 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13157 && code == GE_EXPR
13158 && ((code0 == PLUS_EXPR && is_positive >= 0)
13159 || (code0 == MINUS_EXPR && is_positive <= 0)))
13161 if (TREE_CODE (arg01) == INTEGER_CST
13162 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13163 fold_overflow_warning (("assuming signed overflow does not "
13164 "occur when assuming that "
13165 "(X + c) >= X is always true"),
13166 WARN_STRICT_OVERFLOW_ALL);
13167 return constant_boolean_node (1, type);
13170 if (TREE_CODE (arg01) == INTEGER_CST)
13172 /* Convert X + c > X and X - c < X to true for integers. */
13173 if (code == GT_EXPR
13174 && ((code0 == PLUS_EXPR && is_positive > 0)
13175 || (code0 == MINUS_EXPR && is_positive < 0)))
13177 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13178 fold_overflow_warning (("assuming signed overflow does "
13179 "not occur when assuming that "
13180 "(X + c) > X is always true"),
13181 WARN_STRICT_OVERFLOW_ALL);
13182 return constant_boolean_node (1, type);
13185 if (code == LT_EXPR
13186 && ((code0 == MINUS_EXPR && is_positive > 0)
13187 || (code0 == PLUS_EXPR && is_positive < 0)))
13189 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13190 fold_overflow_warning (("assuming signed overflow does "
13191 "not occur when assuming that "
13192 "(X - c) < X is always true"),
13193 WARN_STRICT_OVERFLOW_ALL);
13194 return constant_boolean_node (1, type);
13197 /* Convert X + c <= X and X - c >= X to false for integers. */
13198 if (code == LE_EXPR
13199 && ((code0 == PLUS_EXPR && is_positive > 0)
13200 || (code0 == MINUS_EXPR && is_positive < 0)))
13202 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13203 fold_overflow_warning (("assuming signed overflow does "
13204 "not occur when assuming that "
13205 "(X + c) <= X is always false"),
13206 WARN_STRICT_OVERFLOW_ALL);
13207 return constant_boolean_node (0, type);
13210 if (code == GE_EXPR
13211 && ((code0 == MINUS_EXPR && is_positive > 0)
13212 || (code0 == PLUS_EXPR && is_positive < 0)))
13214 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13215 fold_overflow_warning (("assuming signed overflow does "
13216 "not occur when assuming that "
13217 "(X - c) >= X is always false"),
13218 WARN_STRICT_OVERFLOW_ALL);
13219 return constant_boolean_node (0, type);
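/* Illustrative sketch, not part of fold-const.c: the X +- c folds above
   are justified only because signed overflow is undefined; whenever
   x + c does not wrap and c > 0, x + c > x holds.  The
   fold_overflow_warning calls record that this assumption was used.  */
#if 0
#include <assert.h>

static void
demo_x_plus_c (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)  /* no overflow in this range */
    assert (x + 10 > x);
}
#endif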
13224 /* Comparisons with the highest or lowest possible integer of
13225 the specified precision will have known values. */
13227 tree arg1_type = TREE_TYPE (arg1);
13228 unsigned int width = TYPE_PRECISION (arg1_type);
13230 if (TREE_CODE (arg1) == INTEGER_CST
13231 && width <= 2 * HOST_BITS_PER_WIDE_INT
13232 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13234 HOST_WIDE_INT signed_max_hi;
13235 unsigned HOST_WIDE_INT signed_max_lo;
13236 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13238 if (width <= HOST_BITS_PER_WIDE_INT)
13240 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13241 - 1;
13242 signed_max_hi = 0;
13243 max_hi = 0;
13245 if (TYPE_UNSIGNED (arg1_type))
13247 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13248 min_lo = 0;
13249 min_hi = 0;
13251 else
13253 max_lo = signed_max_lo;
13254 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13255 min_hi = -1;
13258 else
13260 width -= HOST_BITS_PER_WIDE_INT;
13261 signed_max_lo = -1;
13262 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13263 - 1;
13264 max_lo = -1;
13265 min_lo = 0;
13267 if (TYPE_UNSIGNED (arg1_type))
13269 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13270 min_hi = 0;
13272 else
13274 max_hi = signed_max_hi;
13275 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13279 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13280 && TREE_INT_CST_LOW (arg1) == max_lo)
13281 switch (code)
13283 case GT_EXPR:
13284 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13286 case GE_EXPR:
13287 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13289 case LE_EXPR:
13290 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13292 case LT_EXPR:
13293 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13295 /* The GE_EXPR and LT_EXPR cases above are not normally
13296 reached because of previous transformations. */
13298 default:
13299 break;
13301 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13302 == max_hi
13303 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13304 switch (code)
13306 case GT_EXPR:
13307 arg1 = const_binop (PLUS_EXPR, arg1,
13308 build_int_cst (TREE_TYPE (arg1), 1), 0);
13309 return fold_build2_loc (loc, EQ_EXPR, type,
13310 fold_convert_loc (loc,
13311 TREE_TYPE (arg1), arg0),
13312 arg1);
13313 case LE_EXPR:
13314 arg1 = const_binop (PLUS_EXPR, arg1,
13315 build_int_cst (TREE_TYPE (arg1), 1), 0);
13316 return fold_build2_loc (loc, NE_EXPR, type,
13317 fold_convert_loc (loc, TREE_TYPE (arg1),
13318 arg0),
13319 arg1);
13320 default:
13321 break;
13323 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13324 == min_hi
13325 && TREE_INT_CST_LOW (arg1) == min_lo)
13326 switch (code)
13328 case LT_EXPR:
13329 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13331 case LE_EXPR:
13332 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13334 case GE_EXPR:
13335 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13337 case GT_EXPR:
13338 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13340 default:
13341 break;
13343 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13344 == min_hi
13345 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13346 switch (code)
13348 case GE_EXPR:
13349 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13350 return fold_build2_loc (loc, NE_EXPR, type,
13351 fold_convert_loc (loc,
13352 TREE_TYPE (arg1), arg0),
13353 arg1);
13354 case LT_EXPR:
13355 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13356 return fold_build2_loc (loc, EQ_EXPR, type,
13357 fold_convert_loc (loc, TREE_TYPE (arg1),
13358 arg0),
13359 arg1);
13360 default:
13361 break;
13364 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13365 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13366 && TYPE_UNSIGNED (arg1_type)
13367 /* We will flip the signedness of the comparison operator
13368 associated with the mode of arg1, so the sign bit is
13369 specified by this mode. Check that arg1 is the signed
13370 max associated with this sign bit. */
13371 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13372 /* signed_type does not work on pointer types. */
13373 && INTEGRAL_TYPE_P (arg1_type))
13375 /* The following case also applies to X < signed_max+1
13376 and X >= signed_max+1 because of previous transformations. */
13377 if (code == LE_EXPR || code == GT_EXPR)
13379 tree st;
13380 st = signed_type_for (TREE_TYPE (arg1));
13381 return fold_build2_loc (loc,
13382 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13383 type, fold_convert_loc (loc, st, arg0),
13384 build_int_cst (st, 0));
13390 /* If we are comparing an ABS_EXPR with a constant, we can
13391 convert all the cases into explicit comparisons, but they may
13392 well not be faster than doing the ABS and one comparison.
13393 But ABS (X) <= C is a range comparison, which becomes a subtraction
13394 and a comparison, and is probably faster. */
13395 if (code == LE_EXPR
13396 && TREE_CODE (arg1) == INTEGER_CST
13397 && TREE_CODE (arg0) == ABS_EXPR
13398 && ! TREE_SIDE_EFFECTS (arg0)
13399 && (0 != (tem = negate_expr (arg1)))
13400 && TREE_CODE (tem) == INTEGER_CST
13401 && !TREE_OVERFLOW (tem))
13402 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13403 build2 (GE_EXPR, type,
13404 TREE_OPERAND (arg0, 0), tem),
13405 build2 (LE_EXPR, type,
13406 TREE_OPERAND (arg0, 0), arg1));
13408 /* Convert ABS_EXPR<x> >= 0 to true. */
13409 strict_overflow_p = false;
13410 if (code == GE_EXPR
13411 && (integer_zerop (arg1)
13412 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13413 && real_zerop (arg1)))
13414 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13416 if (strict_overflow_p)
13417 fold_overflow_warning (("assuming signed overflow does not occur "
13418 "when simplifying comparison of "
13419 "absolute value and zero"),
13420 WARN_STRICT_OVERFLOW_CONDITIONAL);
13421 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13424 /* Convert ABS_EXPR<x> < 0 to false. */
13425 strict_overflow_p = false;
13426 if (code == LT_EXPR
13427 && (integer_zerop (arg1) || real_zerop (arg1))
13428 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13430 if (strict_overflow_p)
13431 fold_overflow_warning (("assuming signed overflow does not occur "
13432 "when simplifying comparison of "
13433 "absolute value and zero"),
13434 WARN_STRICT_OVERFLOW_CONDITIONAL);
13435 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13438 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13439 and similarly for >= into !=. */
13440 if ((code == LT_EXPR || code == GE_EXPR)
13441 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13442 && TREE_CODE (arg1) == LSHIFT_EXPR
13443 && integer_onep (TREE_OPERAND (arg1, 0)))
13445 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13446 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13447 TREE_OPERAND (arg1, 1)),
13448 build_int_cst (TREE_TYPE (arg0), 0));
13449 goto fold_binary_exit;
13452 if ((code == LT_EXPR || code == GE_EXPR)
13453 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13454 && CONVERT_EXPR_P (arg1)
13455 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13456 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13458 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13459 fold_convert_loc (loc, TREE_TYPE (arg0),
13460 build2 (RSHIFT_EXPR,
13461 TREE_TYPE (arg0), arg0,
13462 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13463 1))),
13464 build_int_cst (TREE_TYPE (arg0), 0));
13465 goto fold_binary_exit;
13468 return NULL_TREE;
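/* Illustrative sketch, not part of fold-const.c: for unsigned X,
   X < (1 << Y) holds exactly when all bits of X at position Y and above
   are clear, i.e. when X >> Y is zero.  */
#if 0
#include <assert.h>

static void
demo_unsigned_lt_power (void)
{
  unsigned int x, y;
  for (x = 0; x < 64; x++)
    for (y = 0; y < 6; y++)
      assert ((x < (1u << y)) == ((x >> y) == 0));
}
#endif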
13470 case UNORDERED_EXPR:
13471 case ORDERED_EXPR:
13472 case UNLT_EXPR:
13473 case UNLE_EXPR:
13474 case UNGT_EXPR:
13475 case UNGE_EXPR:
13476 case UNEQ_EXPR:
13477 case LTGT_EXPR:
13478 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13480 t1 = fold_relational_const (code, type, arg0, arg1);
13481 if (t1 != NULL_TREE)
13482 return t1;
13485 /* If the first operand is NaN, the result is constant. */
13486 if (TREE_CODE (arg0) == REAL_CST
13487 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13488 && (code != LTGT_EXPR || ! flag_trapping_math))
13490 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13491 ? integer_zero_node
13492 : integer_one_node;
13493 return omit_one_operand_loc (loc, type, t1, arg1);
13496 /* If the second operand is NaN, the result is constant. */
13497 if (TREE_CODE (arg1) == REAL_CST
13498 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13499 && (code != LTGT_EXPR || ! flag_trapping_math))
13501 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13502 ? integer_zero_node
13503 : integer_one_node;
13504 return omit_one_operand_loc (loc, type, t1, arg0);
13507 /* Simplify unordered comparison of something with itself. */
13508 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13509 && operand_equal_p (arg0, arg1, 0))
13510 return constant_boolean_node (1, type);
13512 if (code == LTGT_EXPR
13513 && !flag_trapping_math
13514 && operand_equal_p (arg0, arg1, 0))
13515 return constant_boolean_node (0, type);
13517 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13519 tree targ0 = strip_float_extensions (arg0);
13520 tree targ1 = strip_float_extensions (arg1);
13521 tree newtype = TREE_TYPE (targ0);
13523 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13524 newtype = TREE_TYPE (targ1);
13526 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13527 return fold_build2_loc (loc, code, type,
13528 fold_convert_loc (loc, newtype, targ0),
13529 fold_convert_loc (loc, newtype, targ1));
13532 return NULL_TREE;
13534 case COMPOUND_EXPR:
13535 /* When pedantic, a compound expression can be neither an lvalue
13536 nor an integer constant expression. */
13537 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13538 return NULL_TREE;
13539 /* Don't let (0, 0) be null pointer constant. */
13540 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13541 : fold_convert_loc (loc, type, arg1);
13542 return pedantic_non_lvalue_loc (loc, tem);
13544 case COMPLEX_EXPR:
13545 if ((TREE_CODE (arg0) == REAL_CST
13546 && TREE_CODE (arg1) == REAL_CST)
13547 || (TREE_CODE (arg0) == INTEGER_CST
13548 && TREE_CODE (arg1) == INTEGER_CST))
13549 return build_complex (type, arg0, arg1);
13550 return NULL_TREE;
13552 case ASSERT_EXPR:
13553 /* An ASSERT_EXPR should never be passed to fold_binary. */
13554 gcc_unreachable ();
13556 default:
13557 return NULL_TREE;
13558 } /* switch (code) */
13559 fold_binary_exit:
13560 protected_set_expr_location (tem, loc);
13561 return tem;
13564 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13565 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13566 of GOTO_EXPR. */
13568 static tree
13569 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13571 switch (TREE_CODE (*tp))
13573 case LABEL_EXPR:
13574 return *tp;
13576 case GOTO_EXPR:
13577 *walk_subtrees = 0;
13579 /* ... fall through ... */
13581 default:
13582 return NULL_TREE;
13586 /* Return whether the sub-tree ST contains a label which is accessible from
13587 outside the sub-tree. */
13589 static bool
13590 contains_label_p (tree st)
13592 return
13593 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13596 /* Fold a ternary expression of code CODE and type TYPE with operands
13597 OP0, OP1, and OP2. Return the folded expression if folding is
13598 successful. Otherwise, return NULL_TREE. */
13600 tree
13601 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13602 tree op0, tree op1, tree op2)
13604 tree tem;
13605 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13606 enum tree_code_class kind = TREE_CODE_CLASS (code);
13608 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13609 && TREE_CODE_LENGTH (code) == 3);
13611 /* Strip any conversions that don't change the mode. This is safe
13612 for every expression, except for a comparison expression because
13613 its signedness is derived from its operands. So, in the latter
13614 case, only strip conversions that don't change the signedness.
13616 Note that this is done as an internal manipulation within the
13617 constant folder, in order to find the simplest representation of
13618 the arguments so that their form can be studied. In any cases,
13619 the appropriate type conversions should be put back in the tree
13620 that will get out of the constant folder. */
13621 if (op0)
13623 arg0 = op0;
13624 STRIP_NOPS (arg0);
13627 if (op1)
13629 arg1 = op1;
13630 STRIP_NOPS (arg1);
13633 switch (code)
13635 case COMPONENT_REF:
13636 if (TREE_CODE (arg0) == CONSTRUCTOR
13637 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13639 unsigned HOST_WIDE_INT idx;
13640 tree field, value;
13641 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13642 if (field == arg1)
13643 return value;
13645 return NULL_TREE;
13647 case COND_EXPR:
13648 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13649 so all simple results must be passed through pedantic_non_lvalue. */
13650 if (TREE_CODE (arg0) == INTEGER_CST)
13652 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13653 tem = integer_zerop (arg0) ? op2 : op1;
13654 /* Only optimize constant conditions when the selected branch
13655 has the same type as the COND_EXPR. This avoids optimizing
13656 away "c ? x : throw", where the throw has a void type.
13657 Avoid throwing away the operand that contains a label. */
13658 if ((!TREE_SIDE_EFFECTS (unused_op)
13659 || !contains_label_p (unused_op))
13660 && (! VOID_TYPE_P (TREE_TYPE (tem))
13661 || VOID_TYPE_P (type)))
13662 return pedantic_non_lvalue_loc (loc, tem);
13663 return NULL_TREE;
13665 if (operand_equal_p (arg1, op2, 0))
13666 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13668 /* If we have A op B ? A : C, we may be able to convert this to a
13669 simpler expression, depending on the operation and the values
13670 of B and C. Signed zeros prevent all of these transformations,
13671 for reasons given above each one.
13673 Also try swapping the arguments and inverting the conditional. */
13674 if (COMPARISON_CLASS_P (arg0)
13675 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13676 arg1, TREE_OPERAND (arg0, 1))
13677 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13679 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13680 if (tem)
13681 return tem;
13684 if (COMPARISON_CLASS_P (arg0)
13685 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13686 op2,
13687 TREE_OPERAND (arg0, 1))
13688 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13690 tem = fold_truth_not_expr (loc, arg0);
13691 if (tem && COMPARISON_CLASS_P (tem))
13693 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13694 if (tem)
13695 return tem;
13699 /* If the second operand is simpler than the third, swap them
13700 since that produces better jump optimization results. */
13701 if (truth_value_p (TREE_CODE (arg0))
13702 && tree_swap_operands_p (op1, op2, false))
13704 /* See if this can be inverted. If it can't, possibly because
13705 it was a floating-point inequality comparison, don't do
13706 anything. */
13707 tem = fold_truth_not_expr (loc, arg0);
13708 if (tem)
13709 return fold_build3_loc (loc, code, type, tem, op2, op1);
13712 /* Convert A ? 1 : 0 to simply A. */
13713 if (integer_onep (op1)
13714 && integer_zerop (op2)
13715 /* If we try to convert OP0 to our type, the
13716 call to fold will try to move the conversion inside
13717 a COND, which will recurse. In that case, the COND_EXPR
13718 is probably the best choice, so leave it alone. */
13719 && type == TREE_TYPE (arg0))
13720 return pedantic_non_lvalue_loc (loc, arg0);
13722 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13723 over COND_EXPR in cases such as floating point comparisons. */
13724 if (integer_zerop (op1)
13725 && integer_onep (op2)
13726 && truth_value_p (TREE_CODE (arg0)))
13727 return pedantic_non_lvalue_loc (loc,
13728 fold_convert_loc (loc, type,
13729 invert_truthvalue_loc (loc,
13730 arg0)));
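/* Illustrative sketch, not part of fold-const.c: for a 0/1 truth value
   A, the two conditional selections above fold to A itself or to its
   negation.  */
#if 0
#include <assert.h>

static void
demo_cond_truth (void)
{
  int a;
  for (a = 0; a <= 1; a++)
    {
      assert ((a ? 1 : 0) == a);
      assert ((a ? 0 : 1) == !a);
    }
}
#endif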
13732 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13733 if (TREE_CODE (arg0) == LT_EXPR
13734 && integer_zerop (TREE_OPERAND (arg0, 1))
13735 && integer_zerop (op2)
13736 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13738 /* sign_bit_p only checks ARG1 bits within A's precision.
13739 If <sign bit of A> has wider type than A, bits outside
13740 of A's precision in <sign bit of A> need to be checked.
13741 If they are all 0, this optimization needs to be done
13742 in unsigned A's type; if they are all 1, in signed A's type;
13743 otherwise this can't be done. */
13744 if (TYPE_PRECISION (TREE_TYPE (tem))
13745 < TYPE_PRECISION (TREE_TYPE (arg1))
13746 && TYPE_PRECISION (TREE_TYPE (tem))
13747 < TYPE_PRECISION (type))
13749 unsigned HOST_WIDE_INT mask_lo;
13750 HOST_WIDE_INT mask_hi;
13751 int inner_width, outer_width;
13752 tree tem_type;
13754 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13755 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13756 if (outer_width > TYPE_PRECISION (type))
13757 outer_width = TYPE_PRECISION (type);
13759 if (outer_width > HOST_BITS_PER_WIDE_INT)
13761 mask_hi = ((unsigned HOST_WIDE_INT) -1
13762 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13763 mask_lo = -1;
13765 else
13767 mask_hi = 0;
13768 mask_lo = ((unsigned HOST_WIDE_INT) -1
13769 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13771 if (inner_width > HOST_BITS_PER_WIDE_INT)
13773 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13774 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13775 mask_lo = 0;
13777 else
13778 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13779 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13781 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13782 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13784 tem_type = signed_type_for (TREE_TYPE (tem));
13785 tem = fold_convert_loc (loc, tem_type, tem);
13787 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13788 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13790 tem_type = unsigned_type_for (TREE_TYPE (tem));
13791 tem = fold_convert_loc (loc, tem_type, tem);
13793 else
13794 tem = NULL;
13797 if (tem)
13798 return
13799 fold_convert_loc (loc, type,
13800 fold_build2_loc (loc, BIT_AND_EXPR,
13801 TREE_TYPE (tem), tem,
13802 fold_convert_loc (loc,
13803 TREE_TYPE (tem),
13804 arg1)));
13807 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13808 already handled above. */
13809 if (TREE_CODE (arg0) == BIT_AND_EXPR
13810 && integer_onep (TREE_OPERAND (arg0, 1))
13811 && integer_zerop (op2)
13812 && integer_pow2p (arg1))
13814 tree tem = TREE_OPERAND (arg0, 0);
13815 STRIP_NOPS (tem);
13816 if (TREE_CODE (tem) == RSHIFT_EXPR
13817 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13818 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13819 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13820 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13821 TREE_OPERAND (tem, 0), arg1);
13824 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13825 is probably obsolete because the first operand should be a
13826 truth value (that's why we have the two cases above), but let's
13827 leave it in until we can confirm this for all front-ends. */
13828 if (integer_zerop (op2)
13829 && TREE_CODE (arg0) == NE_EXPR
13830 && integer_zerop (TREE_OPERAND (arg0, 1))
13831 && integer_pow2p (arg1)
13832 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13833 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13834 arg1, OEP_ONLY_CONST))
13835 return pedantic_non_lvalue_loc (loc,
13836 fold_convert_loc (loc, type,
13837 TREE_OPERAND (arg0, 0)));
13839 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13840 if (integer_zerop (op2)
13841 && truth_value_p (TREE_CODE (arg0))
13842 && truth_value_p (TREE_CODE (arg1)))
13843 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13844 fold_convert_loc (loc, type, arg0),
13845 arg1);
13847 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13848 if (integer_onep (op2)
13849 && truth_value_p (TREE_CODE (arg0))
13850 && truth_value_p (TREE_CODE (arg1)))
13852 /* Only perform transformation if ARG0 is easily inverted. */
13853 tem = fold_truth_not_expr (loc, arg0);
13854 if (tem)
13855 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13856 fold_convert_loc (loc, type, tem),
13857 arg1);
13860 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13861 if (integer_zerop (arg1)
13862 && truth_value_p (TREE_CODE (arg0))
13863 && truth_value_p (TREE_CODE (op2)))
13865 /* Only perform transformation if ARG0 is easily inverted. */
13866 tem = fold_truth_not_expr (loc, arg0);
13867 if (tem)
13868 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13869 fold_convert_loc (loc, type, tem),
13870 op2);
13873 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13874 if (integer_onep (arg1)
13875 && truth_value_p (TREE_CODE (arg0))
13876 && truth_value_p (TREE_CODE (op2)))
13877 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13878 fold_convert_loc (loc, type, arg0),
13879 op2);
13881 return NULL_TREE;
13883 case CALL_EXPR:
13884 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13885 of fold_ternary on them. */
13886 gcc_unreachable ();
13888 case BIT_FIELD_REF:
13889 if ((TREE_CODE (arg0) == VECTOR_CST
13890 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13891 && type == TREE_TYPE (TREE_TYPE (arg0)))
13893 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13894 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13896 if (width != 0
13897 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13898 && (idx % width) == 0
13899 && (idx = idx / width)
13900 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13902 tree elements = NULL_TREE;
13904 if (TREE_CODE (arg0) == VECTOR_CST)
13905 elements = TREE_VECTOR_CST_ELTS (arg0);
13906 else
13908 unsigned HOST_WIDE_INT idx;
13909 tree value;
13911 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13912 elements = tree_cons (NULL_TREE, value, elements);
13914 while (idx-- > 0 && elements)
13915 elements = TREE_CHAIN (elements);
13916 if (elements)
13917 return TREE_VALUE (elements);
13918 else
13919 return fold_convert_loc (loc, type, integer_zero_node);
13923 /* A bit-field-ref that referenced the full argument can be stripped. */
13924 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13925 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13926 && integer_zerop (op2))
13927 return fold_convert_loc (loc, type, arg0);
13929 return NULL_TREE;
13931 default:
13932 return NULL_TREE;
13933 } /* switch (code) */
13936 /* Perform constant folding and related simplification of EXPR.
13937 The related simplifications include x*1 => x, x*0 => 0, etc.,
13938 and application of the associative law.
13939 NOP_EXPR conversions may be removed freely (as long as we
13940 are careful not to change the type of the overall expression).
13941 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13942 but we can constant-fold them if they have constant operands. */
13944 #ifdef ENABLE_FOLD_CHECKING
13945 # define fold(x) fold_1 (x)
13946 static tree fold_1 (tree);
13947 static
13948 #endif
13949 tree
13950 fold (tree expr)
13952 const tree t = expr;
13953 enum tree_code code = TREE_CODE (t);
13954 enum tree_code_class kind = TREE_CODE_CLASS (code);
13955 tree tem;
13956 location_t loc = EXPR_LOCATION (expr);
13958 /* Return right away if a constant. */
13959 if (kind == tcc_constant)
13960 return t;
13962 /* CALL_EXPR-like objects with variable numbers of operands are
13963 treated specially. */
13964 if (kind == tcc_vl_exp)
13966 if (code == CALL_EXPR)
13968 tem = fold_call_expr (loc, expr, false);
13969 return tem ? tem : expr;
13971 return expr;
13974 if (IS_EXPR_CODE_CLASS (kind))
13976 tree type = TREE_TYPE (t);
13977 tree op0, op1, op2;
13979 switch (TREE_CODE_LENGTH (code))
13981 case 1:
13982 op0 = TREE_OPERAND (t, 0);
13983 tem = fold_unary_loc (loc, code, type, op0);
13984 return tem ? tem : expr;
13985 case 2:
13986 op0 = TREE_OPERAND (t, 0);
13987 op1 = TREE_OPERAND (t, 1);
13988 tem = fold_binary_loc (loc, code, type, op0, op1);
13989 return tem ? tem : expr;
13990 case 3:
13991 op0 = TREE_OPERAND (t, 0);
13992 op1 = TREE_OPERAND (t, 1);
13993 op2 = TREE_OPERAND (t, 2);
13994 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13995 return tem ? tem : expr;
13996 default:
13997 break;
14001 switch (code)
14003 case ARRAY_REF:
14005 tree op0 = TREE_OPERAND (t, 0);
14006 tree op1 = TREE_OPERAND (t, 1);
14008 if (TREE_CODE (op1) == INTEGER_CST
14009 && TREE_CODE (op0) == CONSTRUCTOR
14010 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14012 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14013 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14014 unsigned HOST_WIDE_INT begin = 0;
14016 /* Find a matching index by means of a binary search. */
14017 while (begin != end)
14019 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14020 tree index = VEC_index (constructor_elt, elts, middle)->index;
14022 if (TREE_CODE (index) == INTEGER_CST
14023 && tree_int_cst_lt (index, op1))
14024 begin = middle + 1;
14025 else if (TREE_CODE (index) == INTEGER_CST
14026 && tree_int_cst_lt (op1, index))
14027 end = middle;
14028 else if (TREE_CODE (index) == RANGE_EXPR
14029 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14030 begin = middle + 1;
14031 else if (TREE_CODE (index) == RANGE_EXPR
14032 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14033 end = middle;
14034 else
14035 return VEC_index (constructor_elt, elts, middle)->value;
14039 return t;
14042 case CONST_DECL:
14043 return fold (DECL_INITIAL (t));
14045 default:
14046 return t;
14047 } /* switch (code) */
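/* Illustrative sketch, not part of this file: the constructor-element
   lookup in the ARRAY_REF case above is a plain lower-bound binary
   search over element indexes kept in ascending order.  All names
   here are hypothetical. */

static int
example_constructor_lookup (const int *keys, const int *values,
                            int nelts, int key)
{
  int begin = 0, end = nelts;
  while (begin != end)
    {
      int middle = (begin + end) / 2;   /* same midpoint rule as above */
      if (keys[middle] < key)
        begin = middle + 1;
      else if (key < keys[middle])
        end = middle;
      else
        return values[middle];
    }
  return -1;  /* no matching index */
}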
14050 #ifdef ENABLE_FOLD_CHECKING
14051 #undef fold
14053 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14054 static void fold_check_failed (const_tree, const_tree);
14055 void print_fold_checksum (const_tree);
14057 /* When --enable-checking=fold, compute a digest of EXPR before
14058 and after the actual fold call to verify that fold did not
14059 accidentally change the original EXPR. */
14061 tree
14062 fold (tree expr)
14064 tree ret;
14065 struct md5_ctx ctx;
14066 unsigned char checksum_before[16], checksum_after[16];
14067 htab_t ht;
14069 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14070 md5_init_ctx (&ctx);
14071 fold_checksum_tree (expr, &ctx, ht);
14072 md5_finish_ctx (&ctx, checksum_before);
14073 htab_empty (ht);
14075 ret = fold_1 (expr);
14077 md5_init_ctx (&ctx);
14078 fold_checksum_tree (expr, &ctx, ht);
14079 md5_finish_ctx (&ctx, checksum_after);
14080 htab_delete (ht);
14082 if (memcmp (checksum_before, checksum_after, 16))
14083 fold_check_failed (expr, ret);
14085 return ret;
14088 void
14089 print_fold_checksum (const_tree expr)
14091 struct md5_ctx ctx;
14092 unsigned char checksum[16], cnt;
14093 htab_t ht;
14095 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14096 md5_init_ctx (&ctx);
14097 fold_checksum_tree (expr, &ctx, ht);
14098 md5_finish_ctx (&ctx, checksum);
14099 htab_delete (ht);
14100 for (cnt = 0; cnt < 16; ++cnt)
14101 fprintf (stderr, "%02x", checksum[cnt]);
14102 putc ('\n', stderr);
14105 static void
14106 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14108 internal_error ("fold check: original tree changed by fold");
14111 static void
14112 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14114 const void **slot;
14115 enum tree_code code;
14116 union tree_node buf;
14117 int i, len;
14119 recursive_label:
14121 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14122 <= sizeof (struct tree_function_decl))
14123 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14124 if (expr == NULL)
14125 return;
14126 slot = (const void **) htab_find_slot (ht, expr, INSERT);
14127 if (*slot != NULL)
14128 return;
14129 *slot = expr;
14130 code = TREE_CODE (expr);
14131 if (TREE_CODE_CLASS (code) == tcc_declaration
14132 && DECL_ASSEMBLER_NAME_SET_P (expr))
14134 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14135 memcpy ((char *) &buf, expr, tree_size (expr));
14136 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14137 expr = (tree) &buf;
14139 else if (TREE_CODE_CLASS (code) == tcc_type
14140 && (TYPE_POINTER_TO (expr)
14141 || TYPE_REFERENCE_TO (expr)
14142 || TYPE_CACHED_VALUES_P (expr)
14143 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14144 || TYPE_NEXT_VARIANT (expr)))
14146 /* Allow these fields to be modified. */
14147 tree tmp;
14148 memcpy ((char *) &buf, expr, tree_size (expr));
14149 expr = tmp = (tree) &buf;
14150 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14151 TYPE_POINTER_TO (tmp) = NULL;
14152 TYPE_REFERENCE_TO (tmp) = NULL;
14153 TYPE_NEXT_VARIANT (tmp) = NULL;
14154 if (TYPE_CACHED_VALUES_P (tmp))
14156 TYPE_CACHED_VALUES_P (tmp) = 0;
14157 TYPE_CACHED_VALUES (tmp) = NULL;
14160 md5_process_bytes (expr, tree_size (expr), ctx);
14161 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14162 if (TREE_CODE_CLASS (code) != tcc_type
14163 && TREE_CODE_CLASS (code) != tcc_declaration
14164 && code != TREE_LIST
14165 && code != SSA_NAME)
14166 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14167 switch (TREE_CODE_CLASS (code))
14169 case tcc_constant:
14170 switch (code)
14172 case STRING_CST:
14173 md5_process_bytes (TREE_STRING_POINTER (expr),
14174 TREE_STRING_LENGTH (expr), ctx);
14175 break;
14176 case COMPLEX_CST:
14177 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14178 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14179 break;
14180 case VECTOR_CST:
14181 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14182 break;
14183 default:
14184 break;
14186 break;
14187 case tcc_exceptional:
14188 switch (code)
14190 case TREE_LIST:
14191 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14192 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14193 expr = TREE_CHAIN (expr);
14194 goto recursive_label;
14195 break;
14196 case TREE_VEC:
14197 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14198 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14199 break;
14200 default:
14201 break;
14203 break;
14204 case tcc_expression:
14205 case tcc_reference:
14206 case tcc_comparison:
14207 case tcc_unary:
14208 case tcc_binary:
14209 case tcc_statement:
14210 case tcc_vl_exp:
14211 len = TREE_OPERAND_LENGTH (expr);
14212 for (i = 0; i < len; ++i)
14213 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14214 break;
14215 case tcc_declaration:
14216 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14217 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14218 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14220 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14221 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14222 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14223 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14224 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14226 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14227 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14229 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14231 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14232 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14233 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14235 break;
14236 case tcc_type:
14237 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14238 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14239 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14240 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14241 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14242 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14243 if (INTEGRAL_TYPE_P (expr)
14244 || SCALAR_FLOAT_TYPE_P (expr))
14246 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14247 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14249 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14250 if (TREE_CODE (expr) == RECORD_TYPE
14251 || TREE_CODE (expr) == UNION_TYPE
14252 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14253 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14254 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14255 break;
14256 default:
14257 break;
14261 /* Helper function for outputting the checksum of a tree T. When
14262 debugging with gdb, you can "define mynext" to be "next" followed
14263 by "call debug_fold_checksum (op0)", then just step through until
14264 the outputs differ. */
14266 void
14267 debug_fold_checksum (const_tree t)
14269 int i;
14270 unsigned char checksum[16];
14271 struct md5_ctx ctx;
14272 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14274 md5_init_ctx (&ctx);
14275 fold_checksum_tree (t, &ctx, ht);
14276 md5_finish_ctx (&ctx, checksum);
14277 htab_empty (ht);
14279 for (i = 0; i < 16; i++)
14280 fprintf (stderr, "%d ", checksum[i]);
14282 fprintf (stderr, "\n");
14285 #endif
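/* Illustrative sketch, not part of this file: the ENABLE_FOLD_CHECKING
   machinery above, reduced to its core pattern -- hash the input before
   and after a call that must not mutate it, and fail loudly on any
   difference.  The example_* names are hypothetical. */

#include <stdint.h>
#include <stdlib.h>

static uint64_t
example_fnv1a (const void *p, size_t n)
{
  const unsigned char *s = (const unsigned char *) p;
  uint64_t h = 1469598103934665603ULL;
  while (n--)
    h = (h ^ *s++) * 1099511628211ULL;
  return h;
}

static int
example_checked_call (int *operand, int (*fn) (const int *))
{
  uint64_t before = example_fnv1a (operand, sizeof *operand);
  int result = fn (operand);
  if (before != example_fnv1a (operand, sizeof *operand))
    abort ();  /* the analogue of fold_check_failed */
  return result;
}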
14287 /* Fold a unary tree expression with code CODE of type TYPE with an
14288 operand OP0. LOC is the location of the resulting expression.
14289 Return a folded expression if successful. Otherwise, return a tree
14290 expression with code CODE of type TYPE with an operand OP0. */
14292 tree
14293 fold_build1_stat_loc (location_t loc,
14294 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14296 tree tem;
14297 #ifdef ENABLE_FOLD_CHECKING
14298 unsigned char checksum_before[16], checksum_after[16];
14299 struct md5_ctx ctx;
14300 htab_t ht;
14302 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14303 md5_init_ctx (&ctx);
14304 fold_checksum_tree (op0, &ctx, ht);
14305 md5_finish_ctx (&ctx, checksum_before);
14306 htab_empty (ht);
14307 #endif
14309 tem = fold_unary_loc (loc, code, type, op0);
14310 if (!tem)
14312 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14313 SET_EXPR_LOCATION (tem, loc);
14316 #ifdef ENABLE_FOLD_CHECKING
14317 md5_init_ctx (&ctx);
14318 fold_checksum_tree (op0, &ctx, ht);
14319 md5_finish_ctx (&ctx, checksum_after);
14320 htab_delete (ht);
14322 if (memcmp (checksum_before, checksum_after, 16))
14323 fold_check_failed (op0, tem);
14324 #endif
14325 return tem;
14328 /* Fold a binary tree expression with code CODE of type TYPE with
14329 operands OP0 and OP1. LOC is the location of the resulting
14330 expression. Return a folded expression if successful. Otherwise,
14331 return a tree expression with code CODE of type TYPE with operands
14332 OP0 and OP1. */
14334 tree
14335 fold_build2_stat_loc (location_t loc,
14336 enum tree_code code, tree type, tree op0, tree op1
14337 MEM_STAT_DECL)
14339 tree tem;
14340 #ifdef ENABLE_FOLD_CHECKING
14341 unsigned char checksum_before_op0[16],
14342 checksum_before_op1[16],
14343 checksum_after_op0[16],
14344 checksum_after_op1[16];
14345 struct md5_ctx ctx;
14346 htab_t ht;
14348 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14349 md5_init_ctx (&ctx);
14350 fold_checksum_tree (op0, &ctx, ht);
14351 md5_finish_ctx (&ctx, checksum_before_op0);
14352 htab_empty (ht);
14354 md5_init_ctx (&ctx);
14355 fold_checksum_tree (op1, &ctx, ht);
14356 md5_finish_ctx (&ctx, checksum_before_op1);
14357 htab_empty (ht);
14358 #endif
14360 tem = fold_binary_loc (loc, code, type, op0, op1);
14361 if (!tem)
14363 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14364 SET_EXPR_LOCATION (tem, loc);
14367 #ifdef ENABLE_FOLD_CHECKING
14368 md5_init_ctx (&ctx);
14369 fold_checksum_tree (op0, &ctx, ht);
14370 md5_finish_ctx (&ctx, checksum_after_op0);
14371 htab_empty (ht);
14373 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14374 fold_check_failed (op0, tem);
14376 md5_init_ctx (&ctx);
14377 fold_checksum_tree (op1, &ctx, ht);
14378 md5_finish_ctx (&ctx, checksum_after_op1);
14379 htab_delete (ht);
14381 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14382 fold_check_failed (op1, tem);
14383 #endif
14384 return tem;
14387 /* Fold a ternary tree expression with code CODE of type TYPE with
14388 operands OP0, OP1, and OP2. Return a folded expression if
14389 successful. Otherwise, return a tree expression with code CODE of
14390 type TYPE with operands OP0, OP1, and OP2. */
14392 tree
14393 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14394 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14396 tree tem;
14397 #ifdef ENABLE_FOLD_CHECKING
14398 unsigned char checksum_before_op0[16],
14399 checksum_before_op1[16],
14400 checksum_before_op2[16],
14401 checksum_after_op0[16],
14402 checksum_after_op1[16],
14403 checksum_after_op2[16];
14404 struct md5_ctx ctx;
14405 htab_t ht;
14407 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14408 md5_init_ctx (&ctx);
14409 fold_checksum_tree (op0, &ctx, ht);
14410 md5_finish_ctx (&ctx, checksum_before_op0);
14411 htab_empty (ht);
14413 md5_init_ctx (&ctx);
14414 fold_checksum_tree (op1, &ctx, ht);
14415 md5_finish_ctx (&ctx, checksum_before_op1);
14416 htab_empty (ht);
14418 md5_init_ctx (&ctx);
14419 fold_checksum_tree (op2, &ctx, ht);
14420 md5_finish_ctx (&ctx, checksum_before_op2);
14421 htab_empty (ht);
14422 #endif
14424 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14425 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14426 if (!tem)
14428 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14429 SET_EXPR_LOCATION (tem, loc);
14432 #ifdef ENABLE_FOLD_CHECKING
14433 md5_init_ctx (&ctx);
14434 fold_checksum_tree (op0, &ctx, ht);
14435 md5_finish_ctx (&ctx, checksum_after_op0);
14436 htab_empty (ht);
14438 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14439 fold_check_failed (op0, tem);
14441 md5_init_ctx (&ctx);
14442 fold_checksum_tree (op1, &ctx, ht);
14443 md5_finish_ctx (&ctx, checksum_after_op1);
14444 htab_empty (ht);
14446 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14447 fold_check_failed (op1, tem);
14449 md5_init_ctx (&ctx);
14450 fold_checksum_tree (op2, &ctx, ht);
14451 md5_finish_ctx (&ctx, checksum_after_op2);
14452 htab_delete (ht);
14454 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14455 fold_check_failed (op2, tem);
14456 #endif
14457 return tem;
14460 /* Fold a CALL_EXPR of type TYPE with function FN, the NARGS
14461 arguments in ARGARRAY, and a null static chain.
14462 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14463 of type TYPE from the given operands as constructed by build_call_array. */
14465 tree
14466 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14467 int nargs, tree *argarray)
14469 tree tem;
14470 #ifdef ENABLE_FOLD_CHECKING
14471 unsigned char checksum_before_fn[16],
14472 checksum_before_arglist[16],
14473 checksum_after_fn[16],
14474 checksum_after_arglist[16];
14475 struct md5_ctx ctx;
14476 htab_t ht;
14477 int i;
14479 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14480 md5_init_ctx (&ctx);
14481 fold_checksum_tree (fn, &ctx, ht);
14482 md5_finish_ctx (&ctx, checksum_before_fn);
14483 htab_empty (ht);
14485 md5_init_ctx (&ctx);
14486 for (i = 0; i < nargs; i++)
14487 fold_checksum_tree (argarray[i], &ctx, ht);
14488 md5_finish_ctx (&ctx, checksum_before_arglist);
14489 htab_empty (ht);
14490 #endif
14492 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14494 #ifdef ENABLE_FOLD_CHECKING
14495 md5_init_ctx (&ctx);
14496 fold_checksum_tree (fn, &ctx, ht);
14497 md5_finish_ctx (&ctx, checksum_after_fn);
14498 htab_empty (ht);
14500 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14501 fold_check_failed (fn, tem);
14503 md5_init_ctx (&ctx);
14504 for (i = 0; i < nargs; i++)
14505 fold_checksum_tree (argarray[i], &ctx, ht);
14506 md5_finish_ctx (&ctx, checksum_after_arglist);
14507 htab_delete (ht);
14509 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14510 fold_check_failed (NULL_TREE, tem);
14511 #endif
14512 return tem;
14515 /* Perform constant folding and related simplification of initializer
14516 expression EXPR. These behave identically to "fold_buildN" but ignore
14517 potential run-time traps and exceptions that fold must preserve. */
14519 #define START_FOLD_INIT \
14520 int saved_signaling_nans = flag_signaling_nans;\
14521 int saved_trapping_math = flag_trapping_math;\
14522 int saved_rounding_math = flag_rounding_math;\
14523 int saved_trapv = flag_trapv;\
14524 int saved_folding_initializer = folding_initializer;\
14525 flag_signaling_nans = 0;\
14526 flag_trapping_math = 0;\
14527 flag_rounding_math = 0;\
14528 flag_trapv = 0;\
14529 folding_initializer = 1;
14531 #define END_FOLD_INIT \
14532 flag_signaling_nans = saved_signaling_nans;\
14533 flag_trapping_math = saved_trapping_math;\
14534 flag_rounding_math = saved_rounding_math;\
14535 flag_trapv = saved_trapv;\
14536 folding_initializer = saved_folding_initializer;
14538 tree
14539 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14540 tree type, tree op)
14542 tree result;
14543 START_FOLD_INIT;
14545 result = fold_build1_loc (loc, code, type, op);
14547 END_FOLD_INIT;
14548 return result;
14551 tree
14552 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14553 tree type, tree op0, tree op1)
14555 tree result;
14556 START_FOLD_INIT;
14558 result = fold_build2_loc (loc, code, type, op0, op1);
14560 END_FOLD_INIT;
14561 return result;
14564 tree
14565 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14566 tree type, tree op0, tree op1, tree op2)
14568 tree result;
14569 START_FOLD_INIT;
14571 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14573 END_FOLD_INIT;
14574 return result;
14577 tree
14578 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14579 int nargs, tree *argarray)
14581 tree result;
14582 START_FOLD_INIT;
14584 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14586 END_FOLD_INIT;
14587 return result;
14590 #undef START_FOLD_INIT
14591 #undef END_FOLD_INIT
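/* Illustrative sketch, not part of this file: START_FOLD_INIT and
   END_FOLD_INIT above are a plain save/clear/restore of global flags
   around the folding call.  With hypothetical names: */

static int example_flag_trapping_math = 1;

static int
example_fold_initializer (int (*folder) (void))
{
  int saved_trapping_math = example_flag_trapping_math;  /* START_FOLD_INIT */
  int result;
  example_flag_trapping_math = 0;
  result = folder ();
  example_flag_trapping_math = saved_trapping_math;      /* END_FOLD_INIT */
  return result;
}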
14593 /* Determine if the first argument is a multiple of the second. Return 0
14594 if it is not, or if we cannot easily determine that it is.
14596 An example of the sort of thing we care about (at this point; this routine
14597 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14598 fold cases do now) is discovering that
14600 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14602 is a multiple of
14604 SAVE_EXPR (J * 8)
14606 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14608 This code also handles discovering that
14610 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14612 is a multiple of 8 so we don't have to worry about dealing with a
14613 possible remainder.
14615 Note that we *look* inside a SAVE_EXPR only to determine how it was
14616 calculated; it is not safe for fold to do much of anything else with the
14617 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14618 at run time. For example, the latter example above *cannot* be implemented
14619 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14620 evaluation time of the original SAVE_EXPR is not necessarily the same at
14621 the time the new expression is evaluated. The only optimization of this
14622 sort that would be valid is changing
14624 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14626 divided by 8 to
14628 SAVE_EXPR (I) * SAVE_EXPR (J)
14630 (where the same SAVE_EXPR (J) is used in the original and the
14631 transformed version). */
14633 int
14634 multiple_of_p (tree type, const_tree top, const_tree bottom)
14636 if (operand_equal_p (top, bottom, 0))
14637 return 1;
14639 if (TREE_CODE (type) != INTEGER_TYPE)
14640 return 0;
14642 switch (TREE_CODE (top))
14644 case BIT_AND_EXPR:
14645 /* Bitwise and provides a power of two multiple. If the mask is
14646 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14647 if (!integer_pow2p (bottom))
14648 return 0;
14649 /* FALLTHRU */
14651 case MULT_EXPR:
14652 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14653 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14655 case PLUS_EXPR:
14656 case MINUS_EXPR:
14657 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14658 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14660 case LSHIFT_EXPR:
14661 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14663 tree op1, t1;
14665 op1 = TREE_OPERAND (top, 1);
14666 /* const_binop may not detect overflow correctly,
14667 so check for it explicitly here. */
14668 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14669 > TREE_INT_CST_LOW (op1)
14670 && TREE_INT_CST_HIGH (op1) == 0
14671 && 0 != (t1 = fold_convert (type,
14672 const_binop (LSHIFT_EXPR,
14673 size_one_node,
14674 op1, 0)))
14675 && !TREE_OVERFLOW (t1))
14676 return multiple_of_p (type, t1, bottom);
14678 return 0;
14680 case NOP_EXPR:
14681 /* Can't handle conversions from non-integral or wider integral type. */
14682 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14683 || (TYPE_PRECISION (type)
14684 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14685 return 0;
14687 /* ... fall through ... */
14689 case SAVE_EXPR:
14690 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14692 case INTEGER_CST:
14693 if (TREE_CODE (bottom) != INTEGER_CST
14694 || integer_zerop (bottom)
14695 || (TYPE_UNSIGNED (type)
14696 && (tree_int_cst_sgn (top) < 0
14697 || tree_int_cst_sgn (bottom) < 0)))
14698 return 0;
14699 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14700 top, bottom, 0));
14702 default:
14703 return 0;
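/* Illustrative sketch, not part of this file: the recursion above rests
   on closure facts about divisibility -- if either factor of a product,
   or both operands of a sum, are multiples of B, so is the result.
   A hypothetical spot check over small integers: */

#include <assert.h>

static void
example_multiple_of_facts (void)
{
  const int b = 8;
  int x, y;
  for (x = -64; x <= 64; x++)
    for (y = -64; y <= 64; y++)
      {
        if (x % b == 0 || y % b == 0)
          assert ((x * y) % b == 0);
        if (x % b == 0 && y % b == 0)
          assert ((x + y) % b == 0 && (x - y) % b == 0);
      }
}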
14707 /* Return true if CODE or TYPE is known to be non-negative. */
14709 static bool
14710 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14712 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14713 && truth_value_p (code))
14714 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14715 have a signed:1 type (where the values are -1 and 0). */
14716 return true;
14717 return false;
14720 /* Return true if (CODE OP0) is known to be non-negative. If the return
14721 value is based on the assumption that signed overflow is undefined,
14722 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14723 *STRICT_OVERFLOW_P. */
14725 bool
14726 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14727 bool *strict_overflow_p)
14729 if (TYPE_UNSIGNED (type))
14730 return true;
14732 switch (code)
14734 case ABS_EXPR:
14735 /* We can't return 1 if flag_wrapv is set because
14736 ABS_EXPR<INT_MIN> = INT_MIN. */
14737 if (!INTEGRAL_TYPE_P (type))
14738 return true;
14739 if (TYPE_OVERFLOW_UNDEFINED (type))
14741 *strict_overflow_p = true;
14742 return true;
14744 break;
14746 case NON_LVALUE_EXPR:
14747 case FLOAT_EXPR:
14748 case FIX_TRUNC_EXPR:
14749 return tree_expr_nonnegative_warnv_p (op0,
14750 strict_overflow_p);
14752 case NOP_EXPR:
14754 tree inner_type = TREE_TYPE (op0);
14755 tree outer_type = type;
14757 if (TREE_CODE (outer_type) == REAL_TYPE)
14759 if (TREE_CODE (inner_type) == REAL_TYPE)
14760 return tree_expr_nonnegative_warnv_p (op0,
14761 strict_overflow_p);
14762 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14764 if (TYPE_UNSIGNED (inner_type))
14765 return true;
14766 return tree_expr_nonnegative_warnv_p (op0,
14767 strict_overflow_p);
14770 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14772 if (TREE_CODE (inner_type) == REAL_TYPE)
14773 return tree_expr_nonnegative_warnv_p (op0,
14774 strict_overflow_p);
14775 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14776 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14777 && TYPE_UNSIGNED (inner_type);
14780 break;
14782 default:
14783 return tree_simple_nonnegative_warnv_p (code, type);
14786 /* We don't know the sign of `t', so be conservative and return false. */
14787 return false;
14790 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14791 value is based on the assumption that signed overflow is undefined,
14792 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14793 *STRICT_OVERFLOW_P. */
14795 bool
14796 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14797 tree op1, bool *strict_overflow_p)
14799 if (TYPE_UNSIGNED (type))
14800 return true;
14802 switch (code)
14804 case POINTER_PLUS_EXPR:
14805 case PLUS_EXPR:
14806 if (FLOAT_TYPE_P (type))
14807 return (tree_expr_nonnegative_warnv_p (op0,
14808 strict_overflow_p)
14809 && tree_expr_nonnegative_warnv_p (op1,
14810 strict_overflow_p));
14812 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14813 both unsigned and at least 2 bits shorter than the result. */
14814 if (TREE_CODE (type) == INTEGER_TYPE
14815 && TREE_CODE (op0) == NOP_EXPR
14816 && TREE_CODE (op1) == NOP_EXPR)
14818 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14819 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14820 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14821 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14823 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14824 TYPE_PRECISION (inner2)) + 1;
14825 return prec < TYPE_PRECISION (type);
14828 break;
14830 case MULT_EXPR:
14831 if (FLOAT_TYPE_P (type))
14833 /* x * x for floating point x is always non-negative. */
14834 if (operand_equal_p (op0, op1, 0))
14835 return true;
14836 return (tree_expr_nonnegative_warnv_p (op0,
14837 strict_overflow_p)
14838 && tree_expr_nonnegative_warnv_p (op1,
14839 strict_overflow_p));
14842 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14843 both unsigned and their combined precision is less than the result's. */
14844 if (TREE_CODE (type) == INTEGER_TYPE
14845 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14846 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14848 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14849 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14850 : TREE_TYPE (op0);
14851 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14852 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14853 : TREE_TYPE (op1);
14855 bool unsigned0 = TYPE_UNSIGNED (inner0);
14856 bool unsigned1 = TYPE_UNSIGNED (inner1);
14858 if (TREE_CODE (op0) == INTEGER_CST)
14859 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14861 if (TREE_CODE (op1) == INTEGER_CST)
14862 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14864 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14865 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14867 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14868 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14869 : TYPE_PRECISION (inner0);
14871 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14872 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14873 : TYPE_PRECISION (inner1);
14875 return precision0 + precision1 < TYPE_PRECISION (type);
14878 return false;
14880 case BIT_AND_EXPR:
14881 case MAX_EXPR:
14882 return (tree_expr_nonnegative_warnv_p (op0,
14883 strict_overflow_p)
14884 || tree_expr_nonnegative_warnv_p (op1,
14885 strict_overflow_p));
14887 case BIT_IOR_EXPR:
14888 case BIT_XOR_EXPR:
14889 case MIN_EXPR:
14890 case RDIV_EXPR:
14891 case TRUNC_DIV_EXPR:
14892 case CEIL_DIV_EXPR:
14893 case FLOOR_DIV_EXPR:
14894 case ROUND_DIV_EXPR:
14895 return (tree_expr_nonnegative_warnv_p (op0,
14896 strict_overflow_p)
14897 && tree_expr_nonnegative_warnv_p (op1,
14898 strict_overflow_p));
14900 case TRUNC_MOD_EXPR:
14901 case CEIL_MOD_EXPR:
14902 case FLOOR_MOD_EXPR:
14903 case ROUND_MOD_EXPR:
14904 return tree_expr_nonnegative_warnv_p (op0,
14905 strict_overflow_p);
14906 default:
14907 return tree_simple_nonnegative_warnv_p (code, type);
14910 /* We don't know the sign of `t', so be conservative and return false. */
14911 return false;
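/* Illustrative sketch, not part of this file: the PLUS_EXPR rule above
   in plain C -- two zero-extended values at least two bits narrower
   than the result type cannot carry into the sign bit, so their sum
   stays non-negative (assuming the usual 8-bit char, 32-bit int host). */

#include <assert.h>
#include <limits.h>

static void
example_zero_extend_sum (void)
{
  unsigned int x, y;
  for (x = 0; x <= UCHAR_MAX; x++)
    for (y = 0; y <= UCHAR_MAX; y++)
      assert ((int) x + (int) y >= 0);
}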
14914 /* Return true if T is known to be non-negative. If the return
14915 value is based on the assumption that signed overflow is undefined,
14916 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14917 *STRICT_OVERFLOW_P. */
14919 bool
14920 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14922 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14923 return true;
14925 switch (TREE_CODE (t))
14927 case INTEGER_CST:
14928 return tree_int_cst_sgn (t) >= 0;
14930 case REAL_CST:
14931 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14933 case FIXED_CST:
14934 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14936 case COND_EXPR:
14937 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14938 strict_overflow_p)
14939 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14940 strict_overflow_p));
14941 default:
14942 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14943 TREE_TYPE (t));
14945 /* We don't know the sign of `t', so be conservative and return false. */
14946 return false;
14949 /* Return true if T is known to be non-negative. If the return
14950 value is based on the assumption that signed overflow is undefined,
14951 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14952 *STRICT_OVERFLOW_P. */
14954 bool
14955 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14956 tree arg0, tree arg1, bool *strict_overflow_p)
14958 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14959 switch (DECL_FUNCTION_CODE (fndecl))
14961 CASE_FLT_FN (BUILT_IN_ACOS):
14962 CASE_FLT_FN (BUILT_IN_ACOSH):
14963 CASE_FLT_FN (BUILT_IN_CABS):
14964 CASE_FLT_FN (BUILT_IN_COSH):
14965 CASE_FLT_FN (BUILT_IN_ERFC):
14966 CASE_FLT_FN (BUILT_IN_EXP):
14967 CASE_FLT_FN (BUILT_IN_EXP10):
14968 CASE_FLT_FN (BUILT_IN_EXP2):
14969 CASE_FLT_FN (BUILT_IN_FABS):
14970 CASE_FLT_FN (BUILT_IN_FDIM):
14971 CASE_FLT_FN (BUILT_IN_HYPOT):
14972 CASE_FLT_FN (BUILT_IN_POW10):
14973 CASE_INT_FN (BUILT_IN_FFS):
14974 CASE_INT_FN (BUILT_IN_PARITY):
14975 CASE_INT_FN (BUILT_IN_POPCOUNT):
14976 case BUILT_IN_BSWAP32:
14977 case BUILT_IN_BSWAP64:
14978 /* Always true. */
14979 return true;
14981 CASE_FLT_FN (BUILT_IN_SQRT):
14982 /* sqrt(-0.0) is -0.0. */
14983 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14984 return true;
14985 return tree_expr_nonnegative_warnv_p (arg0,
14986 strict_overflow_p);
14988 CASE_FLT_FN (BUILT_IN_ASINH):
14989 CASE_FLT_FN (BUILT_IN_ATAN):
14990 CASE_FLT_FN (BUILT_IN_ATANH):
14991 CASE_FLT_FN (BUILT_IN_CBRT):
14992 CASE_FLT_FN (BUILT_IN_CEIL):
14993 CASE_FLT_FN (BUILT_IN_ERF):
14994 CASE_FLT_FN (BUILT_IN_EXPM1):
14995 CASE_FLT_FN (BUILT_IN_FLOOR):
14996 CASE_FLT_FN (BUILT_IN_FMOD):
14997 CASE_FLT_FN (BUILT_IN_FREXP):
14998 CASE_FLT_FN (BUILT_IN_LCEIL):
14999 CASE_FLT_FN (BUILT_IN_LDEXP):
15000 CASE_FLT_FN (BUILT_IN_LFLOOR):
15001 CASE_FLT_FN (BUILT_IN_LLCEIL):
15002 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15003 CASE_FLT_FN (BUILT_IN_LLRINT):
15004 CASE_FLT_FN (BUILT_IN_LLROUND):
15005 CASE_FLT_FN (BUILT_IN_LRINT):
15006 CASE_FLT_FN (BUILT_IN_LROUND):
15007 CASE_FLT_FN (BUILT_IN_MODF):
15008 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15009 CASE_FLT_FN (BUILT_IN_RINT):
15010 CASE_FLT_FN (BUILT_IN_ROUND):
15011 CASE_FLT_FN (BUILT_IN_SCALB):
15012 CASE_FLT_FN (BUILT_IN_SCALBLN):
15013 CASE_FLT_FN (BUILT_IN_SCALBN):
15014 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15015 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15016 CASE_FLT_FN (BUILT_IN_SINH):
15017 CASE_FLT_FN (BUILT_IN_TANH):
15018 CASE_FLT_FN (BUILT_IN_TRUNC):
15019 /* True if the 1st argument is nonnegative. */
15020 return tree_expr_nonnegative_warnv_p (arg0,
15021 strict_overflow_p);
15023 CASE_FLT_FN (BUILT_IN_FMAX):
15024 /* True if the 1st OR 2nd argument is nonnegative. */
15025 return (tree_expr_nonnegative_warnv_p (arg0,
15026 strict_overflow_p)
15027 || (tree_expr_nonnegative_warnv_p (arg1,
15028 strict_overflow_p)));
15030 CASE_FLT_FN (BUILT_IN_FMIN):
15031 /* True if the 1st AND 2nd arguments are nonnegative. */
15032 return (tree_expr_nonnegative_warnv_p (arg0,
15033 strict_overflow_p)
15034 && (tree_expr_nonnegative_warnv_p (arg1,
15035 strict_overflow_p)));
15037 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15038 /* True if the 2nd argument is nonnegative. */
15039 return tree_expr_nonnegative_warnv_p (arg1,
15040 strict_overflow_p);
15042 CASE_FLT_FN (BUILT_IN_POWI):
15043 /* True if the 1st argument is nonnegative or the second
15044 argument is an even integer. */
15045 if (TREE_CODE (arg1) == INTEGER_CST
15046 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15047 return true;
15048 return tree_expr_nonnegative_warnv_p (arg0,
15049 strict_overflow_p);
15051 CASE_FLT_FN (BUILT_IN_POW):
15052 /* True if the 1st argument is nonnegative or the second
15053 argument is an even integer valued real. */
15054 if (TREE_CODE (arg1) == REAL_CST)
15056 REAL_VALUE_TYPE c;
15057 HOST_WIDE_INT n;
15059 c = TREE_REAL_CST (arg1);
15060 n = real_to_integer (&c);
15061 if ((n & 1) == 0)
15063 REAL_VALUE_TYPE cint;
15064 real_from_integer (&cint, VOIDmode, n,
15065 n < 0 ? -1 : 0, 0);
15066 if (real_identical (&c, &cint))
15067 return true;
15070 return tree_expr_nonnegative_warnv_p (arg0,
15071 strict_overflow_p);
15073 default:
15074 break;
15076 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15077 type);
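/* Illustrative sketch, not part of this file: the BUILT_IN_POW rule
   above -- pow (x, c) with c an even integer-valued real constant is
   non-negative whatever the sign of x.  Compile with -lm. */

#include <assert.h>
#include <math.h>

static void
example_pow_even_exponent (void)
{
  assert (pow (-3.0, 4.0) >= 0.0);
  assert (pow (-0.5, 2.0) >= 0.0);
  assert (pow (2.0, 6.0) >= 0.0);
}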
15080 /* Return true if T is known to be non-negative. If the return
15081 value is based on the assumption that signed overflow is undefined,
15082 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15083 *STRICT_OVERFLOW_P. */
15085 bool
15086 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15088 enum tree_code code = TREE_CODE (t);
15089 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15090 return true;
15092 switch (code)
15094 case TARGET_EXPR:
15096 tree temp = TARGET_EXPR_SLOT (t);
15097 t = TARGET_EXPR_INITIAL (t);
15099 /* If the initializer is non-void, then it's a normal expression
15100 that will be assigned to the slot. */
15101 if (!VOID_TYPE_P (t))
15102 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15104 /* Otherwise, the initializer sets the slot in some way. One common
15105 way is an assignment statement at the end of the initializer. */
15106 while (1)
15108 if (TREE_CODE (t) == BIND_EXPR)
15109 t = expr_last (BIND_EXPR_BODY (t));
15110 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15111 || TREE_CODE (t) == TRY_CATCH_EXPR)
15112 t = expr_last (TREE_OPERAND (t, 0));
15113 else if (TREE_CODE (t) == STATEMENT_LIST)
15114 t = expr_last (t);
15115 else
15116 break;
15118 if (TREE_CODE (t) == MODIFY_EXPR
15119 && TREE_OPERAND (t, 0) == temp)
15120 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15121 strict_overflow_p);
15123 return false;
15126 case CALL_EXPR:
15128 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15129 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15131 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15132 get_callee_fndecl (t),
15133 arg0,
15134 arg1,
15135 strict_overflow_p);
15137 case COMPOUND_EXPR:
15138 case MODIFY_EXPR:
15139 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15140 strict_overflow_p);
15141 case BIND_EXPR:
15142 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15143 strict_overflow_p);
15144 case SAVE_EXPR:
15145 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15146 strict_overflow_p);
15148 default:
15149 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15150 TREE_TYPE (t));
15153 /* We don't know the sign of `t', so be conservative and return false. */
15154 return false;
15157 /* Return true if T is known to be non-negative. If the return
15158 value is based on the assumption that signed overflow is undefined,
15159 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15160 *STRICT_OVERFLOW_P. */
15162 bool
15163 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15165 enum tree_code code;
15166 if (t == error_mark_node)
15167 return false;
15169 code = TREE_CODE (t);
15170 switch (TREE_CODE_CLASS (code))
15172 case tcc_binary:
15173 case tcc_comparison:
15174 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15175 TREE_TYPE (t),
15176 TREE_OPERAND (t, 0),
15177 TREE_OPERAND (t, 1),
15178 strict_overflow_p);
15180 case tcc_unary:
15181 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15182 TREE_TYPE (t),
15183 TREE_OPERAND (t, 0),
15184 strict_overflow_p);
15186 case tcc_constant:
15187 case tcc_declaration:
15188 case tcc_reference:
15189 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15191 default:
15192 break;
15195 switch (code)
15197 case TRUTH_AND_EXPR:
15198 case TRUTH_OR_EXPR:
15199 case TRUTH_XOR_EXPR:
15200 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15201 TREE_TYPE (t),
15202 TREE_OPERAND (t, 0),
15203 TREE_OPERAND (t, 1),
15204 strict_overflow_p);
15205 case TRUTH_NOT_EXPR:
15206 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15207 TREE_TYPE (t),
15208 TREE_OPERAND (t, 0),
15209 strict_overflow_p);
15211 case COND_EXPR:
15212 case CONSTRUCTOR:
15213 case OBJ_TYPE_REF:
15214 case ASSERT_EXPR:
15215 case ADDR_EXPR:
15216 case WITH_SIZE_EXPR:
15217 case EXC_PTR_EXPR:
15218 case SSA_NAME:
15219 case FILTER_EXPR:
15220 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15222 default:
15223 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15227 /* Return true if `t' is known to be non-negative. Handle warnings
15228 about undefined signed overflow. */
15230 bool
15231 tree_expr_nonnegative_p (tree t)
15233 bool ret, strict_overflow_p;
15235 strict_overflow_p = false;
15236 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15237 if (strict_overflow_p)
15238 fold_overflow_warning (("assuming signed overflow does not occur when "
15239 "determining that expression is always "
15240 "non-negative"),
15241 WARN_STRICT_OVERFLOW_MISC);
15242 return ret;
15246 /* Return true when (CODE OP0) is known to be nonzero.
15247 For floating point we further ensure that T is not denormal.
15248 Similar logic is present in nonzero_address in rtlanal.c.
15250 If the return value is based on the assumption that signed overflow
15251 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15252 change *STRICT_OVERFLOW_P. */
15254 bool
15255 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15256 bool *strict_overflow_p)
15258 switch (code)
15260 case ABS_EXPR:
15261 return tree_expr_nonzero_warnv_p (op0,
15262 strict_overflow_p);
15264 case NOP_EXPR:
15266 tree inner_type = TREE_TYPE (op0);
15267 tree outer_type = type;
15269 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15270 && tree_expr_nonzero_warnv_p (op0,
15271 strict_overflow_p));
15273 break;
15275 case NON_LVALUE_EXPR:
15276 return tree_expr_nonzero_warnv_p (op0,
15277 strict_overflow_p);
15279 default:
15280 break;
15283 return false;
15286 /* Return true when (CODE OP0 OP1) is known to be nonzero.
15287 For floating point we further ensure that T is not denormal.
15288 Similar logic is present in nonzero_address in rtlanal.c.
15290 If the return value is based on the assumption that signed overflow
15291 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15292 change *STRICT_OVERFLOW_P. */
15294 bool
15295 tree_binary_nonzero_warnv_p (enum tree_code code,
15296 tree type,
15297 tree op0,
15298 tree op1, bool *strict_overflow_p)
15300 bool sub_strict_overflow_p;
15301 switch (code)
15303 case POINTER_PLUS_EXPR:
15304 case PLUS_EXPR:
15305 if (TYPE_OVERFLOW_UNDEFINED (type))
15307 /* In the presence of negative values it is hard
15308 to say anything definite. */
15309 sub_strict_overflow_p = false;
15310 if (!tree_expr_nonnegative_warnv_p (op0,
15311 &sub_strict_overflow_p)
15312 || !tree_expr_nonnegative_warnv_p (op1,
15313 &sub_strict_overflow_p))
15314 return false;
15315 /* One of the operands must be positive and the other non-negative. */
15316 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15317 overflows, on a two's-complement machine the sum of two
15318 nonnegative numbers can never be zero. */
15319 return (tree_expr_nonzero_warnv_p (op0,
15320 strict_overflow_p)
15321 || tree_expr_nonzero_warnv_p (op1,
15322 strict_overflow_p));
15324 break;
15326 case MULT_EXPR:
15327 if (TYPE_OVERFLOW_UNDEFINED (type))
15329 if (tree_expr_nonzero_warnv_p (op0,
15330 strict_overflow_p)
15331 && tree_expr_nonzero_warnv_p (op1,
15332 strict_overflow_p))
15334 *strict_overflow_p = true;
15335 return true;
15338 break;
15340 case MIN_EXPR:
15341 sub_strict_overflow_p = false;
15342 if (tree_expr_nonzero_warnv_p (op0,
15343 &sub_strict_overflow_p)
15344 && tree_expr_nonzero_warnv_p (op1,
15345 &sub_strict_overflow_p))
15347 if (sub_strict_overflow_p)
15348 *strict_overflow_p = true;
15350 break;
15352 case MAX_EXPR:
15353 sub_strict_overflow_p = false;
15354 if (tree_expr_nonzero_warnv_p (op0,
15355 &sub_strict_overflow_p))
15357 if (sub_strict_overflow_p)
15358 *strict_overflow_p = true;
15360 /* When both operands are nonzero, then MAX must be too. */
15361 if (tree_expr_nonzero_warnv_p (op1,
15362 strict_overflow_p))
15363 return true;
15365 /* MAX where operand 0 is positive is positive. */
15366 return tree_expr_nonnegative_warnv_p (op0,
15367 strict_overflow_p);
15369 /* MAX where operand 1 is positive is positive. */
15370 else if (tree_expr_nonzero_warnv_p (op1,
15371 &sub_strict_overflow_p)
15372 && tree_expr_nonnegative_warnv_p (op1,
15373 &sub_strict_overflow_p))
15375 if (sub_strict_overflow_p)
15376 *strict_overflow_p = true;
15377 return true;
15379 break;
15381 case BIT_IOR_EXPR:
15382 return (tree_expr_nonzero_warnv_p (op1,
15383 strict_overflow_p)
15384 || tree_expr_nonzero_warnv_p (op0,
15385 strict_overflow_p));
15387 default:
15388 break;
15391 return false;
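/* Illustrative sketch, not part of this file: the PLUS_EXPR comment
   above, checked at 8 bits -- two wrapping nonnegative values (high
   bit clear) can only sum to zero modulo 256 when both are zero,
   since the true sum is at most 254. */

#include <assert.h>
#include <stdint.h>

static void
example_nonneg_sum_nonzero (void)
{
  unsigned int x, y;
  for (x = 0; x <= 127; x++)
    for (y = 0; y <= 127; y++)
      if (x != 0 || y != 0)
        assert ((uint8_t) (x + y) != 0);
}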
15394 /* Return true when T is known to be nonzero.
15395 For floating point we further ensure that T is not denormal.
15396 Similar logic is present in nonzero_address in rtlanal.c.
15398 If the return value is based on the assumption that signed overflow
15399 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15400 change *STRICT_OVERFLOW_P. */
15402 bool
15403 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15405 bool sub_strict_overflow_p;
15406 switch (TREE_CODE (t))
15408 case INTEGER_CST:
15409 return !integer_zerop (t);
15411 case ADDR_EXPR:
15413 tree base = get_base_address (TREE_OPERAND (t, 0));
15415 if (!base)
15416 return false;
15418 /* Weak declarations may link to NULL. Other things may also be NULL
15419 so protect with -fdelete-null-pointer-checks; but not variables
15420 allocated on the stack. */
15421 if (DECL_P (base)
15422 && (flag_delete_null_pointer_checks
15423 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15424 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15426 /* Constants are never weak. */
15427 if (CONSTANT_CLASS_P (base))
15428 return true;
15430 return false;
15433 case COND_EXPR:
15434 sub_strict_overflow_p = false;
15435 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15436 &sub_strict_overflow_p)
15437 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15438 &sub_strict_overflow_p))
15440 if (sub_strict_overflow_p)
15441 *strict_overflow_p = true;
15442 return true;
15444 break;
15446 default:
15447 break;
15449 return false;
15452 /* Return true when T is known to be nonzero.
15453 For floating point we further ensure that T is not denormal.
15454 Similar logic is present in nonzero_address in rtlanal.c.
15456 If the return value is based on the assumption that signed overflow
15457 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15458 change *STRICT_OVERFLOW_P. */
15460 bool
15461 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15463 tree type = TREE_TYPE (t);
15464 enum tree_code code;
15466 /* Doing something useful for floating point would need more work. */
15467 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15468 return false;
15470 code = TREE_CODE (t);
15471 switch (TREE_CODE_CLASS (code))
15473 case tcc_unary:
15474 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15475 strict_overflow_p);
15476 case tcc_binary:
15477 case tcc_comparison:
15478 return tree_binary_nonzero_warnv_p (code, type,
15479 TREE_OPERAND (t, 0),
15480 TREE_OPERAND (t, 1),
15481 strict_overflow_p);
15482 case tcc_constant:
15483 case tcc_declaration:
15484 case tcc_reference:
15485 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15487 default:
15488 break;
15491 switch (code)
15493 case TRUTH_NOT_EXPR:
15494 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15495 strict_overflow_p);
15497 case TRUTH_AND_EXPR:
15498 case TRUTH_OR_EXPR:
15499 case TRUTH_XOR_EXPR:
15500 return tree_binary_nonzero_warnv_p (code, type,
15501 TREE_OPERAND (t, 0),
15502 TREE_OPERAND (t, 1),
15503 strict_overflow_p);
15505 case COND_EXPR:
15506 case CONSTRUCTOR:
15507 case OBJ_TYPE_REF:
15508 case ASSERT_EXPR:
15509 case ADDR_EXPR:
15510 case WITH_SIZE_EXPR:
15511 case EXC_PTR_EXPR:
15512 case SSA_NAME:
15513 case FILTER_EXPR:
15514 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15516 case COMPOUND_EXPR:
15517 case MODIFY_EXPR:
15518 case BIND_EXPR:
15519 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15520 strict_overflow_p);
15522 case SAVE_EXPR:
15523 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15524 strict_overflow_p);
15526 case CALL_EXPR:
15527 return alloca_call_p (t);
15529 default:
15530 break;
15532 return false;
15535 /* Return true when T is known to be nonzero.
15536 Handle warnings about undefined signed overflow. */
15538 bool
15539 tree_expr_nonzero_p (tree t)
15541 bool ret, strict_overflow_p;
15543 strict_overflow_p = false;
15544 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15545 if (strict_overflow_p)
15546 fold_overflow_warning (("assuming signed overflow does not occur when "
15547 "determining that expression is always "
15548 "non-zero"),
15549 WARN_STRICT_OVERFLOW_MISC);
15550 return ret;
15553 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15554 attempt to fold the expression to a constant without modifying TYPE,
15555 OP0 or OP1.
15557 If the expression could be simplified to a constant, then return
15558 the constant. If the expression would not be simplified to a
15559 constant, then return NULL_TREE. */
15561 tree
15562 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15564 tree tem = fold_binary (code, type, op0, op1);
15565 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15568 /* Given the components of a unary expression CODE, TYPE and OP0,
15569 attempt to fold the expression to a constant without modifying
15570 TYPE or OP0.
15572 If the expression could be simplified to a constant, then return
15573 the constant. If the expression would not be simplified to a
15574 constant, then return NULL_TREE. */
15576 tree
15577 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15579 tree tem = fold_unary (code, type, op0);
15580 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15583 /* If EXP represents referencing an element in a constant string
15584 (either via pointer arithmetic or array indexing), return the
15585 tree representing the value accessed, otherwise return NULL. */
15587 tree
15588 fold_read_from_constant_string (tree exp)
15590 if ((TREE_CODE (exp) == INDIRECT_REF
15591 || TREE_CODE (exp) == ARRAY_REF)
15592 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15594 tree exp1 = TREE_OPERAND (exp, 0);
15595 tree index;
15596 tree string;
15597 location_t loc = EXPR_LOCATION (exp);
15599 if (TREE_CODE (exp) == INDIRECT_REF)
15600 string = string_constant (exp1, &index);
15601 else
15603 tree low_bound = array_ref_low_bound (exp);
15604 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15606 /* Optimize the special case of a zero lower bound.
15608 We convert the low_bound to sizetype to avoid some problems
15609 with constant folding. (E.g. suppose the lower bound is 1,
15610 and its mode is QI. Without the conversion, (ARRAY
15611 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15612 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15613 if (! integer_zerop (low_bound))
15614 index = size_diffop_loc (loc, index,
15615 fold_convert_loc (loc, sizetype, low_bound));
15617 string = exp1;
15620 if (string
15621 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15622 && TREE_CODE (string) == STRING_CST
15623 && TREE_CODE (index) == INTEGER_CST
15624 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15625 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15626 == MODE_INT)
15627 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15628 return build_int_cst_type (TREE_TYPE (exp),
15629 (TREE_STRING_POINTER (string)
15630 [TREE_INT_CST_LOW (index)]));
15632 return NULL;
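/* Illustrative sketch, not part of this file: what the routine above
   achieves at source level -- a constant-index read from a string
   literal collapses to the character constant itself. */

#include <assert.h>

static void
example_read_from_constant_string (void)
{
  assert ("hello"[1] == 'e');   /* array-indexing form */
  assert (*("abc" + 2) == 'c'); /* pointer-arithmetic form */
}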
15635 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15636 an integer, real, or fixed-point constant.
15638 TYPE is the type of the result. */
15640 static tree
15641 fold_negate_const (tree arg0, tree type)
15643 tree t = NULL_TREE;
15645 switch (TREE_CODE (arg0))
15647 case INTEGER_CST:
15649 unsigned HOST_WIDE_INT low;
15650 HOST_WIDE_INT high;
15651 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15652 TREE_INT_CST_HIGH (arg0),
15653 &low, &high);
15654 t = force_fit_type_double (type, low, high, 1,
15655 (overflow | TREE_OVERFLOW (arg0))
15656 && !TYPE_UNSIGNED (type));
15657 break;
15660 case REAL_CST:
15661 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15662 break;
15664 case FIXED_CST:
15666 FIXED_VALUE_TYPE f;
15667 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15668 &(TREE_FIXED_CST (arg0)), NULL,
15669 TYPE_SATURATING (type));
15670 t = build_fixed (type, f);
15671 /* Propagate overflow flags. */
15672 if (overflow_p | TREE_OVERFLOW (arg0))
15673 TREE_OVERFLOW (t) = 1;
15674 break;
15677 default:
15678 gcc_unreachable ();
15681 return t;
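/* Illustrative sketch, not part of this file: the two-word negation
   neg_double performs for the INTEGER_CST case above, shown on a
   128-bit value split into LOW and HIGH words (hypothetical fixed
   widths) -- negate the low word, then complement the high word and
   add the carry that only a zero low word produces. */

#include <stdint.h>

static void
example_neg_double (uint64_t low, int64_t high,
                    uint64_t *plow, int64_t *phigh)
{
  *plow = -low;
  *phigh = ~high + (low == 0);
}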
15684 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15685 an integer constant or real constant.
15687 TYPE is the type of the result. */
15689 tree
15690 fold_abs_const (tree arg0, tree type)
15692 tree t = NULL_TREE;
15694 switch (TREE_CODE (arg0))
15696 case INTEGER_CST:
15697 /* If the value is unsigned, then the absolute value is
15698 the same as the ordinary value. */
15699 if (TYPE_UNSIGNED (type))
15700 t = arg0;
15701 /* Similarly, if the value is non-negative. */
15702 else if (INT_CST_LT (integer_minus_one_node, arg0))
15703 t = arg0;
15704 /* If the value is negative, then the absolute value is
15705 its negation. */
15706 else
15708 unsigned HOST_WIDE_INT low;
15709 HOST_WIDE_INT high;
15710 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15711 TREE_INT_CST_HIGH (arg0),
15712 &low, &high);
15713 t = force_fit_type_double (type, low, high, -1,
15714 overflow | TREE_OVERFLOW (arg0));
15716 break;
15718 case REAL_CST:
15719 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15720 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15721 else
15722 t = arg0;
15723 break;
15725 default:
15726 gcc_unreachable ();
15729 return t;
15732 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15733 constant. TYPE is the type of the result. */
15735 static tree
15736 fold_not_const (tree arg0, tree type)
15738 tree t = NULL_TREE;
15740 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15742 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15743 ~TREE_INT_CST_HIGH (arg0), 0,
15744 TREE_OVERFLOW (arg0));
15746 return t;
15749 /* Given CODE, a relational operator, the target type, TYPE and two
15750 constant operands OP0 and OP1, return the result of the
15751 relational operation. If the result is not a compile time
15752 constant, then return NULL_TREE. */
15754 static tree
15755 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15757 int result, invert;
15759 /* From here on, the only cases we handle are when the result is
15760 known to be a constant. */
15762 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15764 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15765 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15767 /* Handle the cases where either operand is a NaN. */
15768 if (real_isnan (c0) || real_isnan (c1))
15770 switch (code)
15772 case EQ_EXPR:
15773 case ORDERED_EXPR:
15774 result = 0;
15775 break;
15777 case NE_EXPR:
15778 case UNORDERED_EXPR:
15779 case UNLT_EXPR:
15780 case UNLE_EXPR:
15781 case UNGT_EXPR:
15782 case UNGE_EXPR:
15783 case UNEQ_EXPR:
15784 result = 1;
15785 break;
15787 case LT_EXPR:
15788 case LE_EXPR:
15789 case GT_EXPR:
15790 case GE_EXPR:
15791 case LTGT_EXPR:
15792 if (flag_trapping_math)
15793 return NULL_TREE;
15794 result = 0;
15795 break;
15797 default:
15798 gcc_unreachable ();
15801 return constant_boolean_node (result, type);
15804 return constant_boolean_node (real_compare (code, c0, c1), type);
15807 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15809 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15810 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15811 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15814 /* Handle equality/inequality of complex constants. */
15815 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15817 tree rcond = fold_relational_const (code, type,
15818 TREE_REALPART (op0),
15819 TREE_REALPART (op1));
15820 tree icond = fold_relational_const (code, type,
15821 TREE_IMAGPART (op0),
15822 TREE_IMAGPART (op1));
15823 if (code == EQ_EXPR)
15824 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15825 else if (code == NE_EXPR)
15826 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15827 else
15828 return NULL_TREE;
15831 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15833 To compute GT, swap the arguments and do LT.
15834 To compute GE, do LT and invert the result.
15835 To compute LE, swap the arguments, do LT and invert the result.
15836 To compute NE, do EQ and invert the result.
15838 Therefore, the code below must handle only EQ and LT. */
15840 if (code == LE_EXPR || code == GT_EXPR)
15842 tree tem = op0;
15843 op0 = op1;
15844 op1 = tem;
15845 code = swap_tree_comparison (code);
15848 /* Note that it is safe to invert for real values here because we
15849 have already handled the one case where it matters. */
15851 invert = 0;
15852 if (code == NE_EXPR || code == GE_EXPR)
15854 invert = 1;
15855 code = invert_tree_comparison (code, false);
15858 /* Compute a result for LT or EQ if args permit;
15859 otherwise return NULL_TREE. */
15860 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15862 if (code == EQ_EXPR)
15863 result = tree_int_cst_equal (op0, op1);
15864 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15865 result = INT_CST_LT_UNSIGNED (op0, op1);
15866 else
15867 result = INT_CST_LT (op0, op1);
15869 else
15870 return NULL_TREE;
15872 if (invert)
15873 result ^= 1;
15874 return constant_boolean_node (result, type);
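/* Illustrative sketch, not part of this file: the NaN rows above mirror
   IEEE 754 -- every ordered comparison involving a NaN is false, while
   NE and the unordered predicates are true.  Compile with -lm. */

#include <assert.h>
#include <math.h>

static void
example_nan_comparisons (void)
{
  double n = nan ("");
  assert (!(n == n) && !(n < 1.0) && !(n >= 1.0));
  assert (n != n);
  assert (isunordered (n, 1.0));
}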
15877 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15878 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15879 itself. */
15881 tree
15882 fold_build_cleanup_point_expr (tree type, tree expr)
15884 /* If the expression does not have side effects then we don't have to wrap
15885 it with a cleanup point expression. */
15886 if (!TREE_SIDE_EFFECTS (expr))
15887 return expr;
15889 /* If the expression is a return, check whether the expression inside the
15890 return, or the right-hand side of the modify expression inside the
15891 return, has side effects. If either has none, we don't need to wrap
15892 the expression in a cleanup point expression. Note we don't check the
15893 left-hand side of the modify because it should always be a return decl. */
15894 if (TREE_CODE (expr) == RETURN_EXPR)
15896 tree op = TREE_OPERAND (expr, 0);
15897 if (!op || !TREE_SIDE_EFFECTS (op))
15898 return expr;
15899 op = TREE_OPERAND (op, 1);
15900 if (!TREE_SIDE_EFFECTS (op))
15901 return expr;
15904 return build1 (CLEANUP_POINT_EXPR, type, expr);

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p; make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
          SET_EXPR_LOCATION (op0, loc);
          return op0;
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          /* The element index must lie strictly within the vector; an
             index equal to TYPE_VECTOR_SUBPARTS would reference one
             element past the end.  */
          if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3_loc (loc,
                                    BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
                                    part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1_loc (loc, IMAGPART_EXPR, type,
                                    TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}
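
/* Example (illustrative sketch): with "double _Complex c;", folding
   the C expression *(double *)&c matches the ADDR_EXPR/COMPLEX_TYPE
   case above and yields __real__ c, while *((double *)&c + 1) matches
   the POINTER_PLUS_EXPR case, since the byte offset equals
   TYPE_SIZE_UNIT (double), and yields __imag__ c.  */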

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
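
/* Example (illustrative sketch): if the value of "f () + x" is
   ignored, the tcc_binary case above drops the side-effect-free
   operand x and loops on f (); a CALL_EXPR falls into the default
   class, so f () itself is returned as the part that must still be
   evaluated.  A pure expression such as "x + 1" comes back as
   integer_zero_node.  */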

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant this check is more expensive than simply
     performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
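
/* Example (illustrative sketch): rounding the constant 10 up to a
   multiple of 8 takes the power-of-two branch above:
   (10 & ~7) + 8 == 16.  For a non-constant VALUE the same branch
   builds (VALUE + 7) & -8, and for a divisor such as 12 that is not a
   power of two, the fallback builds CEIL_DIV_EXPR (VALUE, 12) * 12.  */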

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant this check is more expensive than simply
     performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
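
/* Example (illustrative sketch): rounding 10 down to a multiple of 8
   is just the bit-clearing step above, 10 & -8 == 8; a divisor of 12
   instead builds FLOOR_DIV_EXPR (VALUE, 12) * 12.  */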

/* Return a pointer to the base of the object addressed by EXP, and
   extract the offset of the access, storing it in *PBITPOS and
   *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
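
/* Example (illustrative sketch): for "struct { int x; int y; } s;",
   splitting the address &s.y yields core &s, with *PBITPOS set to the
   bit position of the y field (32, assuming a 32-bit int) and
   *POFFSET NULL_TREE; a bare pointer value is returned as its own
   core with a zero offset.  */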

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
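
/* Example (illustrative sketch): for "int a[10];", the addresses
   &a[3] and &a[1] share the core &a and both byte offsets are
   constant, so ptr_difference_const stores 2 * sizeof (int) in *DIFF
   and returns true.  Comparing &a[i] against &a[1] returns false,
   because only one of the offsets is constant.  */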

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
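
/* Example (illustrative sketch): when only the magnitude of a result
   matters, e.g. when folding pow (x, 2.0), stripping sign ops rewrites
   -x * fabs (y) (folded earlier into NEGATE_EXPR and ABS_EXPR nodes)
   into x * y, and reduces copysign (x, s) * x to x * x, with
   omit_one_operand_loc still evaluating s when it has side effects.  */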